use crate::prelude::*;
use crate::runtime::vm::{
    SendSyncPtr, VMArrayCallFunction, VMFuncRef, VMGlobalDefinition, VMMemoryDefinition,
    VMOpaqueContext, VMStore, VMStoreRawPtr, VMWasmCallFunction, ValRaw, VmPtr, VmSafe,
};
use alloc::alloc::Layout;
use alloc::sync::Arc;
use core::any::Any;
use core::marker;
use core::mem;
use core::mem::offset_of;
use core::ops::Deref;
use core::ptr::{self, NonNull};
use sptr::Strict;
use wasmtime_environ::component::*;
use wasmtime_environ::{HostPtr, PrimaryMap, VMSharedTypeIndex};

// Poison value written into not-yet-initialized vmctx slots in debug builds;
// the debug assertions below compare against it to catch reads of slots that
// were never filled in. The cast is deliberately truncating on 32-bit targets.
#[allow(clippy::cast_possible_truncation)]
const INVALID_PTR: usize = 0xdead_dead_beef_beef_u64 as usize;

mod libcalls;
mod resources;

pub use self::resources::{CallContexts, ResourceTable, ResourceTables};

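/// Runtime state for an instantiated component.
///
/// This structure sits immediately before the raw `VMComponentContext` that
/// compiled trampolines reference, so the header and the vmctx data are
/// allocated, initialized, and freed together (see `alloc_layout`, `new_at`,
/// and `OwnedComponentInstance`).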
#[repr(C)]
pub struct ComponentInstance {
    /// Size and offset information for the trailing `VMComponentContext`.
    offsets: VMComponentOffsets<HostPtr>,

    /// Self-referential pointer back to the trailing `VMComponentContext`,
    /// kept so `vmctx()` can hand out pointers carrying the provenance of the
    /// original allocation.
    vmctx_self_reference: SendSyncPtr<VMComponentContext>,

    /// Runtime information about this component supplied at instantiation
    /// time (component metadata, type tables, etc.).
    runtime_info: Arc<dyn ComponentRuntimeInfo>,

    /// State of the resource tables for this component, one per resource
    /// type.
    component_resource_tables: PrimaryMap<TypeResourceTableIndex, ResourceTable>,

    /// Resource type information for this component, stored type-erased as
    /// `dyn Any`.
    resource_types: Arc<dyn Any + Send + Sync>,

    /// Raw pointer back to the store that owns this component instance.
    store: VMStoreRawPtr,

    /// Marker for the start of the variable-sized `vmctx` data that follows
    /// this structure in memory; must be the last field.
    vmctx: VMComponentContext,
}

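/// Signature of the host function stored in a `VMLowering` and invoked when a
/// lowered import is called from compiled code.
///
/// The parameters correspond to what the compiled lowering trampoline passes:
/// the component `vmctx`, the opaque host `data` pointer, the `ty` and
/// `caller_instance` indices identifying the call, the caller's
/// instance-flags global, optional memory and `realloc` pointers, the string
/// encoding and async flag from the canonical options, and finally a
/// pointer/length pair for the `ValRaw` argument-and-result buffer. The
/// `bool` return value reports whether the host call completed successfully.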
pub type VMLoweringCallee = extern "C" fn(
    vmctx: NonNull<VMOpaqueContext>,
    data: NonNull<u8>,
    ty: u32,
    caller_instance: u32,
    flags: NonNull<VMGlobalDefinition>,
    opt_memory: *mut VMMemoryDefinition,
    opt_realloc: *mut VMFuncRef,
    string_encoding: u8,
    async_: u8,
    args_and_results: NonNull<mem::MaybeUninit<ValRaw>>,
    nargs_and_results: usize,
) -> bool;

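/// Runtime representation of a lowered host import: the function to call plus
/// the opaque host data handed back to it. This is the value stored in the
/// vmctx slot read by `ComponentInstance::lowering`.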
#[derive(Copy, Clone)]
#[repr(C)]
pub struct VMLowering {
    /// Host function to invoke when this lowered import is called.
    pub callee: VMLoweringCallee,
    /// Opaque host data forwarded to `callee`.
    pub data: VmPtr<u8>,
}

unsafe impl VmSafe for VMLowering {}

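/// Opaque, 16-byte-aligned marker type for the variable-sized raw `vmctx`
/// area that trails a `ComponentInstance` allocation. All reads and writes go
/// through byte offsets computed by `VMComponentOffsets` rather than named
/// fields.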
#[repr(C)]
#[repr(align(16))]
pub struct VMComponentContext {
    _marker: marker::PhantomPinned,
}

impl ComponentInstance {
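    /// Converts the `vmctx` pointer coming from compiled code back into the
    /// `ComponentInstance` stored immediately before it and hands a mutable
    /// borrow of that instance to `f`.
    ///
    /// # Safety
    ///
    /// `vmctx` must point at a valid component context created by this module
    /// (i.e. backed by an allocation from `OwnedComponentInstance::new`), and
    /// the usual aliasing rules for the resulting `&mut ComponentInstance`
    /// must be upheld by the caller.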
    pub unsafe fn from_vmctx<R>(
        vmctx: NonNull<VMComponentContext>,
        f: impl FnOnce(&mut ComponentInstance) -> R,
    ) -> R {
        let mut ptr = vmctx
            .byte_sub(mem::size_of::<ComponentInstance>())
            .cast::<ComponentInstance>();
        f(ptr.as_mut())
    }

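    /// Returns the layout of the combined allocation: the fixed-size
    /// `ComponentInstance` header followed by `offsets.size_of_vmctx()` bytes
    /// of raw `VMComponentContext` data.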
    fn alloc_layout(offsets: &VMComponentOffsets<HostPtr>) -> Layout {
        let size = mem::size_of::<Self>()
            .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
            .unwrap();
        let align = mem::align_of::<Self>();
        Layout::from_size_align(size, align).unwrap()
    }

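    /// Initializes an allocation at `ptr` in place.
    ///
    /// # Safety
    ///
    /// `ptr` must point to uninitialized memory of at least `alloc_size`
    /// bytes matching `Self::alloc_layout(&offsets)`, and `store` must remain
    /// valid for as long as the created instance is used.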
    unsafe fn new_at(
        ptr: NonNull<ComponentInstance>,
        alloc_size: usize,
        offsets: VMComponentOffsets<HostPtr>,
        runtime_info: Arc<dyn ComponentRuntimeInfo>,
        resource_types: Arc<dyn Any + Send + Sync>,
        store: NonNull<dyn VMStore>,
    ) {
        assert!(alloc_size >= Self::alloc_layout(&offsets).size());

        let num_tables = runtime_info.component().num_resource_tables;
        let mut component_resource_tables = PrimaryMap::with_capacity(num_tables);
        for _ in 0..num_tables {
            component_resource_tables.push(ResourceTable::default());
        }

        ptr::write(
            ptr.as_ptr(),
            ComponentInstance {
                offsets,
                // The trailing `VMComponentContext` starts immediately after
                // this header structure.
                vmctx_self_reference: SendSyncPtr::new(
                    NonNull::new(
                        ptr.as_ptr()
                            .byte_add(mem::size_of::<ComponentInstance>())
                            .cast(),
                    )
                    .unwrap(),
                ),
                component_resource_tables,
                runtime_info,
                resource_types,
                store: VMStoreRawPtr(store),
                vmctx: VMComponentContext {
                    _marker: marker::PhantomPinned,
                },
            },
        );

        (*ptr.as_ptr()).initialize_vmctx();
    }

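    /// Returns a pointer to the trailing `VMComponentContext`.
    ///
    /// The address is recomputed from `&self.vmctx`, but the pointer's
    /// provenance is taken from `vmctx_self_reference`, which was derived
    /// from the original allocation; this keeps accesses to the whole
    /// trailing region valid under strict-provenance rules.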
    fn vmctx(&self) -> NonNull<VMComponentContext> {
        let addr = &raw const self.vmctx;
        let ret = Strict::with_addr(self.vmctx_self_reference.as_ptr(), Strict::addr(addr));
        NonNull::new(ret).unwrap()
    }

    unsafe fn vmctx_plus_offset<T: VmSafe>(&self, offset: u32) -> *const T {
        self.vmctx()
            .as_ptr()
            .byte_add(usize::try_from(offset).unwrap())
            .cast()
    }

    unsafe fn vmctx_plus_offset_mut<T: VmSafe>(&mut self, offset: u32) -> *mut T {
        self.vmctx()
            .as_ptr()
            .byte_add(usize::try_from(offset).unwrap())
            .cast()
    }

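    /// Returns the `InstanceFlags` handle for the flags global of the given
    /// runtime component instance within this component.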
    #[inline]
    pub fn instance_flags(&self, instance: RuntimeComponentInstanceIndex) -> InstanceFlags {
        unsafe {
            let ptr = self
                .vmctx_plus_offset::<VMGlobalDefinition>(self.offsets.instance_flags(instance))
                .cast_mut();
            InstanceFlags(SendSyncPtr::new(NonNull::new(ptr).unwrap()))
        }
    }

    pub fn store(&self) -> *mut dyn VMStore {
        self.store.0.as_ptr()
    }

    pub fn runtime_memory(&self, idx: RuntimeMemoryIndex) -> *mut VMMemoryDefinition {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_memory(idx));
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_ptr()
        }
    }

    pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_realloc(idx));
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_non_null()
        }
    }

    pub fn runtime_post_return(&self, idx: RuntimePostReturnIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_post_return(idx));
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_non_null()
        }
    }

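    /// Returns the `VMLowering` (callee plus host data) previously registered
    /// for `idx` via `set_lowering`.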
    pub fn lowering(&self, idx: LoweredIndex) -> VMLowering {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VMLowering>(self.offsets.lowering(idx));
            debug_assert!(ret.callee as usize != INVALID_PTR);
            debug_assert!(ret.data.as_ptr() as usize != INVALID_PTR);
            ret
        }
    }

    pub fn trampoline_func_ref(&self, idx: TrampolineIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let offset = self.offsets.trampoline_func_ref(idx);
            let ret = self.vmctx_plus_offset::<VMFuncRef>(offset);
            debug_assert!(
                mem::transmute::<Option<VmPtr<VMWasmCallFunction>>, usize>((*ret).wasm_call)
                    != INVALID_PTR
            );
            debug_assert!((*ret).vmctx.as_ptr() as usize != INVALID_PTR);
            NonNull::new(ret.cast_mut()).unwrap()
        }
    }

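    /// Stores the runtime memory pointer for `idx`. In debug builds this
    /// asserts the slot still holds the `INVALID_PTR` poison value, i.e. that
    /// each slot is initialized exactly once. The other `set_*` methods below
    /// follow the same pattern for their respective vmctx slots.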
    pub fn set_runtime_memory(
        &mut self,
        idx: RuntimeMemoryIndex,
        ptr: NonNull<VMMemoryDefinition>,
    ) {
        unsafe {
            let storage = self.vmctx_plus_offset_mut::<VmPtr<VMMemoryDefinition>>(
                self.offsets.runtime_memory(idx),
            );
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    pub fn set_runtime_realloc(&mut self, idx: RuntimeReallocIndex, ptr: NonNull<VMFuncRef>) {
        unsafe {
            let storage =
                self.vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(self.offsets.runtime_realloc(idx));
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    pub fn set_runtime_callback(&mut self, idx: RuntimeCallbackIndex, ptr: NonNull<VMFuncRef>) {
        unsafe {
            let storage =
                self.vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(self.offsets.runtime_callback(idx));
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    pub fn set_runtime_post_return(
        &mut self,
        idx: RuntimePostReturnIndex,
        ptr: NonNull<VMFuncRef>,
    ) {
        unsafe {
            let storage = self
                .vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(self.offsets.runtime_post_return(idx));
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    pub fn set_lowering(&mut self, idx: LoweredIndex, lowering: VMLowering) {
        unsafe {
            debug_assert!(
                *self.vmctx_plus_offset::<usize>(self.offsets.lowering_callee(idx)) == INVALID_PTR
            );
            debug_assert!(
                *self.vmctx_plus_offset::<usize>(self.offsets.lowering_data(idx)) == INVALID_PTR
            );
            *self.vmctx_plus_offset_mut(self.offsets.lowering(idx)) = lowering;
        }
    }

    pub fn set_trampoline(
        &mut self,
        idx: TrampolineIndex,
        wasm_call: NonNull<VMWasmCallFunction>,
        array_call: NonNull<VMArrayCallFunction>,
        type_index: VMSharedTypeIndex,
    ) {
        unsafe {
            let offset = self.offsets.trampoline_func_ref(idx);
            debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
            let vmctx = VMOpaqueContext::from_vmcomponent(self.vmctx());
            *self.vmctx_plus_offset_mut(offset) = VMFuncRef {
                wasm_call: Some(wasm_call.into()),
                array_call: array_call.into(),
                type_index,
                vmctx: vmctx.into(),
            };
        }
    }

    pub fn set_resource_destructor(
        &mut self,
        idx: ResourceIndex,
        dtor: Option<NonNull<VMFuncRef>>,
    ) {
        unsafe {
            let offset = self.offsets.resource_destructor(idx);
            debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
            *self.vmctx_plus_offset_mut(offset) = dtor.map(VmPtr::from);
        }
    }

    pub fn resource_destructor(&self, idx: ResourceIndex) -> Option<NonNull<VMFuncRef>> {
        unsafe {
            let offset = self.offsets.resource_destructor(idx);
            debug_assert!(*self.vmctx_plus_offset::<usize>(offset) != INVALID_PTR);
            (*self.vmctx_plus_offset::<Option<VmPtr<VMFuncRef>>>(offset)).map(|p| p.as_non_null())
        }
    }

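    /// One-time initialization of the raw `vmctx` area: writes the magic
    /// value, the builtin libcalls table, and the store context pointer, sets
    /// every instance's flags to "may enter | may leave", and, in debug
    /// builds, poisons all remaining slots with `INVALID_PTR` so the `set_*`
    /// methods above can assert that each slot is filled exactly once.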
    unsafe fn initialize_vmctx(&mut self) {
        *self.vmctx_plus_offset_mut(self.offsets.magic()) = VMCOMPONENT_MAGIC;
        *self.vmctx_plus_offset_mut(self.offsets.builtins()) =
            VmPtr::from(NonNull::from(&libcalls::VMComponentBuiltins::INIT));
        *self.vmctx_plus_offset_mut(self.offsets.vm_store_context()) =
            VmPtr::from(self.store.0.as_ref().vm_store_context_ptr());

        for i in 0..self.offsets.num_runtime_component_instances {
            let i = RuntimeComponentInstanceIndex::from_u32(i);
            let mut def = VMGlobalDefinition::new();
            *def.as_i32_mut() = FLAG_MAY_ENTER | FLAG_MAY_LEAVE;
            self.instance_flags(i).as_raw().write(def);
        }

        if cfg!(debug_assertions) {
            for i in 0..self.offsets.num_lowerings {
                let i = LoweredIndex::from_u32(i);
                let offset = self.offsets.lowering_callee(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
                let offset = self.offsets.lowering_data(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_trampolines {
                let i = TrampolineIndex::from_u32(i);
                let offset = self.offsets.trampoline_func_ref(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_memories {
                let i = RuntimeMemoryIndex::from_u32(i);
                let offset = self.offsets.runtime_memory(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_reallocs {
                let i = RuntimeReallocIndex::from_u32(i);
                let offset = self.offsets.runtime_realloc(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_callbacks {
                let i = RuntimeCallbackIndex::from_u32(i);
                let offset = self.offsets.runtime_callback(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_runtime_post_returns {
                let i = RuntimePostReturnIndex::from_u32(i);
                let offset = self.offsets.runtime_post_return(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
            for i in 0..self.offsets.num_resources {
                let i = ResourceIndex::from_u32(i);
                let offset = self.offsets.resource_destructor(i);
                *self.vmctx_plus_offset_mut(offset) = INVALID_PTR;
            }
        }
    }

    pub fn component(&self) -> &Component {
        self.runtime_info.component()
    }

    pub fn component_types(&self) -> &Arc<ComponentTypes> {
        self.runtime_info.component_types()
    }

    pub fn realloc_func_ty(&self) -> &Arc<dyn Any + Send + Sync> {
        self.runtime_info.realloc_func_type()
    }

    pub fn resource_types(&self) -> &Arc<dyn Any + Send + Sync> {
        &self.resource_types
    }

    pub fn resource_owned_by_own_instance(&self, ty: TypeResourceTableIndex) -> bool {
        let resource = &self.component_types()[ty];
        let component = self.component();
        let idx = match component.defined_resource_index(resource.ty) {
            Some(idx) => idx,
            None => return false,
        };
        resource.instance == component.defined_resource_instances[idx]
    }

    pub fn resource_new32(&mut self, resource: TypeResourceTableIndex, rep: u32) -> Result<u32> {
        self.resource_tables().resource_new(Some(resource), rep)
    }

    pub fn resource_rep32(&mut self, resource: TypeResourceTableIndex, idx: u32) -> Result<u32> {
        self.resource_tables().resource_rep(Some(resource), idx)
    }

    pub fn resource_drop(
        &mut self,
        resource: TypeResourceTableIndex,
        idx: u32,
    ) -> Result<Option<u32>> {
        self.resource_tables().resource_drop(Some(resource), idx)
    }

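    /// Builds the `ResourceTables` view used by the resource operations
    /// above, combining this component's per-type tables with the call
    /// context owned by the store.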
    fn resource_tables(&mut self) -> ResourceTables<'_> {
        ResourceTables {
            host_table: None,
            calls: unsafe { (&mut *self.store()).component_calls() },
            tables: Some(&mut self.component_resource_tables),
        }
    }

    #[inline]
    pub fn component_resource_tables(
        &mut self,
    ) -> &mut PrimaryMap<TypeResourceTableIndex, ResourceTable> {
        &mut self.component_resource_tables
    }

    pub fn dtor_and_flags(
        &self,
        ty: TypeResourceTableIndex,
    ) -> (Option<NonNull<VMFuncRef>>, Option<InstanceFlags>) {
        let resource = self.component_types()[ty].ty;
        let dtor = self.resource_destructor(resource);
        let component = self.component();
        let flags = component.defined_resource_index(resource).map(|i| {
            let instance = component.defined_resource_instances[i];
            self.instance_flags(instance)
        });
        (dtor, flags)
    }

    pub(crate) fn resource_transfer_own(
        &mut self,
        idx: u32,
        src: TypeResourceTableIndex,
        dst: TypeResourceTableIndex,
    ) -> Result<u32> {
        let mut tables = self.resource_tables();
        let rep = tables.resource_lift_own(Some(src), idx)?;
        tables.resource_lower_own(Some(dst), rep)
    }

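    /// Transfers a borrowed resource handle from the `src` table to the `dst`
    /// table, returning the new handle, or the underlying representation
    /// directly when the destination component itself defines the resource
    /// and therefore needs no borrow tracking.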
    pub(crate) fn resource_transfer_borrow(
        &mut self,
        idx: u32,
        src: TypeResourceTableIndex,
        dst: TypeResourceTableIndex,
    ) -> Result<u32> {
        let dst_owns_resource = self.resource_owned_by_own_instance(dst);
        let mut tables = self.resource_tables();
        let rep = tables.resource_lift_borrow(Some(src), idx)?;
        // When the destination component itself defines this resource the
        // underlying representation is handed over directly and no borrow
        // tracking is performed.
        if dst_owns_resource {
            return Ok(rep);
        }
        tables.resource_lower_borrow(Some(dst), rep)
    }

    pub(crate) fn resource_enter_call(&mut self) {
        self.resource_tables().enter_call()
    }

    pub(crate) fn resource_exit_call(&mut self) -> Result<()> {
        self.resource_tables().exit_call()
    }

    #[cfg(feature = "component-model-async")]
    pub(crate) fn future_transfer(
        &mut self,
        src_idx: u32,
        src: TypeFutureTableIndex,
        dst: TypeFutureTableIndex,
    ) -> Result<u32> {
        _ = (src_idx, src, dst);
        todo!()
    }

    #[cfg(feature = "component-model-async")]
    pub(crate) fn stream_transfer(
        &mut self,
        src_idx: u32,
        src: TypeStreamTableIndex,
        dst: TypeStreamTableIndex,
    ) -> Result<u32> {
        _ = (src_idx, src, dst);
        todo!()
    }

    #[cfg(feature = "component-model-async")]
    pub(crate) fn error_context_transfer(
        &mut self,
        src_idx: u32,
        src: TypeComponentLocalErrorContextTableIndex,
        dst: TypeComponentLocalErrorContextTableIndex,
    ) -> Result<u32> {
        _ = (src_idx, src, dst);
        todo!()
    }
}

impl VMComponentContext {
    /// Recovers the pointer to the `ComponentInstance` this context trails by
    /// backing up over the offset of the `vmctx` field.
    pub fn instance(&self) -> *mut ComponentInstance {
        unsafe {
            (self as *const Self as *mut u8)
                .offset(-(offset_of!(ComponentInstance, vmctx) as isize))
                as *mut ComponentInstance
        }
    }
}

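/// An owning handle to a heap-allocated `ComponentInstance` plus its trailing
/// `VMComponentContext`; the allocation is released when this handle is
/// dropped.
///
/// A rough sketch of how instantiation code elsewhere in the crate is
/// expected to drive this type (the index and pointer names below are purely
/// illustrative, not a fixed API):
///
/// ```ignore
/// let mut owned = OwnedComponentInstance::new(runtime_info, resource_types, store);
/// owned.set_runtime_memory(mem_idx, memory_definition_ptr);
/// owned.set_lowering(lowered_idx, VMLowering { callee, data });
/// let raw: *mut ComponentInstance = owned.instance_ptr();
/// ```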
pub struct OwnedComponentInstance {
    ptr: SendSyncPtr<ComponentInstance>,
}

impl OwnedComponentInstance {
    /// Allocates a zeroed `ComponentInstance` plus trailing vmctx on the heap
    /// and initializes it in place for the given component.
    pub fn new(
        runtime_info: Arc<dyn ComponentRuntimeInfo>,
        resource_types: Arc<dyn Any + Send + Sync>,
        store: NonNull<dyn VMStore>,
    ) -> OwnedComponentInstance {
        let component = runtime_info.component();
        let offsets = VMComponentOffsets::new(HostPtr, component);
        let layout = ComponentInstance::alloc_layout(&offsets);
        unsafe {
            // A zeroed allocation is used so every byte of the vmctx starts
            // out with defined contents before `new_at` runs.
            let ptr = alloc::alloc::alloc_zeroed(layout) as *mut ComponentInstance;
            let ptr = NonNull::new(ptr).unwrap();

            ComponentInstance::new_at(
                ptr,
                layout.size(),
                offsets,
                runtime_info,
                resource_types,
                store,
            );

            let ptr = SendSyncPtr::new(ptr);
            OwnedComponentInstance { ptr }
        }
    }

    unsafe fn instance_mut(&mut self) -> &mut ComponentInstance {
        &mut *self.ptr.as_ptr()
    }

    /// Returns the raw pointer to the owned `ComponentInstance`.
    pub fn instance_ptr(&self) -> *mut ComponentInstance {
        self.ptr.as_ptr()
    }

    pub fn set_runtime_memory(
        &mut self,
        idx: RuntimeMemoryIndex,
        ptr: NonNull<VMMemoryDefinition>,
    ) {
        unsafe { self.instance_mut().set_runtime_memory(idx, ptr) }
    }

    pub fn set_runtime_realloc(&mut self, idx: RuntimeReallocIndex, ptr: NonNull<VMFuncRef>) {
        unsafe { self.instance_mut().set_runtime_realloc(idx, ptr) }
    }

    pub fn set_runtime_callback(&mut self, idx: RuntimeCallbackIndex, ptr: NonNull<VMFuncRef>) {
        unsafe { self.instance_mut().set_runtime_callback(idx, ptr) }
    }

    pub fn set_runtime_post_return(
        &mut self,
        idx: RuntimePostReturnIndex,
        ptr: NonNull<VMFuncRef>,
    ) {
        unsafe { self.instance_mut().set_runtime_post_return(idx, ptr) }
    }

    pub fn set_lowering(&mut self, idx: LoweredIndex, lowering: VMLowering) {
        unsafe { self.instance_mut().set_lowering(idx, lowering) }
    }

    pub fn set_trampoline(
        &mut self,
        idx: TrampolineIndex,
        wasm_call: NonNull<VMWasmCallFunction>,
        array_call: NonNull<VMArrayCallFunction>,
        type_index: VMSharedTypeIndex,
    ) {
        unsafe {
            self.instance_mut()
                .set_trampoline(idx, wasm_call, array_call, type_index)
        }
    }

    pub fn set_resource_destructor(
        &mut self,
        idx: ResourceIndex,
        dtor: Option<NonNull<VMFuncRef>>,
    ) {
        unsafe { self.instance_mut().set_resource_destructor(idx, dtor) }
    }

    /// Mutable access to the type-erased resource type information.
    pub fn resource_types_mut(&mut self) -> &mut Arc<dyn Any + Send + Sync> {
        unsafe { &mut (*self.ptr.as_ptr()).resource_types }
    }
}

impl Deref for OwnedComponentInstance {
    type Target = ComponentInstance;
    fn deref(&self) -> &ComponentInstance {
        unsafe { &*self.ptr.as_ptr() }
    }
}

impl Drop for OwnedComponentInstance {
    fn drop(&mut self) {
        let layout = ComponentInstance::alloc_layout(&self.offsets);
        unsafe {
            // Run the instance's destructor and then release the backing
            // allocation with the same layout it was created with.
            ptr::drop_in_place(self.ptr.as_ptr());
            alloc::alloc::dealloc(self.ptr.as_ptr().cast(), layout);
        }
    }
}

impl VMComponentContext {
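    /// Reinterprets an opaque context pointer as a `VMComponentContext`.
    ///
    /// # Safety
    ///
    /// The caller must guarantee `opaque` actually points at a component
    /// context; in debug builds the `magic` field is checked against
    /// `VMCOMPONENT_MAGIC`.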
    #[inline]
    pub unsafe fn from_opaque(opaque: NonNull<VMOpaqueContext>) -> NonNull<VMComponentContext> {
        debug_assert_eq!(opaque.as_ref().magic, VMCOMPONENT_MAGIC);
        opaque.cast()
    }
}

impl VMOpaqueContext {
    #[inline]
    pub fn from_vmcomponent(ptr: NonNull<VMComponentContext>) -> NonNull<VMOpaqueContext> {
        ptr.cast()
    }
}

#[allow(missing_docs)]
#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct InstanceFlags(SendSyncPtr<VMGlobalDefinition>);

#[allow(missing_docs)]
impl InstanceFlags {
    pub unsafe fn from_raw(ptr: NonNull<VMGlobalDefinition>) -> InstanceFlags {
        InstanceFlags(SendSyncPtr::from(ptr))
    }

    #[inline]
    pub unsafe fn may_leave(&self) -> bool {
        *self.as_raw().as_ref().as_i32() & FLAG_MAY_LEAVE != 0
    }

    #[inline]
    pub unsafe fn set_may_leave(&mut self, val: bool) {
        if val {
            *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_LEAVE;
        } else {
            *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_LEAVE;
        }
    }

    #[inline]
    pub unsafe fn may_enter(&self) -> bool {
        *self.as_raw().as_ref().as_i32() & FLAG_MAY_ENTER != 0
    }

    #[inline]
    pub unsafe fn set_may_enter(&mut self, val: bool) {
        if val {
            *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_ENTER;
        } else {
            *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_ENTER;
        }
    }

    #[inline]
    pub unsafe fn needs_post_return(&self) -> bool {
        *self.as_raw().as_ref().as_i32() & FLAG_NEEDS_POST_RETURN != 0
    }

    #[inline]
    pub unsafe fn set_needs_post_return(&mut self, val: bool) {
        if val {
            *self.as_raw().as_mut().as_i32_mut() |= FLAG_NEEDS_POST_RETURN;
        } else {
            *self.as_raw().as_mut().as_i32_mut() &= !FLAG_NEEDS_POST_RETURN;
        }
    }

    #[inline]
    pub fn as_raw(&self) -> NonNull<VMGlobalDefinition> {
        self.0.as_non_null()
    }
}

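/// Runtime information about a component that the VM needs while
/// instantiating and running it: the `Component` metadata, its type tables,
/// and an opaque handle to the function type used for `realloc`.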
pub trait ComponentRuntimeInfo: Send + Sync + 'static {
    fn component(&self) -> &Component;

    fn component_types(&self) -> &Arc<ComponentTypes>;

    fn realloc_func_type(&self) -> &Arc<dyn Any + Send + Sync>;
}