// wasmtime/runtime/store/data.rs

use crate::module::ModuleRegistry;
use crate::runtime::vm::{self, GcStore, VMStore};
use crate::store::StoreOpaque;
use crate::{Engine, StoreContext, StoreContextMut};
use core::num::NonZeroU64;
use core::ops::{Index, IndexMut};
use core::pin::Pin;
/// Store-local index identifying one instance within a store.
///
/// On its own this is ambiguous across stores; pair it with a [`StoreId`]
/// (see [`StoreInstanceId`]) for a process-unique handle.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct InstanceId(u32);
// Generates the `EntityRef`-style boilerplate (index conversions, etc.)
// for this newtype index.
wasmtime_environ::entity_impl!(InstanceId);
16
/// Per-store payload: the store's unique id plus (optionally)
/// component-model state.
pub struct StoreData {
    // Process-unique identifier for the owning store, allocated in
    // `StoreData::new` via `StoreId::allocate`.
    id: StoreId,
    // Component-model-specific store state; only present when the
    // `component-model` feature is enabled.
    #[cfg(feature = "component-model")]
    pub(crate) components: crate::component::ComponentStoreData,
}
22
impl StoreData {
    /// Creates fresh `StoreData` with a newly allocated [`StoreId`].
    ///
    /// `engine` is only consulted when the `component-model` feature is
    /// enabled, to initialize the component-specific state.
    pub fn new(engine: &Engine) -> StoreData {
        // Silence the unused-parameter warning when the feature is off.
        #[cfg(not(feature = "component-model"))]
        let _ = engine;
        StoreData {
            id: StoreId::allocate(),
            #[cfg(feature = "component-model")]
            components: crate::component::ComponentStoreData::new(engine),
        }
    }

    /// Returns the unique identifier of the store owning this data.
    pub fn id(&self) -> StoreId {
        self.id
    }

    /// Runs manual drop routines for store contents; currently only
    /// component-model state has such routines, so this is a no-op when
    /// that feature is disabled.
    pub fn run_manual_drop_routines<T>(store: StoreContextMut<T>) {
        #[cfg(feature = "component-model")]
        crate::component::ComponentStoreData::run_manual_drop_routines(store);
        #[cfg(not(feature = "component-model"))]
        let _ = store;
    }

    /// Releases resources tracked against `allocator`; forwarded to the
    /// component-model state, and a no-op without that feature.
    pub fn decrement_allocator_resources(&mut self, allocator: &dyn vm::InstanceAllocator) {
        #[cfg(feature = "component-model")]
        self.components.decrement_allocator_resources(allocator);
        #[cfg(not(feature = "component-model"))]
        let _ = allocator;
    }
}
52
53impl<I> Index<I> for StoreOpaque
55where
56 StoreData: Index<I>,
57{
58 type Output = <StoreData as Index<I>>::Output;
59
60 #[inline]
61 fn index(&self, index: I) -> &Self::Output {
62 self.store_data.index(index)
63 }
64}
65
66impl<I> IndexMut<I> for StoreOpaque
67where
68 StoreData: IndexMut<I>,
69{
70 #[inline]
71 fn index_mut(&mut self, index: I) -> &mut Self::Output {
72 self.store_data.index_mut(index)
73 }
74}
75
76impl<I, T> Index<I> for StoreContext<'_, T>
78where
79 StoreOpaque: Index<I>,
80{
81 type Output = <StoreOpaque as Index<I>>::Output;
82
83 #[inline]
84 fn index(&self, index: I) -> &Self::Output {
85 self.0.index(index)
86 }
87}
88
89impl<I, T> Index<I> for StoreContextMut<'_, T>
91where
92 StoreOpaque: Index<I>,
93{
94 type Output = <StoreOpaque as Index<I>>::Output;
95
96 #[inline]
97 fn index(&self, index: I) -> &Self::Output {
98 self.0.index(index)
99 }
100}
101
102impl<I, T> IndexMut<I> for StoreContextMut<'_, T>
103where
104 StoreOpaque: IndexMut<I>,
105{
106 #[inline]
107 fn index_mut(&mut self, index: I) -> &mut Self::Output {
108 self.0.index_mut(index)
109 }
110}
111
112impl<I> Index<I> for dyn VMStore + '_
114where
115 StoreOpaque: Index<I>,
116{
117 type Output = <StoreOpaque as Index<I>>::Output;
118
119 fn index(&self, index: I) -> &Self::Output {
120 self.store_opaque().index(index)
121 }
122}
123
124impl<I> IndexMut<I> for dyn VMStore + '_
125where
126 StoreOpaque: IndexMut<I>,
127{
128 fn index_mut(&mut self, index: I) -> &mut Self::Output {
129 self.store_opaque_mut().index_mut(index)
130 }
131}
132
/// Process-unique identifier for a store, allocated from a global counter
/// in [`StoreId::allocate`].
///
/// `repr(transparent)` over `NonZeroU64`, so `Option<StoreId>` is the same
/// size as `StoreId` (niche optimization) and `as_raw`/`from_raw` are free.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct StoreId(NonZeroU64);
143
impl StoreId {
    /// Allocates the next `StoreId` from a process-global counter.
    ///
    /// # Panics
    ///
    /// Panics if the counter has exceeded `OVERFLOW_THRESHOLD` (2^63
    /// allocations), rather than ever reusing an id.
    pub fn allocate() -> StoreId {
        // Cap far below `u64::MAX` so `id + 1` below can never overflow and
        // ids can never wrap around to collide with earlier ones.
        const OVERFLOW_THRESHOLD: u64 = 1 << 63;

        #[cfg(target_has_atomic = "64")]
        let id = {
            use core::sync::atomic::{AtomicU64, Ordering::Relaxed};

            static NEXT_ID: AtomicU64 = AtomicU64::new(0);
            // Relaxed ordering suffices: only the uniqueness of the value
            // returned by `fetch_add` matters; no other memory is
            // synchronized through this counter.
            let id = NEXT_ID.fetch_add(1, Relaxed);
            if id > OVERFLOW_THRESHOLD {
                // Pin the counter at the threshold so that even if this
                // panic is repeatedly caught the counter cannot keep
                // advancing and eventually wrap.
                NEXT_ID.store(OVERFLOW_THRESHOLD, Relaxed);
                panic!("store id allocator overflow");
            }
            id
        };

        // Fallback for targets without 64-bit atomics: serialize
        // allocations through a lock instead.
        #[cfg(not(target_has_atomic = "64"))]
        let id = {
            use crate::sync::RwLock;
            static NEXT_ID: RwLock<u64> = RwLock::new(0);

            let mut lock = NEXT_ID.write();
            if *lock > OVERFLOW_THRESHOLD {
                panic!("store id allocator overflow");
            }
            let ret = *lock;
            *lock += 1;
            ret
        };

        // `id <= OVERFLOW_THRESHOLD < u64::MAX`, so `id + 1` neither
        // overflows nor is zero; the `unwrap` cannot fail.
        StoreId(NonZeroU64::new(id + 1).unwrap())
    }

    /// Asserts that `store` is the same store this id was created for,
    /// panicking (via the out-of-line cold path) otherwise.
    #[inline]
    pub fn assert_belongs_to(&self, store: StoreId) {
        if *self == store {
            return;
        }
        store_id_mismatch();
    }

    /// Returns the raw nonzero integer backing this id.
    pub fn as_raw(&self) -> NonZeroU64 {
        self.0
    }

    /// Reconstructs a `StoreId` from a value previously obtained via
    /// [`StoreId::as_raw`].
    pub fn from_raw(id: NonZeroU64) -> StoreId {
        StoreId(id)
    }
}
210
// Out-of-line cold path for `StoreId::assert_belongs_to`; kept separate so
// the hot equality check inlines without dragging in panic machinery.
#[cold]
fn store_id_mismatch() {
    panic!("object used with the wrong store");
}
215
/// A store-qualified instance identifier: an [`InstanceId`] tagged with the
/// [`StoreId`] of the store it lives in, letting accessors verify that the
/// right store is supplied before dereferencing the instance.
#[repr(C)]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct StoreInstanceId {
    // Id of the store that owns `instance`.
    store_id: StoreId,
    // Index of the instance within that store.
    instance: InstanceId,
}
230
impl StoreInstanceId {
    /// Pairs `store_id` with a store-local `instance` index.
    pub(crate) fn new(store_id: StoreId, instance: InstanceId) -> StoreInstanceId {
        StoreInstanceId { store_id, instance }
    }

    /// Asserts this id belongs to `store`; panics otherwise.
    #[inline]
    pub fn assert_belongs_to(&self, store: StoreId) {
        self.store_id.assert_belongs_to(store)
    }

    /// Returns the id of the store that owns this instance.
    #[inline]
    pub fn store_id(&self) -> StoreId {
        self.store_id
    }

    /// Returns the store-local index of this instance.
    #[inline]
    pub(crate) fn instance(&self) -> InstanceId {
        self.instance
    }

    /// Returns a shared borrow of the identified instance within `store`.
    ///
    /// # Panics
    ///
    /// Panics if this id does not belong to `store`.
    #[inline]
    pub(crate) fn get<'a>(&self, store: &'a StoreOpaque) -> &'a vm::Instance {
        self.assert_belongs_to(store.id());
        store.instance(self.instance)
    }

    /// Returns a pinned mutable borrow of the identified instance within
    /// `store`.
    ///
    /// # Panics
    ///
    /// Panics if this id does not belong to `store`.
    #[inline]
    pub(crate) fn get_mut<'a>(&self, store: &'a mut StoreOpaque) -> Pin<&'a mut vm::Instance> {
        self.assert_belongs_to(store.id());
        store.instance_mut(self.instance)
    }

    /// Like [`Self::get_mut`], but additionally returns the store's module
    /// registry borrowed from the same `store`.
    ///
    /// # Panics
    ///
    /// Panics if this id does not belong to `store`.
    #[inline]
    pub(crate) fn get_mut_and_module_registry<'a>(
        &self,
        store: &'a mut StoreOpaque,
    ) -> (Pin<&'a mut vm::Instance>, &'a ModuleRegistry) {
        self.assert_belongs_to(store.id());
        store.instance_and_module_registry_mut(self.instance)
    }

    /// Like [`Self::get_mut`], but additionally returns the store's GC
    /// store, if one has been allocated.
    ///
    /// # Panics
    ///
    /// Panics if this id does not belong to `store`.
    #[inline]
    pub(crate) fn get_with_gc_store_mut<'a>(
        &self,
        store: &'a mut StoreOpaque,
    ) -> (Option<&'a mut GcStore>, Pin<&'a mut vm::Instance>) {
        self.assert_belongs_to(store.id());
        store.optional_gc_store_and_instance_mut(self.instance)
    }
}
298
299impl Index<StoreInstanceId> for StoreOpaque {
300 type Output = vm::Instance;
301
302 #[inline]
303 fn index(&self, id: StoreInstanceId) -> &Self::Output {
304 id.get(self)
305 }
306}