// wasmtime/runtime/vm/memory/shared_memory.rs

use crate::prelude::*;
use crate::runtime::vm::memory::{validate_atomic_addr, LocalMemory, MmapMemory};
use crate::runtime::vm::parking_spot::{ParkingSpot, Waiter};
use crate::runtime::vm::{Memory, VMMemoryDefinition, VMStore, WaitResult};
use std::cell::RefCell;
use std::ops::Range;
use std::ptr::NonNull;
use std::sync::atomic::{AtomicU32, AtomicU64, Ordering};
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};
use wasmtime_environ::{Trap, Tunables};

13#[derive(Clone)]
22pub struct SharedMemory(Arc<SharedMemoryInner>);
23
24struct SharedMemoryInner {
25 memory: RwLock<LocalMemory>,
26 spot: ParkingSpot,
27 ty: wasmtime_environ::Memory,
28 def: LongTermVMMemoryDefinition,
29}
30
31impl SharedMemory {
32 pub fn new(ty: &wasmtime_environ::Memory, tunables: &Tunables) -> Result<Self> {
34 let (minimum_bytes, maximum_bytes) = Memory::limit_new(ty, None)?;
35 let mmap_memory = MmapMemory::new(ty, tunables, minimum_bytes, maximum_bytes)?;
36 Self::wrap(
37 ty,
38 LocalMemory::new(ty, tunables, Box::new(mmap_memory), None)?,
39 )
40 }
41
42 pub fn wrap(ty: &wasmtime_environ::Memory, mut memory: LocalMemory) -> Result<Self> {
44 if !ty.shared {
45 bail!("shared memory must have a `shared` memory type");
46 }
47 Ok(Self(Arc::new(SharedMemoryInner {
48 ty: *ty,
49 spot: ParkingSpot::default(),
50 def: LongTermVMMemoryDefinition(memory.vmmemory()),
51 memory: RwLock::new(memory),
52 })))
53 }
54
55 pub fn ty(&self) -> wasmtime_environ::Memory {
57 self.0.ty
58 }
59
60 pub fn as_memory(self) -> Memory {
62 Memory::Shared(self)
63 }
64
65 pub fn vmmemory_ptr(&self) -> NonNull<VMMemoryDefinition> {
67 NonNull::from(&self.0.def.0)
68 }
69
70 pub fn grow(
72 &self,
73 delta_pages: u64,
74 store: Option<&mut dyn VMStore>,
75 ) -> Result<Option<(usize, usize)>, Error> {
76 let mut memory = self.0.memory.write().unwrap();
77 let result = memory.grow(delta_pages, store)?;
78 if let Some((_old_size_in_bytes, new_size_in_bytes)) = result {
79 self.0
99 .def
100 .0
101 .current_length
102 .store(new_size_in_bytes, Ordering::SeqCst);
103 }
104 Ok(result)
105 }
106
107 pub fn atomic_notify(&self, addr_index: u64, count: u32) -> Result<u32, Trap> {
109 let ptr = validate_atomic_addr(&self.0.def.0, addr_index, 4, 4)?;
110 log::trace!("memory.atomic.notify(addr={addr_index:#x}, count={count})");
111 let ptr = unsafe { &*ptr };
112 Ok(self.0.spot.notify(ptr, count))
113 }
114
115 pub fn atomic_wait32(
117 &self,
118 addr_index: u64,
119 expected: u32,
120 timeout: Option<Duration>,
121 ) -> Result<WaitResult, Trap> {
122 let addr = validate_atomic_addr(&self.0.def.0, addr_index, 4, 4)?;
123 log::trace!(
124 "memory.atomic.wait32(addr={addr_index:#x}, expected={expected}, timeout={timeout:?})"
125 );
126
127 assert!(std::mem::size_of::<AtomicU32>() == 4);
129 assert!(std::mem::align_of::<AtomicU32>() <= 4);
130 let atomic = unsafe { AtomicU32::from_ptr(addr.cast()) };
131 let deadline = timeout.map(|d| Instant::now() + d);
132
133 WAITER.with(|waiter| {
134 let mut waiter = waiter.borrow_mut();
135 Ok(self.0.spot.wait32(atomic, expected, deadline, &mut waiter))
136 })
137 }
138
139 pub fn atomic_wait64(
141 &self,
142 addr_index: u64,
143 expected: u64,
144 timeout: Option<Duration>,
145 ) -> Result<WaitResult, Trap> {
146 let addr = validate_atomic_addr(&self.0.def.0, addr_index, 8, 8)?;
147 log::trace!(
148 "memory.atomic.wait64(addr={addr_index:#x}, expected={expected}, timeout={timeout:?})"
149 );
150
151 assert!(std::mem::size_of::<AtomicU64>() == 8);
153 assert!(std::mem::align_of::<AtomicU64>() <= 8);
154 let atomic = unsafe { AtomicU64::from_ptr(addr.cast()) };
155 let deadline = timeout.map(|d| Instant::now() + d);
156
157 WAITER.with(|waiter| {
158 let mut waiter = waiter.borrow_mut();
159 Ok(self.0.spot.wait64(atomic, expected, deadline, &mut waiter))
160 })
161 }
162
163 pub(crate) fn page_size(&self) -> u64 {
164 self.0.ty.page_size()
165 }
166
167 pub(crate) fn byte_size(&self) -> usize {
168 self.0.memory.read().unwrap().byte_size()
169 }
170
171 pub(crate) fn needs_init(&self) -> bool {
172 self.0.memory.read().unwrap().needs_init()
173 }
174
175 pub(crate) fn wasm_accessible(&self) -> Range<usize> {
176 self.0.memory.read().unwrap().wasm_accessible()
177 }
178}
179
180thread_local! {
181 static WAITER: RefCell<Waiter> = const { RefCell::new(Waiter::new()) };
184}
185
186struct LongTermVMMemoryDefinition(VMMemoryDefinition);
195unsafe impl Send for LongTermVMMemoryDefinition {}
196unsafe impl Sync for LongTermVMMemoryDefinition {}