// wasmtime/runtime/vm/memory/shared_memory.rs
use crate::Engine;
use crate::prelude::*;
use crate::runtime::vm::memory::{LocalMemory, MmapMemory, validate_atomic_addr};
use crate::runtime::vm::parking_spot::{ParkingSpot, Waiter};
use crate::runtime::vm::{self, Memory, VMMemoryDefinition, WaitResult};
use std::cell::RefCell;
use std::ops::Range;
use std::ptr::NonNull;
use std::sync::atomic::{AtomicU32, AtomicU64, Ordering};
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};
use wasmtime_environ::Trap;

/// A shared linear memory.
///
/// This is a reference-counted handle: `clone` produces another handle to the
/// *same* underlying memory, its parking spot for `atomic.wait`/`notify`, and
/// its published `VMMemoryDefinition`.
#[derive(Clone)]
pub struct SharedMemory(Arc<SharedMemoryInner>);
24
/// Shared state behind every clone of a [`SharedMemory`].
struct SharedMemoryInner {
    // The actual memory allocation; guarded by a lock since multiple handles
    // (and threads) may read its size or grow it concurrently.
    memory: RwLock<LocalMemory>,
    // Parking/wakeup state backing `memory.atomic.wait{32,64}` and
    // `memory.atomic.notify`.
    spot: ParkingSpot,
    // The wasm-level type of this memory (page size, limits, sharedness).
    ty: wasmtime_environ::Memory,
    // Stable `VMMemoryDefinition` read without taking the lock (e.g. by the
    // atomic-address validation below); its `current_length` is updated
    // atomically on growth.
    def: LongTermVMMemoryDefinition,
}
31
impl SharedMemory {
    /// Creates a new `SharedMemory` of type `ty`, backed by a fresh
    /// mmap-based allocation sized according to `engine`'s tunables.
    ///
    /// # Errors
    ///
    /// Returns an error if the allocation fails or if `wrap` rejects the
    /// engine configuration / memory type (see [`SharedMemory::wrap`]).
    pub fn new(engine: &Engine, ty: &wasmtime_environ::Memory) -> Result<Self> {
        let tunables = engine.tunables();
        // `limit_new` is async in general; with no store limiter attached
        // (`None`) it is presumed to complete immediately, hence
        // `assert_ready` — TODO(review): confirm against `Memory::limit_new`.
        let (minimum_bytes, maximum_bytes) = vm::assert_ready(Memory::limit_new(ty, None))?;
        let mmap_memory = MmapMemory::new(ty, tunables, minimum_bytes, maximum_bytes)?;
        Self::wrap(
            engine,
            ty,
            LocalMemory::new(ty, tunables, Box::new(mmap_memory), None)?,
        )
    }

    /// Wraps an existing [`LocalMemory`] as a `SharedMemory`.
    ///
    /// # Errors
    ///
    /// Fails if shared-memory support is disabled in the engine's `Config`,
    /// or if `ty` is not a `shared` memory type.
    pub fn wrap(
        engine: &Engine,
        ty: &wasmtime_environ::Memory,
        memory: LocalMemory,
    ) -> Result<Self> {
        if !engine.config().shared_memory {
            bail!(
                "shared memory support is disabled for this engine -- see `Config::shared_memory`"
            );
        }
        if !ty.shared {
            bail!("shared memory must have a `shared` memory type");
        }
        Ok(Self(Arc::new(SharedMemoryInner {
            ty: *ty,
            spot: ParkingSpot::default(),
            // Snapshot the memory's definition before moving `memory` into
            // the lock; `def` is the copy read without locking afterwards.
            def: LongTermVMMemoryDefinition(memory.vmmemory()),
            memory: RwLock::new(memory),
        })))
    }

    /// Returns the wasm-level type of this memory.
    pub fn ty(&self) -> wasmtime_environ::Memory {
        self.0.ty
    }

    /// Converts this handle into the general [`Memory`] enum.
    pub fn as_memory(self) -> Memory {
        Memory::Shared(self)
    }

    /// Returns a pointer to this memory's `VMMemoryDefinition`.
    ///
    /// The pointee lives inside the shared `Arc`, so it remains valid as
    /// long as any clone of this `SharedMemory` is alive.
    pub fn vmmemory_ptr(&self) -> NonNull<VMMemoryDefinition> {
        NonNull::from(&self.0.def.0)
    }

    /// Grows this memory by `delta_pages` pages.
    ///
    /// Returns `Ok(Some((old, new)))` with the sizes in bytes on success,
    /// `Ok(None)` if the growth was refused, or an error from the underlying
    /// memory implementation.
    pub fn grow(&self, delta_pages: u64) -> Result<Option<(usize, usize)>, Error> {
        let mut memory = self.0.memory.write().unwrap();
        // As in `new`, no store limiter is attached (`None`), so the grow
        // future is presumed immediately ready.
        let result = vm::assert_ready(memory.grow(delta_pages, None))?;
        if let Some((_old_size_in_bytes, new_size_in_bytes)) = result {
            // Publish the new length while still holding the write lock so
            // concurrent growers cannot store lengths out of order.
            self.0
                .def
                .0
                .current_length
                .store(new_size_in_bytes, Ordering::SeqCst);
        }
        Ok(result)
    }

    /// Implementation of `memory.atomic.notify`: wakes up to `count` threads
    /// parked on `addr_index`, returning how many were woken.
    ///
    /// # Errors
    ///
    /// Traps if `addr_index` fails validation (presumably an out-of-bounds
    /// or misaligned 4-byte access — confirm against `validate_atomic_addr`).
    pub fn atomic_notify(&self, addr_index: u64, count: u32) -> Result<u32, Trap> {
        let ptr = validate_atomic_addr(&self.0.def.0, addr_index, 4, 4)?;
        log::trace!("memory.atomic.notify(addr={addr_index:#x}, count={count})");
        // SAFETY: `validate_atomic_addr` just vetted this address, so the
        // pointer is non-null and points into this memory's mapping.
        let ptr = unsafe { &*ptr };
        Ok(self.0.spot.notify(ptr, count))
    }

    /// Implementation of `memory.atomic.wait32`: blocks until notified, the
    /// value at `addr_index` differs from `expected`, or `timeout` elapses.
    /// A `timeout` of `None` waits indefinitely.
    ///
    /// # Errors
    ///
    /// Traps if `addr_index` fails validation for a 4-byte access.
    pub fn atomic_wait32(
        &self,
        addr_index: u64,
        expected: u32,
        timeout: Option<Duration>,
    ) -> Result<WaitResult, Trap> {
        let addr = validate_atomic_addr(&self.0.def.0, addr_index, 4, 4)?;
        log::trace!(
            "memory.atomic.wait32(addr={addr_index:#x}, expected={expected}, timeout={timeout:?})"
        );

        // These asserts make reinterpreting the raw 4-byte slot as an
        // `AtomicU32` demonstrably layout-compatible.
        assert!(std::mem::size_of::<AtomicU32>() == 4);
        assert!(std::mem::align_of::<AtomicU32>() <= 4);
        // SAFETY: the address was validated above for this access and the
        // layout asserts hold, so viewing it as an `AtomicU32` is sound.
        let atomic = unsafe { AtomicU32::from_ptr(addr.cast()) };
        let deadline = timeout.map(|d| Instant::now() + d);

        WAITER.with(|waiter| {
            let mut waiter = waiter.borrow_mut();
            Ok(self.0.spot.wait32(atomic, expected, deadline, &mut waiter))
        })
    }

    /// Implementation of `memory.atomic.wait64`; identical to
    /// [`SharedMemory::atomic_wait32`] but for 8-byte values.
    ///
    /// # Errors
    ///
    /// Traps if `addr_index` fails validation for an 8-byte access.
    pub fn atomic_wait64(
        &self,
        addr_index: u64,
        expected: u64,
        timeout: Option<Duration>,
    ) -> Result<WaitResult, Trap> {
        let addr = validate_atomic_addr(&self.0.def.0, addr_index, 8, 8)?;
        log::trace!(
            "memory.atomic.wait64(addr={addr_index:#x}, expected={expected}, timeout={timeout:?})"
        );

        // Layout-compatibility checks mirroring `atomic_wait32`.
        assert!(std::mem::size_of::<AtomicU64>() == 8);
        assert!(std::mem::align_of::<AtomicU64>() <= 8);
        // SAFETY: the address was validated above for this access and the
        // layout asserts hold, so viewing it as an `AtomicU64` is sound.
        let atomic = unsafe { AtomicU64::from_ptr(addr.cast()) };
        let deadline = timeout.map(|d| Instant::now() + d);

        WAITER.with(|waiter| {
            let mut waiter = waiter.borrow_mut();
            Ok(self.0.spot.wait64(atomic, expected, deadline, &mut waiter))
        })
    }

    /// Returns this memory's page size in bytes, per its type.
    pub(crate) fn page_size(&self) -> u64 {
        self.0.ty.page_size()
    }

    /// Returns the current byte size of the underlying memory.
    pub(crate) fn byte_size(&self) -> usize {
        self.0.memory.read().unwrap().byte_size()
    }

    /// Whether the underlying memory still requires initialization.
    pub(crate) fn needs_init(&self) -> bool {
        self.0.memory.read().unwrap().needs_init()
    }

    /// The range of addresses accessible to wasm in the underlying memory.
    pub(crate) fn wasm_accessible(&self) -> Range<usize> {
        self.0.memory.read().unwrap().wasm_accessible()
    }
}
191
thread_local! {
    // Per-thread waiter handed to `ParkingSpot::wait{32,64}`. A thread can
    // be blocked in at most one wait at a time, so one reusable slot per
    // thread suffices.
    static WAITER: RefCell<Waiter> = const { RefCell::new(Waiter::new()) };
}
197
/// Newtype whose sole purpose is to assert `Send`/`Sync` for the
/// `VMMemoryDefinition` stored long-term in `SharedMemoryInner`.
struct LongTermVMMemoryDefinition(VMMemoryDefinition);
// SAFETY(review assumption): `VMMemoryDefinition` is presumably not
// auto-`Send`/`Sync` (raw pointer field). Sharing this particular instance
// across threads looks sound because `grow` updates `current_length` with a
// `SeqCst` atomic store under the memory's write lock — but that invariant
// is maintained outside this type; confirm against `VMMemoryDefinition`.
unsafe impl Send for LongTermVMMemoryDefinition {}
unsafe impl Sync for LongTermVMMemoryDefinition {}