1#[cfg(feature = "component-model")]
2use crate::component::Component;
3use crate::prelude::*;
4use crate::runtime::vm::MmapVec;
5use crate::{CodeBuilder, CodeMemory, Engine, Module};
6use object::write::WritableBuffer;
7use std::sync::Arc;
8use wasmtime_environ::{FinishedObject, ObjectBuilder};
9
impl<'a> CodeBuilder<'a> {
    /// Shared driver for `compile_module` and `compile_component`: invokes
    /// `build_artifacts` (consulting the global compilation cache when the
    /// `cache` feature is enabled) and publishes the resulting object into
    /// executable [`CodeMemory`].
    ///
    /// `build_artifacts` is a plain `fn` pointer rather than a closure so it
    /// can be carried inside the cache-key tuple below without contributing
    /// to the hash; `state` threads extra compilation inputs (e.g. alignment)
    /// through to the builder.
    fn compile_cached<T, S>(
        &self,
        build_artifacts: fn(
            &Engine,
            &[u8],
            Option<&[u8]>,
            Option<&str>,
            &S,
        ) -> Result<(MmapVecWrapper, Option<T>)>,
        state: &S,
    ) -> Result<(Arc<CodeMemory>, Option<T>)> {
        let wasm = self.get_wasm()?;
        let dwarf_package = self.get_dwarf_package();
        let unsafe_intrinsics_import = self.get_unsafe_intrinsics_import();

        // Fail fast if this engine's compilation settings produce code that
        // can't run on the host we're executing on.
        self.engine
            .check_compatible_with_native_host()
            .context("compilation settings are not compatible with the native host")?;

        #[cfg(feature = "cache")]
        {
            // This tuple doubles as the cache key. The engine environment,
            // wasm bytes, dwarf package, and intrinsics import are all
            // hashed; the function pointer and `state` are wrapped in
            // `NotHashed` (whose `Hash` impl is a no-op) so they ride along
            // without affecting the key.
            let state = (
                crate::compile::HashedEngineCompileEnv(self.engine),
                &wasm,
                &dwarf_package,
                &unsafe_intrinsics_import,
                NotHashed(build_artifacts),
                NotHashed(state),
            );
            let (code, info_and_types) =
                wasmtime_cache::ModuleCacheEntry::new("wasmtime", self.engine.cache())
                    .get_data_raw(
                        &state,
                        // Cache miss: compile from scratch and publish the
                        // artifact into executable memory.
                        |(
                            engine,
                            wasm,
                            dwarf_package,
                            unsafe_intrinsics_import,
                            build_artifacts,
                            state,
                        )|
                         -> Result<_> {
                            let (mmap, info) = (build_artifacts.0)(
                                engine.0,
                                wasm,
                                dwarf_package.as_deref(),
                                **unsafe_intrinsics_import,
                                state.0,
                            )?;
                            let code = publish_mmap(engine.0, mmap.0)?;
                            Ok((code, info))
                        },
                        // Serialize: the raw bytes of the published mmap are
                        // what gets stored in the cache.
                        |(_engine, _wasm, _, _, _, _), (code, _info_and_types)| {
                            Some(code.mmap().to_vec())
                        },
                        // Deserialize (cache hit): reload the cached bytes.
                        // Note that the `info_and_types` side-channel is not
                        // cached, hence the `None` here.
                        |(engine, wasm, _, _, _, _), serialized_bytes| {
                            // Sniff the original input to decide whether the
                            // cached artifact is a component or core module.
                            let kind = if wasmparser::Parser::is_component(&wasm) {
                                wasmtime_environ::ObjectKind::Component
                            } else {
                                wasmtime_environ::ObjectKind::Module
                            };
                            let code = engine.0.load_code_bytes(&serialized_bytes, kind).ok()?;
                            Some((code, None))
                        },
                    )?;
            return Ok((code, info_and_types));

            /// Wrapper whose `Hash` impl does nothing, used to smuggle
            /// non-hashable values into the cache-key tuple above.
            struct NotHashed<T>(T);

            impl<T> std::hash::Hash for NotHashed<T> {
                fn hash<H: std::hash::Hasher>(&self, _hasher: &mut H) {}
            }
        }

        #[cfg(not(feature = "cache"))]
        {
            // No cache support compiled in: always build from scratch.
            let (mmap, info_and_types) = build_artifacts(
                self.engine,
                &wasm,
                dwarf_package.as_deref(),
                unsafe_intrinsics_import,
                state,
            )?;
            let code = publish_mmap(self.engine, mmap.0)?;
            return Ok((code, info_and_types));
        }
    }

    /// Compiles the configured input into a core [`Module`].
    ///
    /// Unsafe intrinsics and compile-time builtins are component-only
    /// features, so their presence here is rejected up front.
    pub fn compile_module(&self) -> Result<Module> {
        ensure!(
            self.get_unsafe_intrinsics_import().is_none(),
            "`CodeBuilder::expose_unsafe_intrinsics` can only be used with components"
        );

        #[cfg(feature = "compile-time-builtins")]
        ensure!(
            self.get_compile_time_builtins().is_empty(),
            "compile-time builtins can only be used with components"
        );

        let custom_alignment = self.custom_alignment();
        let (code, info_and_types) = self.compile_cached(
            |engine, wasm, dwarf, unsafe_intrinsics_import, state| {
                // Guaranteed by the `ensure!` above; core modules never
                // receive an intrinsics import.
                assert!(unsafe_intrinsics_import.is_none());
                super::build_module_artifacts(engine, wasm, dwarf, state)
            },
            &custom_alignment,
        )?;
        Module::from_parts(self.engine, code, info_and_types)
    }

    /// Compiles the configured input into a [`Component`].
    ///
    /// Unlike core modules, components may use an unsafe-intrinsics import,
    /// so that option is forwarded to the artifact builder.
    #[cfg(feature = "component-model")]
    pub fn compile_component(&self) -> Result<Component> {
        let custom_alignment = self.custom_alignment();
        let (code, artifacts) = self.compile_cached(
            |engine, wasm, dwarf, unsafe_intrinsics_import, state| {
                super::build_component_artifacts(
                    engine,
                    wasm,
                    dwarf,
                    unsafe_intrinsics_import,
                    state,
                )
            },
            &custom_alignment,
        )?;
        Component::from_parts(self.engine, code, artifacts)
    }

    /// Determines the alignment to request for the compiled image: the
    /// engine's custom code memory requirement, or 1 (i.e. no constraint)
    /// when no custom code memory is configured.
    fn custom_alignment(&self) -> CustomAlignment {
        CustomAlignment {
            alignment: self
                .engine
                .custom_code_memory()
                .map(|c| c.required_alignment())
                .unwrap_or(1),
        }
    }
}
164
165fn publish_mmap(engine: &Engine, mmap: MmapVec) -> Result<Arc<CodeMemory>> {
166 let mut code = CodeMemory::new(engine, mmap)?;
167 code.publish()?;
168 Ok(Arc::new(code))
169}
170
/// Newtype around [`MmapVec`] so this crate can implement the external
/// [`FinishedObject`] trait for it (see the impl below).
pub(crate) struct MmapVecWrapper(pub MmapVec);
172
/// Alignment requirement threaded through compilation into
/// `FinishedObject::finish_object`, computed from the engine's custom code
/// memory (defaulting to 1 when none is configured).
pub(crate) struct CustomAlignment {
    // Required alignment, in bytes, for the start of the compiled image.
    alignment: usize,
}
178
impl FinishedObject for MmapVecWrapper {
    type State = CustomAlignment;
    /// Finishes the in-progress object directly into an aligned `MmapVec`,
    /// avoiding an intermediate `Vec<u8>` copy of the whole artifact.
    fn finish_object(obj: ObjectBuilder<'_>, align: &CustomAlignment) -> Result<Self> {
        let mut result = ObjectMmap::default();
        result.alignment = align.alignment;
        return match obj.finish(&mut result) {
            Ok(()) => {
                assert!(result.mmap.is_some(), "no reserve");
                let mmap = result.mmap.expect("reserve not called");
                assert_eq!(mmap.len(), result.len);
                Ok(MmapVecWrapper(mmap))
            }
            // `WritableBuffer` errors carry no payload, so if the mmap
            // allocation failed the real error was stashed in `result.err`;
            // prefer reporting it with the object error attached as context.
            Err(e) => match result.err.take() {
                Some(original) => Err(original.context(e)),
                None => Err(e),
            },
        };

        /// `WritableBuffer` implementation backed by an `MmapVec` so the
        /// `object` crate writes the artifact straight into the mapping.
        #[derive(Default)]
        struct ObjectMmap {
            mmap: Option<MmapVec>, // allocated lazily by `reserve`
            len: usize,            // logical number of bytes written so far
            alignment: usize,      // required alignment for the mapping
            err: Option<Error>,    // stashed allocation failure, if any
        }

        impl WritableBuffer for ObjectMmap {
            fn len(&self) -> usize {
                self.len
            }

            fn reserve(&mut self, additional: usize) -> Result<(), ()> {
                // The `object` crate is expected to reserve the full size
                // once up front; a second reservation is a logic error.
                assert!(self.mmap.is_none(), "cannot reserve twice");
                self.mmap = match MmapVec::with_capacity_and_alignment(additional, self.alignment) {
                    Ok(mmap) => Some(mmap),
                    Err(e) => {
                        // Save the concrete error for `finish_object` since
                        // this trait only lets us return `Err(())`.
                        self.err = Some(e);
                        return Err(());
                    }
                };
                Ok(())
            }

            fn resize(&mut self, new_len: usize) {
                // Growing just bumps the logical length — presumably the
                // mapping is already zero-initialized so no fill is needed
                // (TODO confirm against `MmapVec`). Shrinks are ignored.
                if new_len <= self.len {
                    return;
                }
                self.len = new_len;
            }

            fn write_bytes(&mut self, val: &[u8]) {
                let mmap = self.mmap.as_mut().expect("write before reserve");
                // SAFETY: `reserve` allocated capacity for the whole object
                // up front, so `len..len + val.len()` is assumed in-bounds;
                // `as_mut_slice` is an unsafe method on `MmapVec` — see its
                // declaration for the exact contract being upheld here.
                unsafe {
                    mmap.as_mut_slice()[self.len..][..val.len()].copy_from_slice(val);
                }
                self.len += val.len();
            }
        }
    }
}