use anyhow::Result;
use base64::Engine;
use log::{debug, trace, warn};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::hash::Hash;
use std::hash::Hasher;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
use std::time::Duration;
use std::{fs, io};

#[macro_use]
mod config;

mod worker;

pub use config::{CacheConfig, create_new_config};
use worker::Worker;

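/// Global cache manager: holds the validated cache configuration, a handle to
/// the background cache worker, and hit/miss counters.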
#[derive(Debug, Clone)]
pub struct Cache {
    config: CacheConfig,
    worker: Worker,
    state: Arc<CacheState>,
}

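/// Generates a getter on `Cache` that forwards to the setting of the same name
/// on the underlying `CacheConfig`.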
macro_rules! generate_config_setting_getter {
    ($setting:ident: $setting_type:ty) => {
        #[doc = concat!("Returns ", "`", stringify!($setting), "`.")]
        pub fn $setting(&self) -> $setting_type {
            self.config.$setting()
        }
    };
}

impl Cache {
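    /// Creates a new cache from the given configuration.
    ///
    /// The configuration is validated first and the background worker is
    /// started; an error is returned if validation fails.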
    pub fn new(mut config: CacheConfig) -> Result<Self> {
        config.validate()?;
        Ok(Self {
            worker: Worker::start_new(&config),
            config,
            state: Default::default(),
        })
    }

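    /// Loads a `CacheConfig` with `CacheConfig::from_file` and builds a cache
    /// from it.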
    pub fn from_file(path: Option<&Path>) -> Result<Self> {
        let config = CacheConfig::from_file(path)?;
        Self::new(config)
    }

    generate_config_setting_getter!(worker_event_queue_size: u64);
    generate_config_setting_getter!(baseline_compression_level: i32);
    generate_config_setting_getter!(optimized_compression_level: i32);
    generate_config_setting_getter!(optimized_compression_usage_counter_threshold: u64);
    generate_config_setting_getter!(cleanup_interval: Duration);
    generate_config_setting_getter!(optimizing_compression_task_timeout: Duration);
    generate_config_setting_getter!(allowed_clock_drift_for_files_from_future: Duration);
    generate_config_setting_getter!(file_count_soft_limit: u64);
    generate_config_setting_getter!(files_total_size_soft_limit: u64);
    generate_config_setting_getter!(file_count_limit_percent_if_deleting: u8);
    generate_config_setting_getter!(files_total_size_limit_percent_if_deleting: u8);

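    /// Returns the root directory of the cache.
    ///
    /// Panics if no directory is configured, which configuration validation is
    /// expected to have ruled out.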
    pub fn directory(&self) -> &PathBuf {
        &self
            .config
            .directory()
            .expect("directory should be validated in Config::new")
    }

    #[cfg(test)]
    fn worker(&self) -> &Worker {
        &self.worker
    }

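    /// Returns the number of cache hits registered so far.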
    pub fn cache_hits(&self) -> usize {
        self.state.hits.load(SeqCst)
    }

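    /// Returns the number of cache misses registered so far.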
    pub fn cache_misses(&self) -> usize {
        self.state.misses.load(SeqCst)
    }

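    /// Records a cache hit and forwards the event to the background worker.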
    pub(crate) fn on_cache_get_async(&self, path: impl AsRef<Path>) {
        self.state.hits.fetch_add(1, SeqCst);
        self.worker.on_cache_get_async(path)
    }

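    /// Records a cache miss and forwards the event to the background worker.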
    pub(crate) fn on_cache_update_async(&self, path: impl AsRef<Path>) {
        self.state.misses.fetch_add(1, SeqCst);
        self.worker.on_cache_update_async(path)
    }
}

#[derive(Default, Debug)]
struct CacheState {
    hits: AtomicUsize,
    misses: AtomicUsize,
}

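/// Module-level cache entry.
///
/// Holds `None` when caching is disabled, in which case every lookup falls
/// back to recomputing the value.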
pub struct ModuleCacheEntry<'cache>(Option<ModuleCacheEntryInner<'cache>>);

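/// State backing an enabled `ModuleCacheEntry`: the per-compiler directory the
/// entry's files live in, plus a reference to the owning `Cache`.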
struct ModuleCacheEntryInner<'cache> {
    root_path: PathBuf,
    cache: &'cache Cache,
}

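/// Adapter that drives a `Sha256` digest through the `std::hash::Hasher`
/// interface, so any `Hash` state can be fed into it.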
struct Sha256Hasher(Sha256);

impl<'cache> ModuleCacheEntry<'cache> {
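    /// Creates the cache entry for the given compiler.
    ///
    /// Passing `None` for `cache` disables caching for this entry.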
    pub fn new(compiler_name: &str, cache: Option<&'cache Cache>) -> Self {
        Self(cache.map(|cache| ModuleCacheEntryInner::new(compiler_name, cache)))
    }

    #[cfg(test)]
    fn from_inner(inner: ModuleCacheEntryInner<'cache>) -> Self {
        Self(Some(inner))
    }

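    /// Returns the cached value for `state` if one exists, otherwise computes
    /// it, stores it, and returns it.
    ///
    /// Values are serialized and deserialized with `postcard`; see
    /// `get_data_raw` for the lookup and update logic.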
    pub fn get_data<T, U, E>(&self, state: T, compute: fn(&T) -> Result<U, E>) -> Result<U, E>
    where
        T: Hash,
        U: Serialize + for<'a> Deserialize<'a>,
    {
        self.get_data_raw(
            &state,
            compute,
            |_state, data| postcard::to_allocvec(data).ok(),
            |_state, data| postcard::from_bytes(&data).ok(),
        )
    }

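    /// Like `get_data`, but with caller-provided `serialize` and `deserialize`
    /// functions.
    ///
    /// The SHA-256 digest of `state`, base64-encoded with the URL-safe
    /// alphabet, names the cache file. Failed serialization or deserialization
    /// is treated as a cache miss; only errors from `compute` are propagated.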
    pub fn get_data_raw<T, U, E>(
        &self,
        state: &T,
        compute: fn(&T) -> Result<U, E>,
        serialize: fn(&T, &U) -> Option<Vec<u8>>,
        deserialize: fn(&T, Vec<u8>) -> Option<U>,
    ) -> Result<U, E>
    where
        T: Hash,
    {
        let inner = match &self.0 {
            Some(inner) => inner,
            None => return compute(state),
        };

        let mut hasher = Sha256Hasher(Sha256::new());
        state.hash(&mut hasher);
        let hash: [u8; 32] = hasher.0.finalize().into();
        let hash = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(&hash);

        if let Some(cached_val) = inner.get_data(&hash) {
            if let Some(val) = deserialize(state, cached_val) {
                let mod_cache_path = inner.root_path.join(&hash);
                inner.cache.on_cache_get_async(&mod_cache_path);
                return Ok(val);
            }
        }
        let val_to_cache = compute(state)?;
        if let Some(bytes) = serialize(state, &val_to_cache) {
            if inner.update_data(&hash, &bytes).is_some() {
                let mod_cache_path = inner.root_path.join(&hash);
                inner.cache.on_cache_update_async(&mod_cache_path);
            }
        }
        Ok(val_to_cache)
    }
}

impl<'cache> ModuleCacheEntryInner<'cache> {
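    /// Computes the per-compiler cache directory under `<cache dir>/modules`.
    ///
    /// Debug builds also append the current executable's mtime to the
    /// `compiler_name`/`GIT_REV` pair, so a locally rebuilt compiler does not
    /// pick up entries produced by a previous build.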
    fn new(compiler_name: &str, cache: &'cache Cache) -> Self {
        let compiler_dir = if cfg!(debug_assertions) {
            fn self_mtime() -> Option<String> {
                let path = std::env::current_exe().ok()?;
                let metadata = path.metadata().ok()?;
                let mtime = metadata.modified().ok()?;
                Some(match mtime.duration_since(std::time::UNIX_EPOCH) {
                    Ok(dur) => format!("{}", dur.as_millis()),
                    Err(err) => format!("m{}", err.duration().as_millis()),
                })
            }
            let self_mtime = self_mtime().unwrap_or("no-mtime".to_string());
            format!(
                "{comp_name}-{comp_ver}-{comp_mtime}",
                comp_name = compiler_name,
                comp_ver = env!("GIT_REV"),
                comp_mtime = self_mtime,
            )
        } else {
            format!(
                "{comp_name}-{comp_ver}",
                comp_name = compiler_name,
                comp_ver = env!("GIT_REV"),
            )
        };
        let root_path = cache.directory().join("modules").join(compiler_dir);

        Self { root_path, cache }
    }

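    /// Reads and zstd-decompresses the cache file for `hash`, returning `None`
    /// if the file cannot be read or decompressed (decompression failures are
    /// logged as warnings).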
    fn get_data(&self, hash: &str) -> Option<Vec<u8>> {
        let mod_cache_path = self.root_path.join(hash);
        trace!("get_data() for path: {}", mod_cache_path.display());
        let compressed_cache_bytes = fs::read(&mod_cache_path).ok()?;
        let cache_bytes = zstd::decode_all(&compressed_cache_bytes[..])
            .map_err(|err| warn!("Failed to decompress cached code: {err}"))
            .ok()?;
        Some(cache_bytes)
    }

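    /// Compresses `serialized_data` with zstd and writes it atomically to the
    /// cache file for `hash`.
    ///
    /// The write is first attempted optimistically; if it fails, the cache
    /// directory is created and the write is retried. Returns `None` when the
    /// data could not be stored.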
    fn update_data(&self, hash: &str, serialized_data: &[u8]) -> Option<()> {
        let mod_cache_path = self.root_path.join(hash);
        trace!("update_data() for path: {}", mod_cache_path.display());
        let compressed_data = zstd::encode_all(
            &serialized_data[..],
            self.cache.baseline_compression_level(),
        )
        .map_err(|err| warn!("Failed to compress cached code: {err}"))
        .ok()?;

        if fs_write_atomic(&mod_cache_path, "mod", &compressed_data).is_ok() {
            return Some(());
        }

        debug!(
            "Attempting to create the cache directory, because writing \
             the cached code to disk failed, path: {}",
            mod_cache_path.display(),
        );

        let cache_dir = mod_cache_path.parent().unwrap();
        fs::create_dir_all(cache_dir)
            .map_err(|err| {
                warn!(
                    "Failed to create cache directory, path: {}, message: {}",
                    cache_dir.display(),
                    err
                )
            })
            .ok()?;

        match fs_write_atomic(&mod_cache_path, "mod", &compressed_data) {
            Ok(_) => Some(()),
            Err(err) => {
                warn!(
                    "Failed to write file with rename, target path: {}, err: {}",
                    mod_cache_path.display(),
                    err
                );
                None
            }
        }
    }
}

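// `Hasher` is implemented only so that any `Hash` state can stream its bytes
// into SHA-256; `finish` is unsupported because the full 32-byte digest is
// read directly from the inner `Sha256` instead.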
impl Hasher for Sha256Hasher {
    fn finish(&self) -> u64 {
        panic!("Sha256Hasher doesn't support finish!");
    }

    fn write(&mut self, bytes: &[u8]) {
        self.0.update(bytes);
    }
}

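/// Writes `contents` to `path` atomically: the data first goes to a temporary
/// sibling file (suffixed with `reason`), which is then renamed over the
/// target so readers never observe a partially written file.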
fn fs_write_atomic(path: &Path, reason: &str, contents: &[u8]) -> io::Result<()> {
    let lock_path = path.with_extension(format!("wip-atomic-write-{reason}"));
    fs::OpenOptions::new()
        .create_new(true)
        .write(true)
        .open(&lock_path)
        .and_then(|mut file| file.write_all(contents))
        .and_then(|()| fs::rename(&lock_path, &path))
}

#[cfg(test)]
mod tests;