Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions wasmtime-environ/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ target-lexicon = { version = "0.8.1", default-features = false }
pretty_env_logger = "0.3.0"
rand = { version = "0.7.0", features = ["small_rng"] }
cranelift-codegen = { version = "0.44.0", features = ["enable-serde", "all-arch"] }
filetime = "0.2.7"

[features]
default = ["std"]
Expand Down
149 changes: 85 additions & 64 deletions wasmtime-environ/src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,13 @@ use std::io::Write;
use std::path::{Path, PathBuf};
use std::string::{String, ToString};

#[macro_use] // for tests
mod config;
mod worker;

use config::{cache_config, CacheConfig};
pub use config::{create_new_config, init};
use worker::worker;
use worker::{worker, Worker};

lazy_static! {
static ref SELF_MTIME: String = {
Expand Down Expand Up @@ -48,9 +49,12 @@ lazy_static! {
};
}

pub struct ModuleCacheEntry<'config> {
mod_cache_path: Option<PathBuf>,
/// Cache entry for one compiled module.
///
/// Holds `Some(inner)` when caching is enabled and `None` when it is
/// disabled, so `get_data`/`update_data` become no-ops in the disabled case.
pub struct ModuleCacheEntry<'config, 'worker>(Option<ModuleCacheEntryInner<'config, 'worker>>);

/// The active (cache-enabled) state of a `ModuleCacheEntry`.
struct ModuleCacheEntryInner<'config, 'worker> {
    // Full path of the cache file for this module:
    // <cache-dir>/<target-triple>/<compiler-dir>/mod-<hash>[.d]
    mod_cache_path: PathBuf,
    // Global cache configuration this entry was created from.
    cache_config: &'config CacheConfig,
    // Worker notified asynchronously after successful cache reads/writes.
    worker: &'worker Worker,
}

#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
Expand All @@ -72,95 +76,112 @@ type ModuleCacheDataTupleType = (

/// Newtype over `Sha256`; `Sha256Hasher::digest` hashes a module together
/// with its function bodies to derive the cache file name.
struct Sha256Hasher(Sha256);

impl<'config> ModuleCacheEntry<'config> {
impl<'config, 'worker> ModuleCacheEntry<'config, 'worker> {
pub fn new<'data>(
module: &Module,
function_body_inputs: &PrimaryMap<DefinedFuncIndex, FunctionBodyData<'data>>,
isa: &dyn isa::TargetIsa,
compiler_name: &str,
generate_debug_info: bool,
) -> Self {
Self::new_with_config(
module,
function_body_inputs,
isa,
compiler_name,
generate_debug_info,
cache_config(),
)
let cache_config = cache_config();
if cache_config.enabled() {
Self(Some(ModuleCacheEntryInner::new(
module,
function_body_inputs,
isa,
compiler_name,
generate_debug_info,
cache_config,
worker(),
)))
} else {
Self(None)
}
}

/// Builds a cache entry directly from an already-constructed inner entry,
/// bypassing the global `cache_config()` lookup performed by `new()`.
///
/// Test-only: lets tests supply their own config/worker. The `'data`
/// lifetime parameter on the original was unused in the signature and has
/// been removed.
#[cfg(test)]
fn from_inner(inner: ModuleCacheEntryInner<'config, 'worker>) -> Self {
    // Always `Some`: tests construct the inner entry explicitly, so the
    // "cache disabled" (`None`) path is never taken here.
    Self(Some(inner))
}

/// Reads the cached module data, if caching is enabled and a valid cache
/// file exists; returns `None` otherwise. On a successful read, notifies
/// the worker asynchronously about the cache hit.
pub fn get_data(&self) -> Option<ModuleCacheData> {
    // No-op when the cache is disabled.
    let inner = self.0.as_ref()?;
    let data = inner.get_data()?;
    // Notify the worker only after a successful read.
    inner.worker.on_cache_get_async(&inner.mod_cache_path);
    Some(data)
}

fn new_with_config<'data>(
/// Writes `data` to the cache file, if caching is enabled. On a successful
/// write, notifies the worker asynchronously about the updated file.
///
/// Fix: the original used `Option::map` purely for its side effect and then
/// discarded the rebuilt value with `;` (clippy's `option_map_unit_fn`
/// pattern); an explicit conditional states the intent directly.
pub fn update_data(&self, data: &ModuleCacheData) {
    if let Some(inner) = &self.0 {
        // `update_data` yields `Some(())` on success; only then tell the
        // worker about the freshly written cache file.
        if inner.update_data(data).is_some() {
            inner.worker.on_cache_update_async(&inner.mod_cache_path);
        }
    }
}
}

impl<'config, 'worker> ModuleCacheEntryInner<'config, 'worker> {
fn new<'data>(
module: &Module,
function_body_inputs: &PrimaryMap<DefinedFuncIndex, FunctionBodyData<'data>>,
isa: &dyn isa::TargetIsa,
compiler_name: &str,
generate_debug_info: bool,
cache_config: &'config CacheConfig,
worker: &'worker Worker,
) -> Self {
let mod_cache_path = if cache_config.enabled() {
let hash = Sha256Hasher::digest(module, function_body_inputs);
let compiler_dir = if cfg!(debug_assertions) {
format!(
"{comp_name}-{comp_ver}-{comp_mtime}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
comp_mtime = *SELF_MTIME,
)
} else {
format!(
"{comp_name}-{comp_ver}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
)
};
let mod_filename = format!(
"mod-{mod_hash}{mod_dbg}",
mod_hash = base64::encode_config(&hash, base64::URL_SAFE_NO_PAD), // standard encoding uses '/' which can't be used for filename
mod_dbg = if generate_debug_info { ".d" } else { "" },
);
Some(
cache_config
.directory()
.join(isa.triple().to_string())
.join(compiler_dir)
.join(mod_filename),
let hash = Sha256Hasher::digest(module, function_body_inputs);
let compiler_dir = if cfg!(debug_assertions) {
format!(
"{comp_name}-{comp_ver}-{comp_mtime}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
comp_mtime = *SELF_MTIME,
)
} else {
None
format!(
"{comp_name}-{comp_ver}",
comp_name = compiler_name,
comp_ver = env!("GIT_REV"),
)
};
let mod_filename = format!(
"mod-{mod_hash}{mod_dbg}",
mod_hash = base64::encode_config(&hash, base64::URL_SAFE_NO_PAD), // standard encoding uses '/' which can't be used for filename
mod_dbg = if generate_debug_info { ".d" } else { "" },
);
let mod_cache_path = cache_config
.directory()
.join(isa.triple().to_string())
.join(compiler_dir)
.join(mod_filename);

Self {
mod_cache_path,
cache_config,
worker,
}
}

pub fn get_data(&self) -> Option<ModuleCacheData> {
let path = self.mod_cache_path.as_ref()?;
trace!("get_data() for path: {}", path.display());
let compressed_cache_bytes = fs::read(path).ok()?;
fn get_data(&self) -> Option<ModuleCacheData> {
trace!("get_data() for path: {}", self.mod_cache_path.display());
let compressed_cache_bytes = fs::read(&self.mod_cache_path).ok()?;
let cache_bytes = zstd::decode_all(&compressed_cache_bytes[..])
.map_err(|err| warn!("Failed to decompress cached code: {}", err))
.ok()?;
let ret = bincode::deserialize(&cache_bytes[..])
bincode::deserialize(&cache_bytes[..])
.map_err(|err| warn!("Failed to deserialize cached code: {}", err))
.ok()?;

worker().on_cache_get_async(path); // call on success
Some(ret)
}

pub fn update_data(&self, data: &ModuleCacheData) {
if self.update_data_impl(data).is_some() {
let path = self.mod_cache_path.as_ref().unwrap();
worker().on_cache_update_async(path); // call on success
}
.ok()
}

fn update_data_impl(&self, data: &ModuleCacheData) -> Option<()> {
let path = self.mod_cache_path.as_ref()?;
trace!("update_data() for path: {}", path.display());
fn update_data(&self, data: &ModuleCacheData) -> Option<()> {
trace!("update_data() for path: {}", self.mod_cache_path.display());
let serialized_data = bincode::serialize(&data)
.map_err(|err| warn!("Failed to serialize cached code: {}", err))
.ok()?;
Expand All @@ -173,17 +194,17 @@ impl<'config> ModuleCacheEntry<'config> {

// Optimize syscalls: first, try writing to disk. It should succeed in most cases.
// Otherwise, try creating the cache directory and retry writing to the file.
if fs_write_atomic(path, "mod", &compressed_data) {
if fs_write_atomic(&self.mod_cache_path, "mod", &compressed_data) {
return Some(());
}

debug!(
"Attempting to create the cache directory, because \
failed to write cached code to disk, path: {}",
path.display(),
self.mod_cache_path.display(),
);

let cache_dir = path.parent().unwrap();
let cache_dir = self.mod_cache_path.parent().unwrap();
fs::create_dir_all(cache_dir)
.map_err(|err| {
warn!(
Expand All @@ -194,7 +215,7 @@ impl<'config> ModuleCacheEntry<'config> {
})
.ok()?;

if fs_write_atomic(path, "mod", &compressed_data) {
if fs_write_atomic(&self.mod_cache_path, "mod", &compressed_data) {
Some(())
} else {
None
Expand Down
3 changes: 2 additions & 1 deletion wasmtime-environ/src/cache/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -621,4 +621,5 @@ impl CacheConfig {
}

#[cfg(test)]
mod tests;
#[macro_use]
pub mod tests;
3 changes: 2 additions & 1 deletion wasmtime-environ/src/cache/config/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ use tempfile::{self, TempDir};
// that's why these function and macro always use custom cache directory
// note: tempdir removes directory when being dropped, so we need to return it to the caller,
// so the paths are valid
fn test_prolog() -> (TempDir, PathBuf, PathBuf) {
pub fn test_prolog() -> (TempDir, PathBuf, PathBuf) {
let _ = pretty_env_logger::try_init();
let temp_dir = tempfile::tempdir().expect("Can't create temporary directory");
let cache_dir = temp_dir.path().join("cache-dir");
let config_path = temp_dir.path().join("cache-config.toml");
Expand Down
Loading