5 changes: 3 additions & 2 deletions crates/artifacts/solc/src/sources.rs
@@ -124,7 +124,7 @@ impl Source {
}

/// Reads the file's content
#[instrument(name = "read_source", level = "debug", skip_all, err)]
#[instrument(name = "Source::read", skip_all, err)]
pub fn read(file: &Path) -> Result<Self, SolcIoError> {
trace!(file=%file.display());
let mut content = fs::read_to_string(file).map_err(|err| SolcIoError::new(err, file))?;
@@ -162,6 +162,7 @@ impl Source {
}

/// Reads all files
#[instrument(name = "Source::read_all", skip_all)]
pub fn read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
where
I: IntoIterator<Item = T>,
Expand Down Expand Up @@ -211,7 +212,7 @@ impl Source {
#[cfg(feature = "async")]
impl Source {
/// async version of `Self::read`
#[instrument(name = "async_read_source", level = "debug", skip_all, err)]
#[instrument(name = "Source::async_read", skip_all, err)]
pub async fn async_read(file: &Path) -> Result<Self, SolcIoError> {
let mut content =
tokio::fs::read_to_string(file).await.map_err(|err| SolcIoError::new(err, file))?;
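For readers unfamiliar with the attribute being renamed throughout this PR: `#[instrument]` comes from the `tracing` crate and wraps the function body in a span. The sketch below is a minimal illustration under the usual `tracing` semantics (the function is made up, not code from this PR): `name = "..."` overrides the default span name, `skip_all` keeps every argument out of the span's fields, and `err` records an `Err` return as an error event on the span.

```rust
use std::{fs, io, path::Path};
use tracing::instrument;

/// Stand-in for `Source::read`: the span is named "Source::read" rather than
/// the auto-generated "read", the `file` argument is not recorded as a span
/// field (`skip_all`), and an `Err` return is logged against the span (`err`).
#[instrument(name = "Source::read", skip_all, err)]
fn read(file: &Path) -> io::Result<String> {
    // Record only the fields we opt into explicitly.
    tracing::trace!(file = %file.display(), "reading source");
    fs::read_to_string(file)
}
```

Note that the old attributes carried `level = "debug"`; without an explicit level, `#[instrument]` defaults to `INFO`, so the renamed spans are also promoted to the default level.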
20 changes: 19 additions & 1 deletion crates/compilers/src/cache.rs
@@ -118,7 +118,7 @@ impl<S: CompilerSettings> CompilerCache<S> {
/// cache.join_artifacts_files(project.artifacts_path());
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
#[instrument(skip_all, name = "sol-files-cache::read")]
#[instrument(name = "CompilerCache::read", skip_all)]
pub fn read(path: &Path) -> Result<Self> {
trace!("reading solfiles cache at {}", path.display());
let cache: Self = utils::read_json_file(path)?;
@@ -149,6 +149,7 @@ impl<S: CompilerSettings> CompilerCache<S> {
}

/// Write the cache as json file to the given path
#[instrument(name = "CompilerCache::write", skip_all)]
pub fn write(&self, path: &Path) -> Result<()> {
trace!("writing cache with {} entries to json file: \"{}\"", self.len(), path.display());
utils::create_parent_dir_all(path)?;
@@ -158,6 +159,7 @@ impl<S: CompilerSettings> CompilerCache<S> {
}

/// Removes build infos which don't have any artifacts linked to them.
#[instrument(skip_all)]
pub fn remove_outdated_builds(&mut self) {
let mut outdated = Vec::new();
for build_id in &self.builds {
@@ -180,6 +182,7 @@ impl<S: CompilerSettings> CompilerCache<S> {
}

/// Sets the `CacheEntry`'s file paths to `root` adjoined to `self.file`.
#[instrument(skip_all)]
pub fn join_entries(&mut self, root: &Path) -> &mut Self {
self.files = std::mem::take(&mut self.files)
.into_iter()
@@ -189,6 +192,7 @@ impl<S: CompilerSettings> CompilerCache<S> {
}

/// Removes `base` from all `CacheEntry` paths
#[instrument(skip_all)]
pub fn strip_entries_prefix(&mut self, base: &Path) -> &mut Self {
self.files = std::mem::take(&mut self.files)
.into_iter()
@@ -198,12 +202,14 @@ impl<S: CompilerSettings> CompilerCache<S> {
}

/// Sets the artifact files location to `base` adjoined to the `CachEntries` artifacts.
#[instrument(skip_all)]
pub fn join_artifacts_files(&mut self, base: &Path) -> &mut Self {
self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base));
self
}

/// Removes `base` from all artifact file paths
#[instrument(skip_all)]
pub fn strip_artifact_files_prefixes(&mut self, base: &Path) -> &mut Self {
self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base));
self
@@ -212,6 +218,7 @@ impl<S: CompilerSettings> CompilerCache<S> {
/// Removes all `CacheEntry` which source files don't exist on disk
///
/// **NOTE:** this assumes the `files` are absolute
#[instrument(skip_all)]
pub fn remove_missing_files(&mut self) {
trace!("remove non existing files from cache");
self.files.retain(|file, _| {
@@ -292,6 +299,7 @@ impl<S: CompilerSettings> CompilerCache<S> {
///
/// **NOTE**: unless the cache's `files` keys were modified `contract_file` is expected to be
/// absolute.
#[instrument(skip_all)]
pub fn read_artifact<Artifact: DeserializeOwned>(
&self,
contract_file: &Path,
@@ -318,6 +326,7 @@ impl<S: CompilerSettings> CompilerCache<S> {
/// let artifacts = cache.read_artifacts::<CompactContractBytecode>()?;
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
#[instrument(skip_all)]
pub fn read_artifacts<Artifact: DeserializeOwned + Send + Sync>(
&self,
) -> Result<Artifacts<Artifact>> {
@@ -335,6 +344,7 @@ impl<S: CompilerSettings> CompilerCache<S> {
/// objects, so we are basically just partially deserializing build infos here.
///
/// [BuildContext]: crate::buildinfo::BuildContext
#[instrument(skip_all)]
pub fn read_builds<L: Language>(&self, build_info_dir: &Path) -> Result<Builds<L>> {
use rayon::prelude::*;

@@ -491,6 +501,7 @@ impl CacheEntry {
/// Reads all artifact files associated with the `CacheEntry`
///
/// **Note:** all artifact file paths should be absolute.
#[instrument(skip_all)]
fn read_artifact_files<Artifact: DeserializeOwned>(
&self,
) -> Result<BTreeMap<String, Vec<ArtifactFile<Artifact>>>> {
@@ -514,6 +525,7 @@ impl CacheEntry {
Ok(artifacts)
}

#[instrument(skip_all)]
pub(crate) fn merge_artifacts<'a, A, I, T: 'a>(&mut self, artifacts: I)
where
I: IntoIterator<Item = (&'a String, A)>,
@@ -1017,6 +1029,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
ArtifactsCache<'a, T, C>
{
/// Create a new cache instance with the given files
#[instrument(name = "ArtifactsCache::new", skip(project, edges))]
pub fn new(
project: &'a Project<C, T>,
edges: GraphEdges<C::ParsedSource>,
@@ -1042,6 +1055,8 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
}
}

trace!(invalidate_cache, "cache invalidated");

// new empty cache
CompilerCache::new(Default::default(), paths, preprocessed)
}
@@ -1135,6 +1150,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
}

/// Adds the file's hashes to the set if not set yet
#[instrument(skip_all)]
pub fn remove_dirty_sources(&mut self) {
match self {
ArtifactsCache::Ephemeral(..) => {}
@@ -1161,6 +1177,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
}

/// Filters out those sources that don't need to be compiled
#[instrument(name = "ArtifactsCache::filter", skip_all)]
pub fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
match self {
ArtifactsCache::Ephemeral(..) => {}
@@ -1173,6 +1190,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
/// compiled and written to disk `written_artifacts`.
///
/// Returns all the _cached_ artifacts.
#[instrument(name = "ArtifactsCache::consume", skip_all)]
pub fn consume<A>(
self,
written_artifacts: &Artifacts<A>,
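Besides the attributes, this file gains one plain event: `trace!(invalidate_cache, "cache invalidated");` inside `ArtifactsCache::new`. As a small aside on that macro shorthand (my own toy example, assuming standard `tracing` field syntax): a bare local variable in field position records a field named after the variable, equivalent to writing `invalidate_cache = invalidate_cache`.

```rust
use tracing::trace;

fn log_invalidation(cache_exists: bool, settings_changed: bool) {
    let invalidate_cache = !cache_exists || settings_changed;
    // Shorthand: records a boolean field named `invalidate_cache` on the event.
    trace!(invalidate_cache, "cache invalidated");
    // Equivalent explicit form:
    trace!(invalidate_cache = invalidate_cache, "cache invalidated");
}
```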
1 change: 1 addition & 0 deletions crates/compilers/src/compile/output/mod.rs
@@ -87,6 +87,7 @@ impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
ProjectCompileOutput<C, T>
{
/// Converts all `\\` separators in _all_ paths to `/`
#[instrument(skip_all)]
pub fn slash_paths(&mut self) {
self.compiler_output.slash_paths();
self.compiled_artifacts.slash_paths();
8 changes: 7 additions & 1 deletion crates/compilers/src/compile/project.rs
@@ -171,6 +171,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
///
/// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows
/// multiple `jobs`, see [`crate::Project::set_solc_jobs()`].
#[instrument(name = "ProjectCompiler::new", skip_all)]
pub fn with_sources(project: &'a Project<C, T>, mut sources: Sources) -> Result<Self> {
if let Some(filter) = &project.sparse_output {
sources.retain(|f, _| filter.is_match(f))
@@ -209,6 +210,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
/// let output = project.compile()?;
/// # Ok::<(), Box<dyn std::error::Error>>(())
/// ```
#[instrument(name = "compile_project", skip_all)]
pub fn compile(self) -> Result<ProjectCompileOutput<C, T>> {
let slash_paths = self.project.slash_paths;

@@ -226,6 +228,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
/// Does basic preprocessing
/// - sets proper source unit names
/// - check cache
#[instrument(skip_all)]
fn preprocess(self) -> Result<PreprocessedState<'a, T, C>> {
trace!("preprocessing");
let Self { edges, project, mut sources, primary_profiles, preprocessor } = self;
@@ -265,6 +268,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
PreprocessedState<'a, T, C>
{
/// advance to the next state by compiling all sources
#[instrument(skip_all)]
fn compile(self) -> Result<CompiledState<'a, T, C>> {
trace!("compiling");
let PreprocessedState { sources, mut cache, primary_profiles, preprocessor } = self;
@@ -297,7 +301,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
///
/// Writes all output contracts to disk if enabled in the `Project` and if the build was
/// successful
#[instrument(skip_all, name = "write-artifacts")]
#[instrument(skip_all)]
fn write_artifacts(self) -> Result<ArtifactsState<'a, T, C>> {
let CompiledState { output, cache, primary_profiles } = self;

@@ -365,6 +369,7 @@ impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
/// Writes the cache file
///
/// this concludes the [`Project::compile()`] statemachine
#[instrument(skip_all)]
fn write_cache(self) -> Result<ProjectCompileOutput<C, T>> {
let ArtifactsState { output, cache, compiled_artifacts } = self;
let project = cache.project();
@@ -436,6 +441,7 @@ impl<L: Language, S: CompilerSettings> CompilerSources<'_, L, S> {
}

/// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`]
#[instrument(name = "CompilerSources::filter", skip_all)]
fn filter<
T: ArtifactOutput<CompilerContract = C::CompilerContract>,
C: Compiler<Language = L>,
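Because every stage of the compile state machine now carries a span (`ProjectCompiler::new`, `compile_project`, `preprocess`, `compile`, `write_artifacts`, `write_cache`), the phases nest under the top-level `compile_project` span, and a subscriber that reports span close times gets a per-phase timing breakdown for free. A rough, self-contained sketch of that nesting pattern (the bodies are placeholders, not the real call graph):

```rust
use tracing::instrument;

#[instrument(name = "compile_project", skip_all)]
fn compile() {
    preprocess();
    write_artifacts();
    write_cache();
}

// Each phase entered from `compile` becomes a child span of `compile_project`.
#[instrument(skip_all)]
fn preprocess() { /* parse sources, consult the cache */ }

#[instrument(skip_all)]
fn write_artifacts() { /* persist compiler output */ }

#[instrument(skip_all)]
fn write_cache() { /* flush the cache file */ }
```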
28 changes: 16 additions & 12 deletions crates/compilers/src/compilers/mod.rs
@@ -13,7 +13,7 @@ use semver::{Version, VersionReq};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use std::{
borrow::Cow,
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap, HashSet},
fmt::{Debug, Display},
hash::Hash,
path::{Path, PathBuf},
@@ -356,20 +356,24 @@ pub(crate) fn cache_version(
f: impl FnOnce(&Path) -> Result<Version>,
) -> Result<Version> {
#[allow(clippy::complexity)]
static VERSION_CACHE: OnceLock<Mutex<HashMap<PathBuf, HashMap<Vec<String>, Version>>>> =
static VERSION_CACHE: OnceLock<Mutex<HashMap<(PathBuf, Vec<String>), Version>>> =
OnceLock::new();
let mut lock = VERSION_CACHE
.get_or_init(|| Mutex::new(HashMap::new()))

let mut cache = VERSION_CACHE
.get_or_init(Default::default)
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner);

if let Some(version) = lock.get(&path).and_then(|versions| versions.get(args)) {
return Ok(version.clone());
match cache.entry((path, args.to_vec())) {
Entry::Occupied(entry) => Ok(entry.get().clone()),
Entry::Vacant(entry) => {
let path = &entry.key().0;
let _guard =
debug_span!("get_version", path = %path.file_name().map(|n| n.to_string_lossy()).unwrap_or_else(|| path.to_string_lossy()))
.entered();
let version = f(path)?;
entry.insert(version.clone());
Ok(version)
}
}

let version = f(&path)?;

lock.entry(path).or_default().insert(args.to_vec(), version.clone());

Ok(version)
}
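The `cache_version` hunk above is the one real refactor in this PR: the nested `HashMap<PathBuf, HashMap<Vec<String>, Version>>` becomes a flat map keyed by `(PathBuf, Vec<String>)`, the separate lookup-then-insert is replaced with the `Entry` API, and the version probe itself is wrapped in a `get_version` span via `debug_span!(...).entered()`. A stripped-down sketch of the same pattern, with a made-up `probe` closure standing in for the real `f: impl FnOnce(&Path) -> Result<Version>`:

```rust
use std::{
    collections::{hash_map::Entry, HashMap},
    path::PathBuf,
    sync::{Mutex, OnceLock},
};

fn cached_probe(path: PathBuf, args: &[String], probe: impl FnOnce(&PathBuf) -> String) -> String {
    static CACHE: OnceLock<Mutex<HashMap<(PathBuf, Vec<String>), String>>> = OnceLock::new();
    let mut cache = CACHE
        .get_or_init(Default::default)
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner);

    match cache.entry((path, args.to_vec())) {
        // Hit: return the cached value without re-running the probe.
        Entry::Occupied(entry) => entry.get().clone(),
        // Miss: run the probe exactly once inside a span, then memoize it.
        Entry::Vacant(entry) => {
            let _guard = tracing::debug_span!("get_version").entered();
            let value = probe(&entry.key().0);
            entry.insert(value.clone());
            value
        }
    }
}
```

One trade-off visible in the diff: `args.to_vec()` now runs on every call to build the owned key for `entry`, where the old shape only cloned the args on insert.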
10 changes: 7 additions & 3 deletions crates/compilers/src/compilers/solc/compiler.rs
@@ -89,6 +89,7 @@ impl Solc {
/// A new instance which points to `solc`. Invokes `solc --version` to determine the version.
///
/// Returns error if `solc` is not found in the system or if the version cannot be retrieved.
#[instrument(name = "Solc::new", skip_all)]
pub fn new(path: impl Into<PathBuf>) -> Result<Self> {
let path = path.into();
let version = Self::version(path.clone())?;
@@ -200,6 +201,7 @@ impl Solc {
///
/// Ok::<_, Box<dyn std::error::Error>>(())
/// ```
#[instrument(skip_all)]
pub fn find_svm_installed_version(version: &Version) -> Result<Option<Self>> {
let version = format!("{}.{}.{}", version.major, version.minor, version.patch);
let solc = Self::svm_home()
@@ -266,6 +268,7 @@ impl Solc {
/// # }
/// ```
#[cfg(feature = "svm-solc")]
#[instrument(name = "Solc::install", skip_all)]
pub async fn install(version: &Version) -> std::result::Result<Self, svm::SvmError> {
trace!("installing solc version \"{}\"", version);
crate::report::solc_installation_start(version);
@@ -283,6 +286,7 @@ impl Solc {

/// Blocking version of `Self::install`
#[cfg(feature = "svm-solc")]
#[instrument(name = "Solc::blocking_install", skip_all)]
pub fn blocking_install(version: &Version) -> std::result::Result<Self, svm::SvmError> {
use foundry_compilers_core::utils::RuntimeOrHandle;

@@ -311,6 +315,7 @@ impl Solc {
/// Verify that the checksum for this version of solc is correct. We check against the SHA256
/// checksum from the build information published by [binaries.soliditylang.org](https://binaries.soliditylang.org/)
#[cfg(feature = "svm-solc")]
#[instrument(name = "Solc::verify_checksum", skip_all)]
pub fn verify_checksum(&self) -> Result<()> {
let version = self.version_short();
let mut version_path = svm::version_path(version.to_string().as_str());
@@ -407,6 +412,7 @@ impl Solc {
}

/// Compiles with `--standard-json` and deserializes the output as the given `D`.
#[instrument(name = "Solc::compile", skip_all)]
pub fn compile_as<T: Serialize, D: DeserializeOwned>(&self, input: &T) -> Result<D> {
let output = self.compile_output(input)?;

@@ -417,7 +423,7 @@ impl Solc {
}

/// Compiles with `--standard-json` and returns the raw `stdout` output.
#[instrument(name = "compile", level = "debug", skip_all)]
#[instrument(name = "Solc::compile_raw", skip_all)]
pub fn compile_output<T: Serialize>(&self, input: &T) -> Result<Vec<u8>> {
let mut cmd = self.configure_cmd();

@@ -447,13 +453,11 @@ impl Solc {
}

/// Invokes `solc --version` and parses the output as a SemVer [`Version`].
#[instrument(level = "debug", skip_all)]
pub fn version(solc: impl Into<PathBuf>) -> Result<Version> {
Self::version_with_args(solc, &[])
}

/// Invokes `solc --version` and parses the output as a SemVer [`Version`].
#[instrument(level = "debug", skip_all)]
pub fn version_with_args(solc: impl Into<PathBuf>, args: &[String]) -> Result<Version> {
crate::cache_version(solc.into(), args, |solc| {
let mut cmd = Command::new(solc);
4 changes: 2 additions & 2 deletions crates/compilers/src/compilers/vyper/mod.rs
@@ -127,6 +127,7 @@ impl Vyper {
}

/// Compiles with `--standard-json` and deserializes the output as the given `D`.
#[instrument(name = "Vyper::compile", skip_all)]
pub fn compile_as<T: Serialize, D: DeserializeOwned>(&self, input: &T) -> Result<D> {
let output = self.compile_output(input)?;

@@ -139,7 +140,7 @@ impl Vyper {
}

/// Compiles with `--standard-json` and returns the raw `stdout` output.
#[instrument(name = "compile", level = "debug", skip_all)]
#[instrument(name = "Vyper::compile_raw", skip_all)]
pub fn compile_output<T: Serialize>(&self, input: &T) -> Result<Vec<u8>> {
let mut cmd = Command::new(&self.path);
cmd.arg("--standard-json")
@@ -171,7 +172,6 @@ impl Vyper {
}

/// Invokes `vyper --version` and parses the output as a SemVer [`Version`].
#[instrument(level = "debug", skip_all)]
pub fn version(vyper: impl Into<PathBuf>) -> Result<Version> {
crate::cache_version(vyper.into(), &[], |vyper| {
let mut cmd = Command::new(vyper);
1 change: 1 addition & 0 deletions crates/compilers/src/compilers/vyper/parser.rs
@@ -36,6 +36,7 @@ pub struct VyperParsedSource {
impl ParsedSource for VyperParsedSource {
type Language = VyperLanguage;

#[instrument(name = "VyperParsedSource::parse", skip_all)]
fn parse(content: &str, file: &Path) -> Result<Self> {
let version_req = capture_outer_and_inner(content, &RE_VYPER_VERSION, &["version"])
.first()
1 change: 1 addition & 0 deletions crates/compilers/src/resolver/mod.rs
@@ -332,6 +332,7 @@ impl<L: Language, D: ParsedSource<Language = L>> Graph<D> {
}

/// Resolves a number of sources within the given config
#[instrument(name = "Graph::resolve_sources", skip_all)]
pub fn resolve_sources(
paths: &ProjectPathsConfig<D::Language>,
sources: Sources,
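Since `foundry-compilers` only emits these spans, nothing is visible unless the embedding application installs a `tracing` subscriber. A hypothetical setup for surfacing them is sketched below; the filter target and the `fmt`/`env-filter` features of `tracing-subscriber` are assumptions on my part, not something this PR adds.

```rust
use tracing_subscriber::{fmt, fmt::format::FmtSpan, EnvFilter};

fn init_tracing() {
    fmt()
        // Emit an event when each span closes, so the renamed spans
        // (e.g. `compile_project`, `Solc::compile_raw`, `CompilerCache::read`)
        // show up together with their duration.
        .with_span_events(FmtSpan::CLOSE)
        .with_env_filter(EnvFilter::new("foundry_compilers=trace"))
        .init();
}
```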