Merged
16 changes: 11 additions & 5 deletions crates/artifacts/solc/src/sources.rs
@@ -22,6 +22,14 @@ impl Sources {
Self::default()
}

/// Joins all paths relative to `root`.
pub fn make_absolute(&mut self, root: &Path) {
self.0 = std::mem::take(&mut self.0)
.into_iter()
.map(|(path, source)| (root.join(path), source))
.collect();
}

/// Returns `true` if no sources should have optimized output selection.
pub fn all_dirty(&self) -> bool {
self.0.values().all(|s| s.is_dirty())
@@ -169,7 +177,7 @@ impl Source {
/// Recursively finds all source files under the given dir path and reads them all
#[cfg(feature = "walkdir")]
pub fn read_all_from(dir: &Path, extensions: &[&str]) -> Result<Sources, SolcIoError> {
Self::read_all_files(utils::source_files(dir, extensions))
Self::read_all(utils::source_files_iter(dir, extensions))
}

/// Recursively finds all solidity and yul files under the given dir path and reads them all
@@ -178,14 +186,12 @@ impl Source {
Self::read_all_from(dir, utils::SOLC_EXTENSIONS)
}

/// Reads all source files of the given vec
///
/// Depending on the len of the vec it will try to read the files in parallel
/// Reads all source files of the given list.
pub fn read_all_files(files: Vec<PathBuf>) -> Result<Sources, SolcIoError> {
Self::read_all(files)
}

/// Reads all files
/// Reads all of the given files.
#[instrument(name = "Source::read_all", skip_all)]
pub fn read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
where
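For illustration, here is a minimal std-only sketch of what the new `make_absolute` helper does; the real method lives on `Sources` (a path-keyed map of sources), so the plain `BTreeMap<PathBuf, String>` stand-in below is an assumption made to keep the example self-contained:

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

/// Stand-in for `Sources::make_absolute`: join every key onto `root`.
/// `Path::join` returns the argument unchanged when it is already absolute,
/// so running this over an already-rooted map is effectively a no-op.
fn make_absolute(sources: &mut BTreeMap<PathBuf, String>, root: &Path) {
    *sources = std::mem::take(sources)
        .into_iter()
        .map(|(path, source)| (root.join(path), source))
        .collect();
}

fn main() {
    let mut sources = BTreeMap::from([(
        PathBuf::from("src/Counter.sol"),
        String::from("contract Counter {}"),
    )]);
    make_absolute(&mut sources, Path::new("/home/user/project"));
    assert!(sources.contains_key(Path::new("/home/user/project/src/Counter.sol")));
}
```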
2 changes: 1 addition & 1 deletion crates/compilers/src/config.rs
@@ -616,7 +616,7 @@ impl<L: Language> ProjectPathsConfig<L> {

/// Returns the combined set of `Self::read_sources` + `Self::read_tests` + `Self::read_scripts`
pub fn read_input_files(&self) -> Result<Sources> {
Ok(Source::read_all_files(self.input_files())?)
Ok(Source::read_all(self.input_files_iter())?)
}
}

2 changes: 1 addition & 1 deletion crates/compilers/src/flatten.rs
@@ -209,7 +209,7 @@ impl Flattener {

let output = output.compiler_output;

let sources = Source::read_all_files(vec![target.to_path_buf()])?;
let sources = Source::read_all([target.to_path_buf()])?;
let graph = Graph::<C::Parser>::resolve_sources(&project.paths, sources)?;

let ordered_sources = collect_ordered_deps(target, &project.paths, &graph)?;
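The change from `read_all_files(vec![target.to_path_buf()])` to `read_all([target.to_path_buf()])` works because `read_all` is generic over any iterable of path-like items, as the (truncated) signature in the sources.rs hunk above suggests; the exact `where` clause is cut off there, so the bounds in this sketch are an assumption:

```rust
use std::path::PathBuf;

/// Assumed shape of `Source::read_all`'s generics: any `IntoIterator` whose
/// items convert into `PathBuf` is accepted, so arrays, `Vec`s, and iterators
/// all work without an intermediate allocation at the call site.
fn read_all<T, I>(files: I) -> Vec<PathBuf>
where
    I: IntoIterator<Item = T>,
    T: Into<PathBuf>,
{
    files.into_iter().map(Into::into).collect()
}

fn main() {
    // A fixed-size array is enough; no `vec![...]` needed.
    let from_array = read_all(["src/A.sol"]);
    // A `Vec` still works, which is why `read_all_files` can simply forward to `read_all`.
    let from_vec = read_all(vec![PathBuf::from("src/B.sol")]);
    assert_eq!(from_array[0], PathBuf::from("src/A.sol"));
    assert_eq!(from_vec.len(), 1);
}
```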
9 changes: 6 additions & 3 deletions crates/compilers/src/resolver/mod.rs
@@ -400,6 +400,9 @@ impl<P: SourceParser> Graph<P> {
Ok(())
}

// The cache relies on the absolute paths relative to the project root as cache keys.
sources.make_absolute(&paths.root);
Review comment on lines +403 to +404 (Member):
ah I believe that makes sense

can't think of any issues this could cause


let mut parser = P::new(paths.with_language_ref());

// we start off by reading all input files, which includes all solidity files from the
@@ -566,14 +569,14 @@ impl<P: SourceParser> Graph<P> {
let mut versioned_sources = Vec::with_capacity(versioned_nodes.len());

for (version, profile_to_nodes) in versioned_nodes {
for (profile_idx, input_node_indixies) in profile_to_nodes {
for (profile_idx, input_node_indexes) in profile_to_nodes {
let mut sources = Sources::new();

// all input nodes will be processed
let mut processed_sources = input_node_indixies.iter().copied().collect();
let mut processed_sources = input_node_indexes.iter().copied().collect();

// we only process input nodes (from sources, tests for example)
for idx in input_node_indixies {
for idx in input_node_indexes {
// insert the input node in the sources set and remove it from the available
// set
let (path, source) =
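Finally, a short sketch of why the `sources.make_absolute(&paths.root)` call matters before cache lookups, per the inline comment on lines +403 to +404; the path-keyed map below is a hypothetical stand-in for the compiler cache, not the crate's actual cache type:

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

fn main() {
    let root = Path::new("/home/user/project");

    // Hypothetical cache keyed by absolute source path, as the inline
    // comment in resolver/mod.rs describes.
    let mut cache: BTreeMap<PathBuf, &str> = BTreeMap::new();
    cache.insert(root.join("src/Counter.sol"), "cached entry");

    // Looking up with a relative path silently misses the entry...
    assert!(cache.get(Path::new("src/Counter.sol")).is_none());
    // ...while the absolutized path hits, which is what `make_absolute` provides
    // for every source fed into the graph.
    assert!(cache.get(&root.join("src/Counter.sol")).is_some());
}
```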