diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 965c17b00..81a1a0c68 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,6 +7,7 @@ on: env: CARGO_TERM_COLOR: always + RUST_BACKTRACE: full concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} diff --git a/.gitignore b/.gitignore index 62eb994d4..0f5e46a2e 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,8 @@ /target /Cargo.lock -cache/ +/cache +test-data/**/cache/ .vscode /.envrc @@ -12,4 +13,4 @@ cache/ devenv.local.nix .direnv .pre-commit-config.yaml -.lock \ No newline at end of file +.lock diff --git a/Cargo.toml b/Cargo.toml index f745ca063..681b0308a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,8 +22,9 @@ manual-string-new = "warn" uninlined-format-args = "warn" use-self = "warn" redundant-clone = "warn" -# until is fixed -literal-string-with-formatting-args = "allow" + +result-large-err = "allow" +large-enum-variant = "allow" [workspace.lints.rust] rust-2018-idioms = "warn" @@ -54,7 +55,8 @@ semver = { version = "1.0", features = ["serde"] } serde = { version = "1", features = ["derive", "rc"] } serde_json = "1.0" similar-asserts = "1" -solar-parse = { version = "=0.1.1", default-features = false } +solar-parse = { version = "=0.1.2", default-features = false } +solar-sema = { version = "=0.1.2", default-features = false } svm = { package = "svm-rs", version = "0.5", default-features = false } tempfile = "3.9" thiserror = "2" diff --git a/crates/artifacts/solc/src/ast/misc.rs b/crates/artifacts/solc/src/ast/misc.rs index 6ec3187be..7144ddc5f 100644 --- a/crates/artifacts/solc/src/ast/misc.rs +++ b/crates/artifacts/solc/src/ast/misc.rs @@ -4,7 +4,7 @@ use std::{fmt, fmt::Write, str::FromStr}; /// Represents the source location of a node: `::`. /// /// The `start`, `length` and `index` can be -1 which is represented as `None` -#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct SourceLocation { pub start: Option, pub length: Option, diff --git a/crates/artifacts/solc/src/sources.rs b/crates/artifacts/solc/src/sources.rs index 01c520864..c9ef1fe60 100644 --- a/crates/artifacts/solc/src/sources.rs +++ b/crates/artifacts/solc/src/sources.rs @@ -198,7 +198,13 @@ impl Source { /// Generate a non-cryptographically secure checksum of the file's content. #[cfg(feature = "checksum")] pub fn content_hash(&self) -> String { - alloy_primitives::hex::encode(::digest(self.content.as_bytes())) + Self::content_hash_of(&self.content) + } + + /// Generate a non-cryptographically secure checksum of the given source. 
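+ ///
+ /// Illustrative doc example (a sketch; `Source::new` and the `checksum` feature are assumed):
+ ///
+ /// ```ignore
+ /// let src = Source::new("contract A {}");
+ /// // `content_hash` now delegates to `content_hash_of`, so both agree.
+ /// assert_eq!(src.content_hash(), Source::content_hash_of("contract A {}"));
+ /// ```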
+ #[cfg(feature = "checksum")] + pub fn content_hash_of(src: &str) -> String { + alloy_primitives::hex::encode(::digest(src)) } } diff --git a/crates/compilers/Cargo.toml b/crates/compilers/Cargo.toml index e0fd8a685..7034d6498 100644 --- a/crates/compilers/Cargo.toml +++ b/crates/compilers/Cargo.toml @@ -33,6 +33,7 @@ thiserror.workspace = true path-slash.workspace = true yansi.workspace = true solar-parse.workspace = true +solar-sema.workspace = true futures-util = { workspace = true, optional = true } tokio = { workspace = true, optional = true } diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs index 5fcd95e98..e22c5386d 100644 --- a/crates/compilers/src/cache.rs +++ b/crates/compilers/src/cache.rs @@ -25,6 +25,9 @@ use std::{ time::{Duration, UNIX_EPOCH}, }; +mod iface; +use iface::interface_repr_hash; + /// ethers-rs format version /// /// `ethers-solc` uses a different format version id, but the actual format is consistent with @@ -45,16 +48,20 @@ pub struct CompilerCache { pub files: BTreeMap, pub builds: BTreeSet, pub profiles: BTreeMap, + pub preprocessed: bool, + pub mocks: HashSet, } impl CompilerCache { - pub fn new(format: String, paths: ProjectPaths) -> Self { + pub fn new(format: String, paths: ProjectPaths, preprocessed: bool) -> Self { Self { format, paths, files: Default::default(), builds: Default::default(), profiles: Default::default(), + preprocessed, + mocks: Default::default(), } } } @@ -361,10 +368,7 @@ impl CompilerCache { { match tokio::task::spawn_blocking(f).await { Ok(res) => res, - Err(_) => Err(SolcError::io( - std::io::Error::new(std::io::ErrorKind::Other, "background task failed"), - "", - )), + Err(_) => Err(SolcError::io(std::io::Error::other("background task failed"), "")), } } } @@ -377,6 +381,8 @@ impl Default for CompilerCache { files: Default::default(), paths: Default::default(), profiles: Default::default(), + preprocessed: false, + mocks: Default::default(), } } } @@ -384,7 +390,7 @@ impl Default for CompilerCache { impl<'a, S: CompilerSettings> From<&'a ProjectPathsConfig> for CompilerCache { fn from(config: &'a ProjectPathsConfig) -> Self { let paths = config.paths_relative(); - Self::new(Default::default(), paths) + Self::new(Default::default(), paths, false) } } @@ -411,6 +417,8 @@ pub struct CacheEntry { pub last_modification_date: u64, /// hash to identify whether the content of the file changed pub content_hash: String, + /// hash of the interface representation of the file, if it's a source file + pub interface_repr_hash: Option, /// identifier name see [`foundry_compilers_core::utils::source_name()`] pub source_name: PathBuf, /// fully resolved imports of the file @@ -654,11 +662,19 @@ pub(crate) struct ArtifactsCacheInner< /// The file hashes. pub content_hashes: HashMap, + + /// The interface representations for source files. + pub interface_repr_hashes: HashMap, } impl, C: Compiler> ArtifactsCacheInner<'_, T, C> { + /// Whether given file is a source file or a test/script file. 
+ fn is_source_file(&self, file: &Path) -> bool { + self.project.paths.is_source_file(file) + } + /// Creates a new cache entry for the file fn create_cache_entry(&mut self, file: PathBuf, source: &Source) { let imports = self @@ -668,10 +684,14 @@ impl, C: Compiler> .map(|import| strip_prefix(import, self.project.root()).into()) .collect(); + let interface_repr_hash = (self.cache.preprocessed && self.is_source_file(&file)) + .then(|| self.interface_repr_hash(source, &file).to_string()); + let entry = CacheEntry { last_modification_date: CacheEntry::read_last_modification_date(&file) .unwrap_or_default(), content_hash: source.content_hash(), + interface_repr_hash, source_name: strip_prefix(&file, self.project.root()).into(), imports, version_requirement: self.edges.version_requirement(&file).map(|v| v.to_string()), @@ -683,6 +703,25 @@ impl, C: Compiler> self.cache.files.insert(file, entry); } + /// Gets or calculates the content hash for the given source file. + fn content_hash(&mut self, source: &Source, file: &Path) -> &str { + self.content_hashes.entry(file.to_path_buf()).or_insert_with(|| source.content_hash()) + } + + /// Gets or calculates the interface representation hash for the given source file. + fn interface_repr_hash(&mut self, source: &Source, file: &Path) -> &str { + self.interface_repr_hashes.entry(file.to_path_buf()).or_insert_with(|| { + if let Some(r) = interface_repr_hash(&source.content, file) { + return r; + } + // Equivalent to: self.content_hash(source, file).into() + self.content_hashes + .entry(file.to_path_buf()) + .or_insert_with(|| source.content_hash()) + .clone() + }) + } + /// Returns the set of [Source]s that need to be compiled to produce artifacts for requested /// input. /// @@ -703,7 +742,7 @@ impl, C: Compiler> // If we are missing artifact for file, compile it. if self.is_missing_artifacts(file, version, profile) { - compile_complete.insert(file.clone()); + compile_complete.insert(file.to_path_buf()); } // Ensure that we have a cache entry for all sources. @@ -717,15 +756,15 @@ impl, C: Compiler> for source in &compile_complete { for import in self.edges.imports(source) { if !compile_complete.contains(import) { - compile_optimized.insert(import.clone()); + compile_optimized.insert(import); } } } sources.retain(|file, source| { - source.kind = if compile_complete.contains(file) { + source.kind = if compile_complete.contains(file.as_path()) { SourceCompilationKind::Complete - } else if compile_optimized.contains(file) { + } else if compile_optimized.contains(file.as_path()) { SourceCompilationKind::Optimized } else { return false; @@ -765,10 +804,12 @@ impl, C: Compiler> return true; } - false + // If any requested extra files are missing for any artifact, mark source as dirty to + // generate them + self.missing_extra_files() } - // Walks over all cache entires, detects dirty files and removes them from cache. + // Walks over all cache entries, detects dirty files and removes them from cache. fn find_and_remove_dirty(&mut self) { fn populate_dirty_files( file: &Path, @@ -845,14 +886,47 @@ impl, C: Compiler> // Pre-add all sources that are guaranteed to be dirty for file in sources.keys() { - if self.is_dirty_impl(file) { + if self.is_dirty_impl(file, false) { self.dirty_sources.insert(file.clone()); } } - // Perform DFS to find direct/indirect importers of dirty files. 
- for file in self.dirty_sources.clone().iter() { - populate_dirty_files(file, &mut self.dirty_sources, &edges); + if !self.cache.preprocessed { + // Perform DFS to find direct/indirect importers of dirty files. + for file in self.dirty_sources.clone().iter() { + populate_dirty_files(file, &mut self.dirty_sources, &edges); + } + } else { + // Mark sources as dirty based on their imports + for file in sources.keys() { + if self.dirty_sources.contains(file) { + continue; + } + let is_src = self.is_source_file(file); + for import in edges.imports(file) { + // Any source file importing a dirty source file is dirty. + if is_src && self.dirty_sources.contains(import) { + self.dirty_sources.insert(file.clone()); + break; + // For non-src files we mark them as dirty only if they import a dirty + // non-src file or a src file whose interface representation changed. + // For identified mock contracts (non-src contracts that extend contracts + // from a src file) we mark all their edges as dirty. + } else if !is_src + && self.dirty_sources.contains(import) + && (!self.is_source_file(import) + || self.is_dirty_impl(import, true) + || self.cache.mocks.contains(file)) + { + if self.cache.mocks.contains(file) { + // Mark all mock edges as dirty. + populate_dirty_files(file, &mut self.dirty_sources, &edges); + } else { + self.dirty_sources.insert(file.clone()); + } + } + } + } + } } } else { // Purge all sources on graph resolution error. } } } - fn is_dirty_impl(&self, file: &Path) -> bool { - let Some(hash) = self.content_hashes.get(file) else { - trace!("missing content hash"); - return true; - }; - + fn is_dirty_impl(&self, file: &Path, use_interface_repr: bool) -> bool { let Some(entry) = self.cache.entry(file) else { trace!("missing cache entry"); return true; }; - if entry.content_hash != *hash { - trace!("content hash changed"); - return true; - } + if use_interface_repr && self.cache.preprocessed { + let Some(interface_hash) = self.interface_repr_hashes.get(file) else { + trace!("missing interface hash"); + return true; + }; - // If any requested extra files are missing for any artifact, mark source as dirty to - // generate them - for artifacts in self.cached_artifacts.values() { - for artifacts in artifacts.values() { - for artifact_file in artifacts { - if self.project.artifacts_handler().is_dirty(artifact_file).unwrap_or(true) { - return true; - } - } + if entry.interface_repr_hash.as_ref() != Some(interface_hash) { + trace!("interface hash changed"); + return true; + }; + } else { + let Some(content_hash) = self.content_hashes.get(file) else { + trace!("missing content hash"); + return true; + }; + + if entry.content_hash != *content_hash { + trace!("content hash changed"); + return true; } } @@ -901,10 +975,27 @@ impl, C: Compiler> /// Adds the file's hashes to the set if not set yet fn fill_hashes(&mut self, sources: &Sources) { for (file, source) in sources { - if let hash_map::Entry::Vacant(entry) = self.content_hashes.entry(file.clone()) { - entry.insert(source.content_hash()); + let _ = self.content_hash(source, file); + + // Fill interface representation hashes for source files + if self.cache.preprocessed && self.project.paths.is_source_file(file) { + let _ = self.interface_repr_hash(source, file); + } + } + } + + /// Helper function to check if any requested extra files are missing for any artifact. 
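+ ///
+ /// Returns `true` as soon as the artifacts handler reports a cached artifact as dirty
+ /// (or fails to check it), forcing a recompile even when content hashes are unchanged.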
+ fn missing_extra_files(&self) -> bool { + for artifacts in self.cached_artifacts.values() { + for artifacts in artifacts.values() { + for artifact_file in artifacts { + if self.project.artifacts_handler().is_dirty(artifact_file).unwrap_or(true) { + return true; + } + } } } + false } } @@ -926,28 +1017,33 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { /// Create a new cache instance with the given files - pub fn new(project: &'a Project, edges: GraphEdges) -> Result { + pub fn new( + project: &'a Project, + edges: GraphEdges, + preprocessed: bool, + ) -> Result { /// Returns the [CompilerCache] to use /// /// Returns a new empty cache if the cache does not exist or `invalidate_cache` is set. fn get_cache, C: Compiler>( project: &Project, invalidate_cache: bool, + preprocessed: bool, ) -> CompilerCache { // the currently configured paths let paths = project.paths.paths_relative(); if !invalidate_cache && project.cache_path().exists() { if let Ok(cache) = CompilerCache::read_joined(&project.paths) { - if cache.paths == paths { - // unchanged project paths + if cache.paths == paths && preprocessed == cache.preprocessed { + // unchanged project paths and same preprocess cache option return cache; } } } // new empty cache - CompilerCache::new(Default::default(), paths) + CompilerCache::new(Default::default(), paths, preprocessed) } let cache = if project.cached { @@ -957,7 +1053,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> let invalidate_cache = !edges.unresolved_imports().is_empty(); // read the cache file if it already exists - let mut cache = get_cache(project, invalidate_cache); + let mut cache = get_cache(project, invalidate_cache, preprocessed); cache.remove_missing_files(); @@ -993,6 +1089,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> dirty_sources: Default::default(), content_hashes: Default::default(), sources_in_scope: Default::default(), + interface_repr_hashes: Default::default(), }; ArtifactsCache::Cached(cache) @@ -1045,6 +1142,24 @@ impl<'a, T: ArtifactOutput, C: Compiler> } } + /// Updates files with mock contracts identified in preprocess phase. + pub fn update_mocks(&mut self, mocks: HashSet) { + match self { + ArtifactsCache::Ephemeral(..) => {} + ArtifactsCache::Cached(cache) => cache.cache.mocks = mocks, + } + } + + /// Returns the set of files with mock contracts currently in cache. + /// This set is passed to preprocessors and updated accordingly. + /// Cache is then updated by using `update_mocks` call. + pub fn mocks(&self) -> HashSet { + match self { + ArtifactsCache::Ephemeral(..) 
=> HashSet::default(), + ArtifactsCache::Cached(cache) => cache.cache.mocks.clone(), + } + } + /// Filters out those sources that don't need to be compiled pub fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) { match self { diff --git a/crates/compilers/src/cache/iface.rs b/crates/compilers/src/cache/iface.rs new file mode 100644 index 000000000..8f006ef41 --- /dev/null +++ b/crates/compilers/src/cache/iface.rs @@ -0,0 +1,96 @@ +use crate::{parse_one_source, replace_source_content}; +use solar_sema::{ + ast::{self, Span}, + interface::diagnostics::EmittedDiagnostics, +}; +use std::path::Path; + +pub(crate) fn interface_repr_hash(content: &str, path: &Path) -> Option { + let src = interface_repr(content, path).ok()?; + Some(foundry_compilers_artifacts::Source::content_hash_of(&src)) +} + +pub(crate) fn interface_repr(content: &str, path: &Path) -> Result { + parse_one_source(content, path, |ast| interface_representation_ast(content, &ast)) +} + +/// Helper function to remove parts of the contract which do not alter its interface: +/// - Internal functions +/// - External functions bodies +/// +/// Preserves all libraries and interfaces. +pub(crate) fn interface_representation_ast( + content: &str, + ast: &solar_parse::ast::SourceUnit<'_>, +) -> String { + let mut spans_to_remove: Vec = Vec::new(); + for item in ast.items.iter() { + let ast::ItemKind::Contract(contract) = &item.kind else { + continue; + }; + + if contract.kind.is_interface() || contract.kind.is_library() { + continue; + } + + for contract_item in contract.body.iter() { + if let ast::ItemKind::Function(function) = &contract_item.kind { + let is_exposed = match function.kind { + // Function with external or public visibility + ast::FunctionKind::Function => { + function.header.visibility >= Some(ast::Visibility::Public) + } + ast::FunctionKind::Constructor + | ast::FunctionKind::Fallback + | ast::FunctionKind::Receive => true, + ast::FunctionKind::Modifier => false, + }; + + // If function is not exposed we remove the entire span (signature and + // body). Otherwise we keep function signature and + // remove only the body. 
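+ // E.g. `function c() internal { ... }` is removed entirely, while
+ // `function a() external { ... }` is reduced to `function a() external`
+ // (see `test_interface_representation` below).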
+ if !is_exposed { + spans_to_remove.push(contract_item.span); + } else { + spans_to_remove.push(function.body_span); + } + } + } + } + let content = + replace_source_content(content, spans_to_remove.iter().map(|span| (span.to_range(), ""))) + .replace("\n", ""); + crate::utils::RE_TWO_OR_MORE_SPACES.replace_all(&content, "").into_owned() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_interface_representation() { + let content = r#" +library Lib { + function libFn() internal { + // logic to keep + } +} +contract A { + function a() external {} + function b() public {} + function c() internal { + // logic logic logic + } + function d() private {} + function e() external { + // logic logic logic + } +}"#; + + let result = interface_repr(content, Path::new("")).unwrap(); + assert_eq!( + result, + r#"library Lib {function libFn() internal {// logic to keep}}contract A {function a() externalfunction b() publicfunction e() external }"# + ); + } +} diff --git a/crates/compilers/src/compile/project.rs b/crates/compilers/src/compile/project.rs index 91195cb1b..6ffc26f49 100644 --- a/crates/compilers/src/compile/project.rs +++ b/crates/compilers/src/compile/project.rs @@ -109,16 +109,36 @@ use crate::{ output::{AggregatedCompilerOutput, Builds}, report, resolver::{GraphEdges, ResolvedSources}, - ArtifactOutput, CompilerSettings, Graph, Project, ProjectCompileOutput, Sources, + ArtifactOutput, CompilerSettings, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, + Sources, }; use foundry_compilers_core::error::Result; use rayon::prelude::*; use semver::Version; -use std::{collections::HashMap, path::PathBuf, time::Instant}; +use std::{ + collections::{HashMap, HashSet}, + fmt::Debug, + path::PathBuf, + time::Instant, +}; /// A set of different Solc installations with their version and the sources to be compiled pub(crate) type VersionedSources<'a, L, S> = HashMap>; +/// Invoked before the actual compiler invocation and can override the input. +/// +/// Updates the list of identified cached mocks (if any) to be stored in cache and updates the +/// compiler input. 
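+ ///
+ /// A sketch of an implementor, modeled on the `can_preprocess` test below (the struct
+ /// name and the `++` -> `--` rewrite are illustrative; updating `mocks` is optional):
+ ///
+ /// ```ignore
+ /// #[derive(Debug)]
+ /// struct ReplacePreprocessor;
+ ///
+ /// impl Preprocessor<MultiCompiler> for ReplacePreprocessor {
+ ///     fn preprocess(
+ ///         &self,
+ ///         _compiler: &MultiCompiler,
+ ///         input: &mut MultiCompilerInput,
+ ///         _paths: &ProjectPathsConfig<MultiCompilerLanguage>,
+ ///         _mocks: &mut HashSet<PathBuf>,
+ ///     ) -> Result<()> {
+ ///         // Only Solc inputs are rewritten; other inputs pass through untouched.
+ ///         if let MultiCompilerInput::Solc(input) = input {
+ ///             for src in input.input.sources.values_mut() {
+ ///                 src.content = src.content.replace("++", "--").into();
+ ///             }
+ ///         }
+ ///         Ok(())
+ ///     }
+ /// }
+ /// ```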
+pub trait Preprocessor: Debug { + fn preprocess( + &self, + compiler: &C, + input: &mut C::Input, + paths: &ProjectPathsConfig, + mocks: &mut HashSet, + ) -> Result<()>; +} + #[derive(Debug)] pub struct ProjectCompiler< 'a, @@ -132,6 +152,8 @@ pub struct ProjectCompiler< primary_profiles: HashMap, /// how to compile all the sources sources: CompilerSources<'a, C::Language, C::Settings>, + /// Optional preprocessor + preprocessor: Option>>, } impl<'a, T: ArtifactOutput, C: Compiler> @@ -165,7 +187,11 @@ impl<'a, T: ArtifactOutput, C: Compiler> sources, }; - Ok(Self { edges, primary_profiles, project, sources }) + Ok(Self { edges, primary_profiles, project, sources, preprocessor: None }) + } + + pub fn with_preprocessor(self, preprocessor: impl Preprocessor + 'static) -> Self { + Self { preprocessor: Some(Box::new(preprocessor)), ..self } } /// Compiles all the sources of the `Project` in the appropriate mode @@ -202,17 +228,17 @@ impl<'a, T: ArtifactOutput, C: Compiler> /// - check cache fn preprocess(self) -> Result> { trace!("preprocessing"); - let Self { edges, project, mut sources, primary_profiles } = self; + let Self { edges, project, mut sources, primary_profiles, preprocessor } = self; // convert paths on windows to ensure consistency with the `CompilerOutput` `solc` emits, // which is unix style `/` sources.slash_paths(); - let mut cache = ArtifactsCache::new(project, edges)?; + let mut cache = ArtifactsCache::new(project, edges, preprocessor.is_some())?; // retain and compile only dirty sources and all their imports sources.filter(&mut cache); - Ok(PreprocessedState { sources, cache, primary_profiles }) + Ok(PreprocessedState { sources, cache, primary_profiles, preprocessor }) } } @@ -230,6 +256,9 @@ struct PreprocessedState<'a, T: ArtifactOutput, + + /// Optional preprocessor + preprocessor: Option>>, } impl<'a, T: ArtifactOutput, C: Compiler> @@ -238,9 +267,9 @@ impl<'a, T: ArtifactOutput, C: Compiler> /// advance to the next state by compiling all sources fn compile(self) -> Result> { trace!("compiling"); - let PreprocessedState { sources, mut cache, primary_profiles } = self; + let PreprocessedState { sources, mut cache, primary_profiles, preprocessor } = self; - let mut output = sources.compile(&mut cache)?; + let mut output = sources.compile(&mut cache, preprocessor)?; // source paths get stripped before handing them over to solc, so solc never uses absolute // paths, instead `--base-path ` is set. this way any metadata that's derived from @@ -435,6 +464,7 @@ impl CompilerSources<'_, L, S> { >( self, cache: &mut ArtifactsCache<'_, T, C>, + preprocessor: Option>>, ) -> Result> { let project = cache.project(); let graph = cache.graph(); @@ -447,6 +477,10 @@ impl CompilerSources<'_, L, S> { let mut include_paths = project.paths.include_paths.clone(); include_paths.extend(graph.include_paths().clone()); + // Get current list of mocks from cache. This will be passed to preprocessors and updated + // accordingly, then set back in cache. + let mut mocks = cache.mocks(); + let mut jobs = Vec::new(); for (language, versioned_sources) in self.sources { for (version, sources, (profile, opt_settings)) in versioned_sources { @@ -481,10 +515,22 @@ impl CompilerSources<'_, L, S> { input.strip_prefix(project.paths.root.as_path()); + if let Some(preprocessor) = preprocessor.as_ref() { + preprocessor.preprocess( + &project.compiler, + &mut input, + &project.paths, + &mut mocks, + )?; + } + jobs.push((input, profile, actually_dirty)); } } + // Update cache with mocks updated by preprocessors. 
+ cache.update_mocks(mocks); + let results = if let Some(num_jobs) = jobs_cnt { compile_parallel(&project.compiler, jobs, num_jobs) } else { diff --git a/crates/compilers/src/compilers/solc/mod.rs index 3c1d6526a..e4381aa8c 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -113,7 +113,7 @@ pub struct SolcVersionedInput { #[serde(flatten)] pub input: SolcInput, #[serde(flatten)] - cli_settings: CliSettings, + pub cli_settings: CliSettings, } impl CompilerInput for SolcVersionedInput { diff --git a/crates/compilers/src/config.rs index 047b86a3b..356d80b98 100644 --- a/crates/compilers/src/config.rs +++ b/crates/compilers/src/config.rs @@ -250,6 +250,26 @@ impl ProjectPathsConfig { Self::dapptools(&std::env::current_dir().map_err(|err| SolcError::io(err, "."))?) } + /// Returns true if the given path is a test or script file. + pub fn is_test_or_script(&self, path: &Path) -> bool { + self.is_test(path) || self.is_script(path) + } + + /// Returns true if the given path is a test file. + pub fn is_test(&self, path: &Path) -> bool { + path_starts_with_rooted(path, &self.tests, &self.root) + } + + /// Returns true if the given path is a script file. + pub fn is_script(&self, path: &Path) -> bool { + path_starts_with_rooted(path, &self.scripts, &self.root) + } + + /// Returns true if the given path is a source file, i.e. not a test or script file. + pub fn is_source_file(&self, path: &Path) -> bool { + !self.is_test_or_script(path) + } + /// Returns a new [ProjectPaths] instance that contains all directories configured for this /// project pub fn paths(&self) -> ProjectPaths { @@ -683,6 +703,26 @@ impl ProjectPaths { .collect(); self } + + /// Returns true if the given path is a test or script file. + pub fn is_test_or_script(&self, path: &Path) -> bool { + self.is_test(path) || self.is_script(path) + } + + /// Returns true if the given path is a test file. + pub fn is_test(&self, path: &Path) -> bool { + path.starts_with(&self.tests) + } + + /// Returns true if the given path is a script file. + pub fn is_script(&self, path: &Path) -> bool { + path.starts_with(&self.scripts) + } + + /// Returns true if the given path is a source file, i.e. not a test or script file. + pub fn is_source_file(&self, path: &Path) -> bool { + !self.is_test_or_script(path) + } } impl Default for ProjectPaths { @@ -974,6 +1014,17 @@ impl SolcConfigBuilder { } } + /// Returns true if `a` starts with `b`, either as given or with `b` stripped of the `root` prefix. 
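+ ///
+ /// E.g. with `root = "/repo"` and `b = "/repo/test"`, both `/repo/test/Counter.t.sol`
+ /// and the root-relative `test/Counter.t.sol` match (illustrative paths).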
+fn path_starts_with_rooted(a: &Path, b: &Path, root: &Path) -> bool { + if a.starts_with(b) { + return true; + } + if let Ok(b) = b.strip_prefix(root) { + return a.starts_with(b); + } + false +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/compilers/src/filter.rs b/crates/compilers/src/filter.rs index 119d96744..979b1e5be 100644 --- a/crates/compilers/src/filter.rs +++ b/crates/compilers/src/filter.rs @@ -121,7 +121,7 @@ impl<'a> SparseOutputFilter<'a> { let mut required_sources = vec![file.clone()]; if let Some(data) = graph.get_parsed_source(file) { let imports = graph.imports(file).into_iter().filter_map(|import| { - graph.get_parsed_source(import).map(|data| (import.as_path(), data)) + graph.get_parsed_source(import).map(|data| (import, data)) }); for import in data.compilation_dependencies(imports) { let import = import.to_path_buf(); diff --git a/crates/compilers/src/flatten.rs b/crates/compilers/src/flatten.rs index ce2672d38..de629cca7 100644 --- a/crates/compilers/src/flatten.rs +++ b/crates/compilers/src/flatten.rs @@ -1,8 +1,9 @@ use crate::{ + apply_updates, compilers::{Compiler, ParsedSource}, filter::MaybeSolData, resolver::parse::SolData, - ArtifactOutput, CompilerSettings, Graph, Project, ProjectPathsConfig, + ArtifactOutput, CompilerSettings, Graph, Project, ProjectPathsConfig, Updates, }; use foundry_compilers_artifacts::{ ast::{visitor::Visitor, *}, @@ -20,6 +21,7 @@ use std::{ collections::{BTreeSet, HashMap, HashSet}, hash::Hash, path::{Path, PathBuf}, + sync::Arc, }; use visitor::Walk; @@ -95,7 +97,7 @@ impl Visitor for ReferencesCollector { } fn visit_external_assembly_reference(&mut self, reference: &ExternalInlineAssemblyReference) { - let mut src = reference.src.clone(); + let mut src = reference.src; // If suffix is used in assembly reference (e.g. value.slot), it will be included into src. // However, we are only interested in the referenced name, thus we strip . part. @@ -110,49 +112,30 @@ impl Visitor for ReferencesCollector { } } -/// Updates to be applied to the sources. -/// source_path -> (start, end, new_value) -type Updates = HashMap>; - -pub struct FlatteningResult<'a> { +pub struct FlatteningResult { /// Updated source in the order they should be written to the output file. sources: Vec, /// Pragmas that should be present in the target file. pragmas: Vec, /// License identifier that should be present in the target file. 
- license: Option<&'a str>, + license: Option, } -impl<'a> FlatteningResult<'a> { +impl FlatteningResult { fn new( - flattener: &Flattener, - mut updates: Updates, + mut flattener: Flattener, + updates: Updates, pragmas: Vec, - license: Option<&'a str>, + license: Option, ) -> Self { - let mut sources = Vec::new(); - - for path in &flattener.ordered_sources { - let mut content = flattener.sources.get(path).unwrap().content.as_bytes().to_vec(); - let mut offset: isize = 0; - if let Some(updates) = updates.remove(path) { - let mut updates = updates.iter().collect::>(); - updates.sort_by_key(|(start, _, _)| *start); - for (start, end, new_value) in updates { - let start = (*start as isize + offset) as usize; - let end = (*end as isize + offset) as usize; - - content.splice(start..end, new_value.bytes()); - offset += new_value.len() as isize - (end - start) as isize; - } - } - let content = format!( - "// {}\n{}", - path.strip_prefix(&flattener.project_root).unwrap_or(path).display(), - String::from_utf8(content).unwrap() - ); - sources.push(content); - } + apply_updates(&mut flattener.sources, updates); + + let sources = flattener + .ordered_sources + .iter() + .map(|path| flattener.sources.remove(path).unwrap().content) + .map(Arc::unwrap_or_clone) + .collect(); Self { sources, pragmas, license } } @@ -229,7 +212,7 @@ impl Flattener { let sources = Source::read_all_files(vec![target.to_path_buf()])?; let graph = Graph::::resolve_sources(&project.paths, sources)?; - let ordered_sources = collect_ordered_deps(&target.to_path_buf(), &project.paths, &graph)?; + let ordered_sources = collect_ordered_deps(target, &project.paths, &graph)?; #[cfg(windows)] let ordered_sources = { @@ -261,7 +244,7 @@ impl Flattener { sources, asts, ordered_sources, - project_root: project.root().clone(), + project_root: project.root().to_path_buf(), }) } @@ -274,9 +257,10 @@ impl Flattener { /// 3. Remove all imports. /// 4. Remove all pragmas except for the ones in the target file. /// 5. Remove all license identifiers except for the one in the target file. - pub fn flatten(&self) -> String { + pub fn flatten(self) -> String { let mut updates = Updates::new(); + self.append_filenames(&mut updates); let top_level_names = self.rename_top_level_definitions(&mut updates); self.rename_contract_level_types_references(&top_level_names, &mut updates); self.remove_qualified_imports(&mut updates); @@ -289,15 +273,26 @@ impl Flattener { self.flatten_result(updates, target_pragmas, target_license).get_flattened_target() } - fn flatten_result<'a>( - &'a self, + fn flatten_result( + self, updates: Updates, target_pragmas: Vec, - target_license: Option<&'a str>, - ) -> FlatteningResult<'a> { + target_license: Option, + ) -> FlatteningResult { FlatteningResult::new(self, updates, target_pragmas, target_license) } + /// Appends a comment with the file name to the beginning of each source. + fn append_filenames(&self, updates: &mut Updates) { + for path in &self.ordered_sources { + updates.entry(path.clone()).or_default().insert(( + 0, + 0, + format!("// {}\n", path.strip_prefix(&self.project_root).unwrap_or(path).display()), + )); + } + } + /// Finds and goes over all references to file-level definitions and updates them to match /// definition name. This is needed for two reasons: /// 1. We want to rename all aliased or qualified imports. @@ -752,14 +747,14 @@ impl Flattener { /// Removes all license identifiers from all sources. Returns license identifier from target /// file, if any. 
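+ /// E.g. a `// SPDX-License-Identifier: MIT` comment in the target yields
+ /// `SPDX-License-Identifier: MIT`, which is re-emitted once in the flattened output,
+ /// while every other occurrence is stripped.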
- fn process_licenses(&self, updates: &mut Updates) -> Option<&str> { + fn process_licenses(&self, updates: &mut Updates) -> Option { let mut target_license = None; for loc in &self.collect_licenses() { if loc.path == self.target { let license_line = self.read_location(loc); let license_start = license_line.find("SPDX-License-Identifier:").unwrap(); - target_license = Some(license_line[license_start..].trim()); + target_license = Some(license_line[license_start..].trim().to_string()); } updates.entry(loc.path.clone()).or_default().insert(( loc.start, @@ -800,12 +795,12 @@ impl Flattener { /// Performs DFS to collect all dependencies of a target fn collect_deps( - path: &PathBuf, + path: &Path, paths: &ProjectPathsConfig, graph: &Graph, deps: &mut HashSet, ) -> Result<()> { - if deps.insert(path.clone()) { + if deps.insert(path.to_path_buf()) { let target_dir = path.parent().ok_or_else(|| { SolcError::msg(format!("failed to get parent directory for \"{}\"", path.display())) })?; @@ -836,7 +831,7 @@ fn collect_deps( /// order. If files have the same number of dependencies, we sort them alphabetically. /// Target file is always placed last. pub fn collect_ordered_deps( - path: &PathBuf, + path: &Path, paths: &ProjectPathsConfig, graph: &Graph, ) -> Result> { @@ -876,7 +871,7 @@ pub fn collect_ordered_deps( let mut ordered_deps = paths_with_deps_count.into_iter().map(|(_, path)| path).collect::>(); - ordered_deps.push(path.clone()); + ordered_deps.push(path.to_path_buf()); Ok(ordered_deps) } diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs index 58c8057a6..bd4bc98ac 100644 --- a/crates/compilers/src/lib.rs +++ b/crates/compilers/src/lib.rs @@ -38,6 +38,11 @@ pub use filter::{FileFilter, SparseOutputFilter, TestFileFilter}; pub mod report; +/// Updates to be applied to the sources. +/// +/// `source_path -> (start, end, new_value)` +pub type Updates = HashMap>; + /// Utilities for creating, mocking and testing of (temporary) projects #[cfg(feature = "project-util")] pub mod project_util; @@ -59,10 +64,14 @@ use foundry_compilers_core::error::{Result, SolcError, SolcIoError}; use output::sources::{VersionedSourceFile, VersionedSourceFiles}; use project::ProjectCompiler; use semver::Version; +use solar_parse::Parser; +use solar_sema::interface::{diagnostics::EmittedDiagnostics, source_map::FileName, Session}; use solc::SolcSettings; use std::{ - collections::{BTreeMap, HashMap, HashSet}, + collections::{BTreeMap, BTreeSet, HashMap, HashSet}, + ops::Range, path::{Path, PathBuf}, + sync::Arc, }; /// Represents a project workspace and handles `solc` compiling of all contracts in that workspace. 
@@ -172,13 +181,13 @@ where let mut sources = Vec::new(); let mut unique_paths = HashSet::new(); let (path, source) = graph.node(*target_index).unpack(); - unique_paths.insert(path.clone()); + unique_paths.insert(path); sources.push((path, source)); sources.extend( graph .all_imported_nodes(*target_index) .map(|index| graph.node(index).unpack()) - .filter(|(p, _)| unique_paths.insert(p.to_path_buf())), + .filter(|(p, _)| unique_paths.insert(*p)), ); let root = self.root(); @@ -205,27 +214,27 @@ where impl, C: Compiler> Project { /// Returns the path to the artifacts directory - pub fn artifacts_path(&self) -> &PathBuf { + pub fn artifacts_path(&self) -> &Path { &self.paths.artifacts } /// Returns the path to the sources directory - pub fn sources_path(&self) -> &PathBuf { + pub fn sources_path(&self) -> &Path { &self.paths.sources } /// Returns the path to the cache file - pub fn cache_path(&self) -> &PathBuf { + pub fn cache_path(&self) -> &Path { &self.paths.cache } /// Returns the path to the `build-info` directory nested in the artifacts dir - pub fn build_info_path(&self) -> &PathBuf { + pub fn build_info_path(&self) -> &Path { &self.paths.build_infos } /// Returns the root directory of the project - pub fn root(&self) -> &PathBuf { + pub fn root(&self) -> &Path { &self.paths.root } @@ -341,7 +350,7 @@ impl, C: Compiler> Pro std::fs::remove_file(self.cache_path()) .map_err(|err| SolcIoError::new(err, self.cache_path()))?; if let Some(cache_folder) = - self.cache_path().parent().filter(|cache_folder| self.root() != cache_folder) + self.cache_path().parent().filter(|cache_folder| self.root() != *cache_folder) { // remove the cache folder if the cache file was the only file if cache_folder @@ -360,14 +369,14 @@ impl, C: Compiler> Pro // clean the artifacts dir if self.artifacts_path().exists() && self.root() != self.artifacts_path() { std::fs::remove_dir_all(self.artifacts_path()) - .map_err(|err| SolcIoError::new(err, self.artifacts_path().clone()))?; + .map_err(|err| SolcIoError::new(err, self.artifacts_path()))?; trace!("removed artifacts dir \"{}\"", self.artifacts_path().display()); } // also clean the build-info dir, in case it's not nested in the artifacts dir if self.build_info_path().exists() && self.root() != self.build_info_path() { std::fs::remove_dir_all(self.build_info_path()) - .map_err(|err| SolcIoError::new(err, self.build_info_path().clone()))?; + .map_err(|err| SolcIoError::new(err, self.build_info_path()))?; tracing::trace!("removed build-info dir \"{}\"", self.build_info_path().display()); } @@ -882,6 +891,58 @@ fn rebase_path(base: &Path, path: &Path) -> PathBuf { new_path.to_slash_lossy().into_owned().into() } +/// Utility function to apply a set of updates to provided sources. +pub fn apply_updates(sources: &mut Sources, updates: Updates) { + for (path, source) in sources { + if let Some(updates) = updates.get(path) { + source.content = Arc::new(replace_source_content( + source.content.as_str(), + updates.iter().map(|(start, end, update)| ((*start..*end), update.as_str())), + )); + } + } +} + +/// Utility function to change source content ranges with provided updates. +/// Assumes that the updates are sorted. 
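+ ///
+ /// Illustrative example (ranges are byte offsets into the original string):
+ ///
+ /// ```ignore
+ /// let out = replace_source_content("abcdef", [(0..1, "z"), (3..5, "")]);
+ /// assert_eq!(out, "zbcf");
+ /// ```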
+pub fn replace_source_content( + source: impl Into, + updates: impl IntoIterator, impl AsRef)>, +) -> String { + let mut offset = 0; + let mut content = source.into(); + for (range, new_value) in updates { + let update_range = utils::range_by_offset(&range, offset); + let new_value = new_value.as_ref(); + content.replace_range(update_range.clone(), new_value); + offset += new_value.len() as isize - (update_range.end - update_range.start) as isize; + } + content +} + +pub(crate) fn parse_one_source( + content: &str, + path: &Path, + f: impl FnOnce(solar_sema::ast::SourceUnit<'_>) -> R, +) -> Result { + let sess = Session::builder().with_buffer_emitter(Default::default()).build(); + let res = sess.enter(|| -> solar_parse::interface::Result<_> { + let arena = solar_parse::ast::Arena::new(); + let filename = FileName::Real(path.to_path_buf()); + let mut parser = Parser::from_source_code(&sess, &arena, filename, content.to_string())?; + let ast = parser.parse_file().map_err(|e| e.emit())?; + Ok(f(ast)) + }); + + // Return if any diagnostics emitted during content parsing. + if let Err(err) = sess.emitted_errors().unwrap() { + trace!("failed parsing {path:?}:\n{err}"); + return Err(err); + } + + Ok(res.unwrap()) +} + #[cfg(test)] #[cfg(feature = "svm-solc")] mod tests { @@ -1031,4 +1092,54 @@ mod tests { .unwrap(); assert!(resolved.exists()); } + + #[test] + fn test_replace_source_content() { + let original_content = r#" +library Lib { + function libFn() internal { + // logic to keep + } +} +contract A { + function a() external {} + function b() public {} + function c() internal { + // logic logic logic + } + function d() private {} + function e() external { + // logic logic logic + } +}"#; + + let updates = vec![ + // Replace function libFn() visibility to external + (36..44, "external"), + // Replace contract A name to contract B + (80..90, "contract B"), + // Remove function c() + (159..222, ""), + // Replace function e() logic + (276..296, "// no logic"), + ]; + + assert_eq!( + replace_source_content(original_content, updates), + r#" +library Lib { + function libFn() external { + // logic to keep + } +} +contract B { + function a() external {} + function b() public {} + function d() private {} + function e() external { + // no logic + } +}"# + ); + } } diff --git a/crates/compilers/src/project_util/mod.rs b/crates/compilers/src/project_util/mod.rs index f8844aeb8..6473dc62c 100644 --- a/crates/compilers/src/project_util/mod.rs +++ b/crates/compilers/src/project_util/mod.rs @@ -133,7 +133,12 @@ impl< T: ArtifactOutput + Default, > TempProject { - /// Makes sure all resources are created + /// Wraps an existing project in a temp dir. + pub fn from_project(inner: Project) -> std::result::Result { + Self::create_new(tempdir("tmp_project")?, inner) + } + + /// Makes sure all resources are created. pub fn create_new( root: TempDir, inner: Project, @@ -215,6 +220,14 @@ impl< &mut self.project_mut().paths } + /// Deletes the current project and copies it from `source`. 
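+ ///
+ /// Note that the project root is wiped first, so any files previously added to it
+ /// (e.g. via `copy_source`) are lost.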
+ pub fn copy_project_from(&self, source: &Path) -> Result<()> { + let root = self.root(); + std::fs::remove_dir_all(root).map_err(|e| SolcIoError::new(e, root))?; + std::fs::create_dir_all(root).map_err(|e| SolcIoError::new(e, root))?; + copy_dir(source, root) + } + /// Copies a single file into the projects source pub fn copy_source(&self, source: &Path) -> Result<()> { copy_file(source, &self.paths().sources) diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index ddd36dfeb..4b0c722c4 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -169,18 +169,18 @@ impl GraphEdges { } /// Returns all files imported by the given file - pub fn imports(&self, file: &Path) -> HashSet<&PathBuf> { + pub fn imports(&self, file: &Path) -> HashSet<&Path> { if let Some(start) = self.indices.get(file).copied() { - NodesIter::new(start, self).skip(1).map(move |idx| &self.rev_indices[&idx]).collect() + NodesIter::new(start, self).skip(1).map(move |idx| &*self.rev_indices[&idx]).collect() } else { HashSet::new() } } /// Returns all files that import the given file - pub fn importers(&self, file: &Path) -> HashSet<&PathBuf> { + pub fn importers(&self, file: &Path) -> HashSet<&Path> { if let Some(start) = self.indices.get(file).copied() { - self.rev_edges[start].iter().map(move |idx| &self.rev_indices[idx]).collect() + self.rev_edges[start].iter().map(move |idx| &*self.rev_indices[idx]).collect() } else { HashSet::new() } @@ -192,7 +192,7 @@ impl GraphEdges { } /// Returns the path of the given node - pub fn node_path(&self, id: usize) -> &PathBuf { + pub fn node_path(&self, id: usize) -> &Path { &self.rev_indices[&id] } @@ -327,7 +327,7 @@ impl> Graph { } /// Returns all files imported by the given file - pub fn imports(&self, path: &Path) -> HashSet<&PathBuf> { + pub fn imports(&self, path: &Path) -> HashSet<&Path> { self.edges.imports(path) } @@ -1121,7 +1121,7 @@ impl Node { &self.source.content } - pub fn unpack(&self) -> (&PathBuf, &Source) { + pub fn unpack(&self) -> (&Path, &Source) { (&self.path, &self.source) } } @@ -1199,8 +1199,8 @@ mod tests { let dapp_test = graph.node(1); assert_eq!(dapp_test.path, paths.sources.join("Dapp.t.sol")); assert_eq!( - dapp_test.data.imports.iter().map(|i| i.data().path()).collect::>(), - vec![&PathBuf::from("ds-test/test.sol"), &PathBuf::from("./Dapp.sol")] + dapp_test.data.imports.iter().map(|i| i.data().path()).collect::>(), + vec![Path::new("ds-test/test.sol"), Path::new("./Dapp.sol")] ); assert_eq!(graph.imported_nodes(1).to_vec(), vec![2, 0]); } diff --git a/crates/compilers/src/resolver/parse.rs b/crates/compilers/src/resolver/parse.rs index 4f93c230b..0627bb01c 100644 --- a/crates/compilers/src/resolver/parse.rs +++ b/crates/compilers/src/resolver/parse.rs @@ -8,6 +8,7 @@ use std::{ /// Represents various information about a Solidity file. 
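+///
+/// Marked `#[non_exhaustive]` below, so downstream crates must obtain instances from
+/// the parsing API rather than constructing them with a struct literal.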
#[derive(Clone, Debug)] +#[non_exhaustive] pub struct SolData { pub license: Option>, pub version: Option>, @@ -50,19 +51,8 @@ impl SolData { let mut contract_names = Vec::new(); let mut parse_result = Ok(()); - let sess = solar_parse::interface::Session::builder() - .with_buffer_emitter(Default::default()) - .build(); - sess.enter(|| { - let arena = ast::Arena::new(); - let filename = solar_parse::interface::source_map::FileName::Real(file.to_path_buf()); - let Ok(mut parser) = - solar_parse::Parser::from_source_code(&sess, &arena, filename, content.to_string()) - else { - return; - }; - let Ok(ast) = parser.parse_file().map_err(|e| e.emit()) else { return }; - for item in ast.items { + let result = crate::parse_one_source(content, file, |ast| { + for item in ast.items.iter() { let loc = item.span.lo().to_usize()..item.span.hi().to_usize(); match &item.kind { ast::ItemKind::Pragma(pragma) => match &pragma.tokens { @@ -82,9 +72,9 @@ impl SolData { ast::ItemKind::Import(import) => { let path = import.path.value.to_string(); let aliases = match &import.items { - ast::ImportItems::Plain(None) | ast::ImportItems::Glob(None) => &[][..], + ast::ImportItems::Plain(None) => &[][..], ast::ImportItems::Plain(Some(alias)) - | ast::ImportItems::Glob(Some(alias)) => &[(*alias, None)][..], + | ast::ImportItems::Glob(alias) => &[(*alias, None)][..], ast::ImportItems::Aliases(aliases) => aliases, }; let sol_import = SolImport::new(PathBuf::from(path)).set_aliases( @@ -113,7 +103,7 @@ impl SolData { } } }); - if let Err(e) = sess.emitted_errors().unwrap() { + if let Err(e) = result { let e = e.to_string(); trace!("failed parsing {file:?}: {e}"); parse_result = Err(e); @@ -205,11 +195,11 @@ impl SolImport { Self { path, aliases: vec![] } } - pub fn path(&self) -> &PathBuf { + pub fn path(&self) -> &Path { &self.path } - pub fn aliases(&self) -> &Vec { + pub fn aliases(&self) -> &[SolImportAlias] { &self.aliases } diff --git a/crates/compilers/tests/project.rs b/crates/compilers/tests/project.rs index ef84d65ad..d8c085290 100644 --- a/crates/compilers/tests/project.rs +++ b/crates/compilers/tests/project.rs @@ -14,7 +14,8 @@ use foundry_compilers::{ }, flatten::Flattener, info::ContractInfo, - multi::MultiCompilerRestrictions, + multi::{MultiCompilerInput, MultiCompilerRestrictions}, + project::{Preprocessor, ProjectCompiler}, project_util::*, solc::{Restriction, SolcRestrictions, SolcSettings}, take_solc_installer_lock, Artifact, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, @@ -34,6 +35,7 @@ use semver::Version; use similar_asserts::assert_eq; use std::{ collections::{BTreeMap, BTreeSet, HashMap, HashSet}, + env, fs::{self}, io, path::{Path, PathBuf, MAIN_SEPARATOR}, @@ -4154,3 +4156,46 @@ contract A { } ); }); } + +#[test] +fn can_preprocess() { + #[derive(Debug)] + struct SimplePreprocessor(tempfile::NamedTempFile); + + impl Preprocessor for SimplePreprocessor { + fn preprocess( + &self, + _compiler: &MultiCompiler, + input: &mut MultiCompilerInput, + _paths: &ProjectPathsConfig, + mocks: &mut HashSet, + ) -> foundry_compilers::error::Result<()> { + let MultiCompilerInput::Solc(input) = input else { + return Ok(()); + }; + for src in input.input.sources.values_mut() { + src.content = src.content.replace("++", "--").into(); + } + mocks.insert(self.0.path().to_path_buf()); + Ok(()) + } + } + + let root = + canonicalize(Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/preprocessor")) + .unwrap(); + + let project = TempProject::::dapptools().unwrap(); + 
project.copy_project_from(&root).unwrap(); + let r = ProjectCompiler::new(project.project()) + .unwrap() + .with_preprocessor(SimplePreprocessor(tempfile::NamedTempFile::new().unwrap())) + .compile(); + + let compiled = match r { + Ok(compiled) => compiled, + Err(e) => panic!("failed to compile: {e}"), + }; + compiled.assert_success(); + assert!(!compiled.is_unchanged()); +} diff --git a/crates/core/src/error.rs b/crates/core/src/error.rs index 0cecacf2c..b6526f004 100644 --- a/crates/core/src/error.rs +++ b/crates/core/src/error.rs @@ -72,6 +72,9 @@ pub enum SolcError { #[error("no artifact found for `{}:{}`", .0.display(), .1)] ArtifactNotFound(PathBuf, String), + #[error(transparent)] + Fmt(#[from] std::fmt::Error), + #[cfg(feature = "project-util")] #[error(transparent)] FsExtra(#[from] fs_extra::error::Error), diff --git a/crates/core/src/utils/mod.rs b/crates/core/src/utils/mod.rs index 2c6cc22b9..9f0f4216c 100644 --- a/crates/core/src/utils/mod.rs +++ b/crates/core/src/utils/mod.rs @@ -566,10 +566,10 @@ mod tests { // // `dir_path` // ├── dependency - // │   └── Math.sol + // │ └── Math.sol // └── project // ├── node_modules - // │   └── dependency -> symlink to actual 'dependency' directory + // │ └── dependency -> symlink to actual 'dependency' directory // └── src (`cwd`) // └── Token.sol diff --git a/crates/core/src/utils/re.rs b/crates/core/src/utils/re.rs index 6136e5d29..f946fd813 100644 --- a/crates/core/src/utils/re.rs +++ b/crates/core/src/utils/re.rs @@ -27,6 +27,9 @@ pub static RE_SOL_SDPX_LICENSE_IDENTIFIER: Lazy = /// A regex used to remove extra lines in flatenned files pub static RE_THREE_OR_MORE_NEWLINES: Lazy = Lazy::new(|| Regex::new("\n{3,}").unwrap()); +/// A regex used to remove extra lines in flatenned files +pub static RE_TWO_OR_MORE_SPACES: Lazy = Lazy::new(|| Regex::new(" {2,}").unwrap()); + /// A regex that matches version pragma in a Vyper pub static RE_VYPER_VERSION: Lazy = Lazy::new(|| Regex::new(r"#(?:pragma version|@version)\s+(?P.+)").unwrap()); diff --git a/test-data/preprocessor/src/Counter.sol b/test-data/preprocessor/src/Counter.sol new file mode 100644 index 000000000..d7d69d250 --- /dev/null +++ b/test-data/preprocessor/src/Counter.sol @@ -0,0 +1,12 @@ +// Contract without constructor +contract Counter { + uint256 public number; + + function setNumber(uint256 newNumber) public { + number = newNumber; + } + + function increment() public { + number++; + } +} diff --git a/test-data/preprocessor/src/CounterB.sol b/test-data/preprocessor/src/CounterB.sol new file mode 100644 index 000000000..20f72922c --- /dev/null +++ b/test-data/preprocessor/src/CounterB.sol @@ -0,0 +1,8 @@ +// Contract without constructor +contract CounterB { + uint256 public number; + + constructor(address a, uint256 b, + bool c, + address d) {} +} diff --git a/test-data/preprocessor/src/CounterC.sol b/test-data/preprocessor/src/CounterC.sol new file mode 100644 index 000000000..2d5f51581 --- /dev/null +++ b/test-data/preprocessor/src/CounterC.sol @@ -0,0 +1,10 @@ +// Contract without constructor +contract CounterC { + struct CounterCStruct { + address a; + bool b; + } + uint256 public number; + + constructor(string memory _name, uint _age, address _wallet) {} +} diff --git a/test-data/preprocessor/src/CounterD.sol b/test-data/preprocessor/src/CounterD.sol new file mode 100644 index 000000000..0b11bf307 --- /dev/null +++ b/test-data/preprocessor/src/CounterD.sol @@ -0,0 +1,4 @@ +// Contract with constructor args without name +contract CounterD { + constructor(address, uint256 x, 
uint256) {} +} diff --git a/test-data/preprocessor/src/CounterE.sol b/test-data/preprocessor/src/CounterE.sol new file mode 100644 index 000000000..a7f5e5a42 --- /dev/null +++ b/test-data/preprocessor/src/CounterE.sol @@ -0,0 +1,13 @@ +// Contracts with payable constructor +contract CounterE { + constructor() payable {} +} + +contract CounterF { + constructor(uint256 x) payable {} +} + +contract CounterG { + constructor(address) payable {} +} + diff --git a/test-data/preprocessor/src/v1/Counter.sol b/test-data/preprocessor/src/v1/Counter.sol new file mode 100644 index 000000000..e302c8b3f --- /dev/null +++ b/test-data/preprocessor/src/v1/Counter.sol @@ -0,0 +1,12 @@ +// Same as Counter but different version. Test preprocessor aliased imports. +contract Counter { + uint256 public number; + + function setNumber(uint256 newNumber) public { + number = newNumber; + } + + function increment() public { + number++; + } +} diff --git a/test-data/preprocessor/test/CounterTest.sol b/test-data/preprocessor/test/CounterTest.sol new file mode 100644 index 000000000..7eb79bceb --- /dev/null +++ b/test-data/preprocessor/test/CounterTest.sol @@ -0,0 +1,33 @@ +import {Counter} from "src/Counter.sol"; +import {Counter as CounterV1} from "src/v1/Counter.sol"; +import "src/CounterB.sol"; +import "src/CounterC.sol"; +import "src/CounterD.sol"; +import "src/CounterE.sol"; + +contract CounterTest { + Counter public counter; + Counter public counter2 = new Counter(); + CounterB public counter3 = new CounterB(address(this), 44, true, address(this)); + CounterB public counter4 = new CounterB({a:address(this), b: 44, c: true, d: address(this)}); + CounterV1 public counterv1; + Counter public counter5 = new Counter{salt: bytes32("123")}(); + CounterB public counter6 = new CounterB {salt: bytes32("123")} ( address(this), 44, true, address(this)); + CounterE public counter7 = new CounterE{ value: 111, salt: bytes32("123")}(); + CounterF public counter8 = new CounterF{value: 222, salt: bytes32("123")}(11); + CounterG public counter9 = new CounterG { value: 333, salt: bytes32("123") } ( address(this)); + CounterG public counter10 = new CounterG{ value: 333 }(address(this)); + + function setUp() public { + counter = new Counter(); + counterv1 = new CounterV1( ); + type(CounterV1).creationCode; + CounterB counterB = new CounterB(address(this), 15, false, address(counter)); + CounterC counterC = new CounterC( + "something", + 35, + address(this) + ); + CounterD counterD = new CounterD(address(this), 15, 15); + } +} \ No newline at end of file