diff --git a/Cargo.toml b/Cargo.toml index e0569f7ba..ebc76d0aa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ resolver = "2" [workspace.package] authors = ["Foundry Maintainers"] -version = "0.18.2" +version = "0.18.4" rust-version = "1.88" readme = "README.md" license = "MIT OR Apache-2.0" @@ -36,11 +36,11 @@ redundant-lifetimes = "warn" all = "warn" [workspace.dependencies] -foundry-compilers = { path = "crates/compilers", version = "0.18.2" } -foundry-compilers-artifacts = { path = "crates/artifacts/artifacts", version = "0.18.2" } -foundry-compilers-artifacts-solc = { path = "crates/artifacts/solc", version = "0.18.2" } -foundry-compilers-artifacts-vyper = { path = "crates/artifacts/vyper", version = "0.18.2" } -foundry-compilers-core = { path = "crates/core", version = "0.18.2" } +foundry-compilers = { path = "crates/compilers", version = "0.18.4" } +foundry-compilers-artifacts = { path = "crates/artifacts/artifacts", version = "0.18.4" } +foundry-compilers-artifacts-solc = { path = "crates/artifacts/solc", version = "0.18.4" } +foundry-compilers-artifacts-vyper = { path = "crates/artifacts/vyper", version = "0.18.4" } +foundry-compilers-core = { path = "crates/core", version = "0.18.4" } alloy-json-abi = { version = "1.3", features = ["serde_json"] } alloy-primitives = { version = "1.3", features = ["serde", "rand"] } @@ -54,8 +54,8 @@ semver = { version = "1.0", features = ["serde"] } serde = { version = "1", features = ["derive", "rc"] } serde_json = "1.0" similar-asserts = "1" -solar-parse = { version = "=0.1.5", default-features = false } -solar-sema = { version = "=0.1.5", default-features = false } +solar-parse = { version = "=0.1.6", default-features = false } +solar-sema = { version = "=0.1.6", default-features = false } svm = { package = "svm-rs", version = "0.5", default-features = false } tempfile = "3.20" thiserror = "2" @@ -69,7 +69,7 @@ tokio = { version = "1.47", features = ["rt-multi-thread"] } snapbox = "0.6.21" -# [patch.crates-io] +[patch.crates-io] # solar-parse = { git = "https://github.com/paradigmxyz/solar", branch = "main" } # solar-sema = { git = "https://github.com/paradigmxyz/solar", branch = "main" } # solar-ast = { git = "https://github.com/paradigmxyz/solar", branch = "main" } diff --git a/crates/artifacts/solc/src/sources.rs b/crates/artifacts/solc/src/sources.rs index c10191831..b8ac1c821 100644 --- a/crates/artifacts/solc/src/sources.rs +++ b/crates/artifacts/solc/src/sources.rs @@ -1,4 +1,4 @@ -use foundry_compilers_core::error::SolcIoError; +use foundry_compilers_core::error::{SolcError, SolcIoError}; use serde::{Deserialize, Serialize}; use std::{ collections::BTreeMap, @@ -137,6 +137,30 @@ impl Source { Ok(Self::new(content)) } + /// [`read`](Self::read) + mapping error to [`SolcError`]. + pub fn read_(file: &Path) -> Result { + Self::read(file).map_err(|err| { + let exists = err.path().exists(); + if !exists && err.path().is_symlink() { + return SolcError::ResolveBadSymlink(err); + } + + // This is an additional check useful on OS that have case-sensitive paths, + // see also + // check if there exists a file with different case + #[cfg(feature = "walkdir")] + if !exists { + if let Some(existing_file) = + foundry_compilers_core::utils::find_case_sensitive_existing_file(file) + { + return SolcError::ResolveCaseSensitiveFileName { error: err, existing_file }; + } + } + + SolcError::Resolve(err) + }) + } + /// Returns `true` if the source should be compiled with full output selection. 
pub fn is_dirty(&self) -> bool { self.kind.is_dirty() diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs index 07313a37c..90e50d80b 100644 --- a/crates/compilers/src/cache.rs +++ b/crates/compilers/src/cache.rs @@ -6,7 +6,7 @@ use crate::{ output::Builds, resolver::GraphEdges, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Graph, OutputContext, Project, - ProjectPaths, ProjectPathsConfig, SourceCompilationKind, + ProjectPaths, ProjectPathsConfig, SourceCompilationKind, SourceParser, }; use foundry_compilers_artifacts::{ sources::{Source, Sources}, @@ -658,7 +658,7 @@ pub(crate) struct ArtifactsCacheInner< pub cached_builds: Builds, /// Relationship between all the files. - pub edges: GraphEdges, + pub edges: GraphEdges, /// The project. pub project: &'a Project, @@ -723,6 +723,7 @@ impl, C: Compiler> /// Gets or calculates the interface representation hash for the given source file. fn interface_repr_hash(&mut self, source: &Source, file: &Path) -> &str { self.interface_repr_hashes.entry(file.to_path_buf()).or_insert_with(|| { + // TODO: use `interface_representation_ast` directly with `edges.parser()`. if let Some(r) = interface_repr_hash(&source.content, file) { return r; } @@ -823,10 +824,10 @@ impl, C: Compiler> // Walks over all cache entries, detects dirty files and removes them from cache. fn find_and_remove_dirty(&mut self) { - fn populate_dirty_files( + fn populate_dirty_files( file: &Path, dirty_files: &mut HashSet, - edges: &GraphEdges, + edges: &GraphEdges
<P>
, ) { for file in edges.importers(file) { // If file is marked as dirty we either have already visited it or it was marked as @@ -890,7 +891,7 @@ impl, C: Compiler> // Build a temporary graph for walking imports. We need this because `self.edges` // only contains graph data for in-scope sources but we are operating on cache entries. - if let Ok(graph) = Graph::::resolve_sources(&self.project.paths, sources) { + if let Ok(graph) = Graph::::resolve_sources(&self.project.paths, sources) { let (sources, edges) = graph.into_sources(); // Calculate content hashes for later comparison. @@ -1020,7 +1021,7 @@ pub(crate) enum ArtifactsCache< C: Compiler, > { /// Cache nothing on disk - Ephemeral(GraphEdges, &'a Project), + Ephemeral(GraphEdges, &'a Project), /// Handles the actual cached artifacts, detects artifacts that can be reused Cached(ArtifactsCacheInner<'a, T, C>), } @@ -1032,7 +1033,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> #[instrument(name = "ArtifactsCache::new", skip(project, edges))] pub fn new( project: &'a Project, - edges: GraphEdges, + edges: GraphEdges, preprocessed: bool, ) -> Result { /// Returns the [CompilerCache] to use @@ -1117,7 +1118,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> } /// Returns the graph data for this project - pub fn graph(&self) -> &GraphEdges { + pub fn graph(&self) -> &GraphEdges { match self { ArtifactsCache::Ephemeral(graph, _) => graph, ArtifactsCache::Cached(inner) => &inner.edges, @@ -1191,18 +1192,22 @@ impl<'a, T: ArtifactOutput, C: Compiler> /// /// Returns all the _cached_ artifacts. #[instrument(name = "ArtifactsCache::consume", skip_all)] + #[allow(clippy::type_complexity)] pub fn consume( self, written_artifacts: &Artifacts, written_build_infos: &Vec>, write_to_disk: bool, - ) -> Result<(Artifacts, Builds)> + ) -> Result<(Artifacts, Builds, GraphEdges)> where T: ArtifactOutput, { - let ArtifactsCache::Cached(cache) = self else { - trace!("no cache configured, ephemeral"); - return Ok(Default::default()); + let cache = match self { + ArtifactsCache::Ephemeral(edges, _project) => { + trace!("no cache configured, ephemeral"); + return Ok((Default::default(), Default::default(), edges)); + } + ArtifactsCache::Cached(cache) => cache, }; let ArtifactsCacheInner { @@ -1212,7 +1217,9 @@ impl<'a, T: ArtifactOutput, C: Compiler> dirty_sources, sources_in_scope, project, - .. + edges, + content_hashes: _, + interface_repr_hashes: _, } = cache; // Remove cached artifacts which are out of scope, dirty or appear in `written_artifacts`. @@ -1264,7 +1271,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> cache.write(project.cache_path())?; } - Ok((cached_artifacts, cached_builds)) + Ok((cached_artifacts, cached_builds, edges)) } /// Marks the cached entry as seen by the compiler, if it's cached. 
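Editor's note on the `consume` change above: both the ephemeral and the cached arm now hand the `GraphEdges` back to the caller instead of dropping them. A minimal sketch of the call-site shape this enables, mirroring `crates/compilers/src/compile/project.rs` later in this diff:

```rust
// Illustrative only; the real call site is in compile/project.rs below.
let (cached_artifacts, cached_builds, edges) =
    cache.consume(&compiled_artifacts, &output.build_infos, !skip_write_to_disk)?;
// `edges` is subsequently stored on `ProjectCompileOutput`, so the resolved
// source graph (and the parser that produced it) outlives compilation.
```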
diff --git a/crates/compilers/src/cache/iface.rs b/crates/compilers/src/cache/iface.rs index ff29be147..aea2e4393 100644 --- a/crates/compilers/src/cache/iface.rs +++ b/crates/compilers/src/cache/iface.rs @@ -1,5 +1,5 @@ use crate::{parse_one_source, replace_source_content}; -use solar_sema::{ +use solar_parse::{ ast::{self, Span}, interface::diagnostics::EmittedDiagnostics, }; @@ -11,7 +11,7 @@ pub(crate) fn interface_repr_hash(content: &str, path: &Path) -> Option } pub(crate) fn interface_repr(content: &str, path: &Path) -> Result { - parse_one_source(content, path, |ast| interface_representation_ast(content, &ast)) + parse_one_source(content, path, |sess, ast| interface_representation_ast(content, sess, ast)) } /// Helper function to remove parts of the contract which do not alter its interface: @@ -21,6 +21,7 @@ pub(crate) fn interface_repr(content: &str, path: &Path) -> Result, ) -> String { let mut spans_to_remove: Vec = Vec::new(); @@ -57,9 +58,9 @@ pub(crate) fn interface_representation_ast( } } } - let content = - replace_source_content(content, spans_to_remove.iter().map(|span| (span.to_range(), ""))) - .replace("\n", ""); + let updates = + spans_to_remove.iter().map(|&span| (sess.source_map().span_to_source(span).unwrap().1, "")); + let content = replace_source_content(content, updates).replace("\n", ""); crate::utils::RE_TWO_OR_MORE_SPACES.replace_all(&content, "").into_owned() } diff --git a/crates/compilers/src/compile/output/mod.rs b/crates/compilers/src/compile/output/mod.rs index fcc014840..d20681b2c 100644 --- a/crates/compilers/src/compile/output/mod.rs +++ b/crates/compilers/src/compile/output/mod.rs @@ -19,6 +19,7 @@ use crate::{ compilers::{ multi::MultiCompiler, CompilationError, Compiler, CompilerContract, CompilerOutput, }, + resolver::GraphEdges, Artifact, ArtifactId, ArtifactOutput, Artifacts, ConfigurableArtifacts, }; @@ -62,7 +63,7 @@ impl IntoIterator for Builds { /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. -#[derive(Clone, Debug, Default, PartialEq, Eq)] +#[derive(Clone, Debug, Default)] pub struct ProjectCompileOutput< C: Compiler = MultiCompiler, T: ArtifactOutput = ConfigurableArtifacts, @@ -81,11 +82,23 @@ pub struct ProjectCompileOutput< pub(crate) compiler_severity_filter: Severity, /// all build infos that were just compiled pub(crate) builds: Builds, + /// The relationship between the source files and their imports + pub(crate) edges: GraphEdges, } impl, C: Compiler> ProjectCompileOutput { + /// Returns the parser used to parse the sources. + pub fn parser(&self) -> &C::Parser { + self.edges.parser() + } + + /// Returns the parser used to parse the sources. + pub fn parser_mut(&mut self) -> &mut C::Parser { + self.edges.parser_mut() + } + /// Converts all `\\` separators in _all_ paths to `/` #[instrument(skip_all)] pub fn slash_paths(&mut self) { @@ -460,6 +473,11 @@ impl, C: Compiler> pub fn builds(&self) -> impl Iterator)> { self.builds.iter() } + + /// Returns the source graph of the project. 
+ pub fn graph(&self) -> &GraphEdges { + &self.edges + } } impl> diff --git a/crates/compilers/src/compile/project.rs b/crates/compilers/src/compile/project.rs index 32859e72a..cbfbf7070 100644 --- a/crates/compilers/src/compile/project.rs +++ b/crates/compilers/src/compile/project.rs @@ -146,7 +146,7 @@ pub struct ProjectCompiler< C: Compiler, > { /// Contains the relationship of the source files and their imports - edges: GraphEdges, + edges: GraphEdges, project: &'a Project, /// A mapping from a source file path to the primary profile name selected for it. primary_profiles: HashMap, @@ -381,7 +381,7 @@ impl, C: Compiler> let skip_write_to_disk = project.no_artifacts || has_error; trace!(has_error, project.no_artifacts, skip_write_to_disk, cache_path=?project.cache_path(),"prepare writing cache file"); - let (cached_artifacts, cached_builds) = + let (cached_artifacts, cached_builds, edges) = cache.consume(&compiled_artifacts, &output.build_infos, !skip_write_to_disk)?; project.artifacts_handler().handle_cached_artifacts(&cached_artifacts)?; @@ -404,6 +404,7 @@ impl, C: Compiler> ignored_file_paths, compiler_severity_filter, builds, + edges, }) } } diff --git a/crates/compilers/src/compilers/mod.rs b/crates/compilers/src/compilers/mod.rs index 00810518f..565ffbf60 100644 --- a/crates/compilers/src/compilers/mod.rs +++ b/crates/compilers/src/compilers/mod.rs @@ -1,4 +1,4 @@ -use crate::ProjectPathsConfig; +use crate::{resolver::Node, ProjectPathsConfig}; use alloy_json_abi::JsonAbi; use core::fmt; use foundry_compilers_artifacts::{ @@ -9,6 +9,7 @@ use foundry_compilers_artifacts::{ BytecodeObject, CompactContractRef, Contract, FileToContractsMap, Severity, SourceFile, }; use foundry_compilers_core::error::Result; +use rayon::prelude::*; use semver::{Version, VersionReq}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ @@ -139,12 +140,40 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug { fn strip_prefix(&mut self, base: &Path); } +/// [`ParsedSource`] parser. +pub trait SourceParser: Clone + Debug + Send + Sync { + type ParsedSource: ParsedSource; + + /// Creates a new parser for the given config. + fn new(config: &ProjectPathsConfig) -> Self; + + /// Reads and parses the source file at the given path. + fn read(&mut self, path: &Path) -> Result> { + Node::read(path) + } + + /// Parses the sources in the given sources map. + fn parse_sources( + &mut self, + sources: &mut Sources, + ) -> Result)>> { + sources + .0 + .par_iter() + .map(|(path, source)| { + let data = Self::ParsedSource::parse(source.as_ref(), path)?; + Ok((path.clone(), Node::new(path.clone(), source.clone(), data))) + }) + .collect::>() + } +} + /// Parser of the source files which is used to identify imports and version requirements of the /// given source. /// /// Used by path resolver to resolve imports or determine compiler versions needed to compiler given /// sources. -pub trait ParsedSource: Debug + Sized + Send + Clone { +pub trait ParsedSource: Clone + Debug + Sized + Send { type Language: Language; /// Parses the content of the source file. @@ -331,7 +360,7 @@ pub trait Compiler: Send + Sync + Clone { /// Output data for each contract type CompilerContract: CompilerContract; /// Source parser used for resolving imports and version requirements. - type ParsedSource: ParsedSource; + type Parser: SourceParser>; /// Compiler settings. type Settings: CompilerSettings; /// Enum of languages supported by the compiler. 
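The new `SourceParser` trait above is the pivot of this refactor: `Compiler::ParsedSource` is replaced by `Compiler::Parser`, and a parser instance may now hold state across files (as `SolParser` does with its `solar_sema::Compiler`). A hedged sketch of driving a parser by hand, using the `MultiCompilerParser` defined in the next file; the file contents, the `root` argument, and the exact `ProjectPathsConfig` generics are assumptions (the config defaults to the multi-compiler language):

```rust
use foundry_compilers::{
    compilers::{ParsedSource, SourceParser},
    multi::MultiCompilerParser,
    ProjectPathsConfig,
};
use foundry_compilers_artifacts::sources::{Source, Sources};
use std::path::{Path, PathBuf};

fn demo(root: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let config: ProjectPathsConfig = ProjectPathsConfig::dapptools(root)?;
    let mut parser = MultiCompilerParser::new(&config);
    let mut sources = Sources::from_iter([(
        PathBuf::from("src/A.sol"),
        Source::new("pragma solidity ^0.8.0; contract A {}".to_string()),
    )]);
    // Solidity sources are parsed through solar; Vyper files would be split
    // off and handled separately (see `parse_sources` in the next file).
    for (path, node) in parser.parse_sources(&mut sources)? {
        println!("{}: {:?}", path.display(), node.data.version_req());
    }
    Ok(())
}
```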
diff --git a/crates/compilers/src/compilers/multi.rs b/crates/compilers/src/compilers/multi.rs index f729d508d..3f5ef5359 100644 --- a/crates/compilers/src/compilers/multi.rs +++ b/crates/compilers/src/compilers/multi.rs @@ -10,9 +10,11 @@ use super::{ }; use crate::{ artifacts::vyper::{VyperCompilationError, VyperSettings}, - resolver::parse::SolData, + parser::VyperParser, + resolver::parse::{SolData, SolParser}, settings::VyperRestrictions, solc::SolcRestrictions, + SourceParser, }; use foundry_compilers_artifacts::{ error::SourceLocation, @@ -66,6 +68,12 @@ pub enum MultiCompilerLanguage { Vyper(VyperLanguage), } +impl Default for MultiCompilerLanguage { + fn default() -> Self { + Self::Solc(SolcLanguage::Solidity) + } +} + impl MultiCompilerLanguage { pub fn is_vyper(&self) -> bool { matches!(self, Self::Vyper(_)) @@ -101,6 +109,35 @@ impl fmt::Display for MultiCompilerLanguage { } } +/// Source parser for the [`MultiCompiler`]. Recognizes Solc and Vyper sources. +#[derive(Clone, Debug)] +pub struct MultiCompilerParser { + solc: SolParser, + vyper: VyperParser, +} + +impl MultiCompilerParser { + /// Returns the parser used to parse Solc sources. + pub fn solc(&self) -> &SolParser { + &self.solc + } + + /// Returns the parser used to parse Solc sources. + pub fn solc_mut(&mut self) -> &mut SolParser { + &mut self.solc + } + + /// Returns the parser used to parse Vyper sources. + pub fn vyper(&self) -> &VyperParser { + &self.vyper + } + + /// Returns the parser used to parse Vyper sources. + pub fn vyper_mut(&mut self) -> &mut VyperParser { + &mut self.vyper + } +} + /// Source parser for the [MultiCompiler]. Recognizes Solc and Vyper sources. #[derive(Clone, Debug)] pub enum MultiCompilerParsedSource { @@ -287,7 +324,7 @@ impl CompilerInput for MultiCompilerInput { impl Compiler for MultiCompiler { type Input = MultiCompilerInput; type CompilationError = MultiCompilerError; - type ParsedSource = MultiCompilerParsedSource; + type Parser = MultiCompilerParser; type Settings = MultiCompilerSettings; type Language = MultiCompilerLanguage; type CompilerContract = Contract; @@ -327,20 +364,67 @@ impl Compiler for MultiCompiler { } } +impl SourceParser for MultiCompilerParser { + type ParsedSource = MultiCompilerParsedSource; + + fn new(config: &crate::ProjectPathsConfig) -> Self { + Self { solc: SolParser::new(config), vyper: VyperParser::new(config) } + } + + fn read(&mut self, path: &Path) -> Result> { + Ok(match guess_lang(path)? 
{ + MultiCompilerLanguage::Solc(_) => { + self.solc.read(path)?.map_data(MultiCompilerParsedSource::Solc) + } + MultiCompilerLanguage::Vyper(_) => { + self.vyper.read(path)?.map_data(MultiCompilerParsedSource::Vyper) + } + }) + } + + fn parse_sources( + &mut self, + sources: &mut Sources, + ) -> Result)>> { + let mut vyper = Sources::new(); + sources.retain(|path, source| { + if let Ok(lang) = guess_lang(path) { + match lang { + MultiCompilerLanguage::Solc(_) => {} + MultiCompilerLanguage::Vyper(_) => { + vyper.insert(path.clone(), source.clone()); + return false; + } + } + } + true + }); + + let solc_nodes = self.solc.parse_sources(sources)?; + let vyper_nodes = self.vyper.parse_sources(&mut vyper)?; + Ok(solc_nodes + .into_iter() + .map(|(k, v)| (k, v.map_data(MultiCompilerParsedSource::Solc))) + .chain( + vyper_nodes + .into_iter() + .map(|(k, v)| (k, v.map_data(MultiCompilerParsedSource::Vyper))), + ) + .collect()) + } +} + impl ParsedSource for MultiCompilerParsedSource { type Language = MultiCompilerLanguage; - fn parse(content: &str, file: &std::path::Path) -> Result { - let Some(extension) = file.extension().and_then(|e| e.to_str()) else { - return Err(SolcError::msg("failed to resolve file extension")); - }; - - if SOLC_EXTENSIONS.contains(&extension) { - ::parse(content, file).map(Self::Solc) - } else if VYPER_EXTENSIONS.contains(&extension) { - VyperParsedSource::parse(content, file).map(Self::Vyper) - } else { - Err(SolcError::msg("unexpected file extension")) + fn parse(content: &str, file: &Path) -> Result { + match guess_lang(file)? { + MultiCompilerLanguage::Solc(_) => { + ::parse(content, file).map(Self::Solc) + } + MultiCompilerLanguage::Vyper(_) => { + VyperParsedSource::parse(content, file).map(Self::Vyper) + } } } @@ -399,6 +483,24 @@ impl ParsedSource for MultiCompilerParsedSource { } } +fn guess_lang(path: &Path) -> Result { + let extension = path + .extension() + .and_then(|e| e.to_str()) + .ok_or_else(|| SolcError::msg("failed to resolve file extension"))?; + if SOLC_EXTENSIONS.contains(&extension) { + Ok(MultiCompilerLanguage::Solc(match extension { + "sol" => SolcLanguage::Solidity, + "yul" => SolcLanguage::Yul, + _ => unreachable!(), + })) + } else if VYPER_EXTENSIONS.contains(&extension) { + Ok(MultiCompilerLanguage::Vyper(VyperLanguage::default())) + } else { + Err(SolcError::msg("unexpected file extension")) + } +} + impl CompilationError for MultiCompilerError { fn is_warning(&self) -> bool { match self { diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index 7a2f166a3..ee31cf39c 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -2,8 +2,13 @@ use super::{ restrictions::CompilerSettingsRestrictions, CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, CompilerVersion, Language, ParsedSource, }; -use crate::resolver::parse::SolData; -pub use foundry_compilers_artifacts::SolcLanguage; +use crate::{ + resolver::{ + parse::{SolData, SolParser}, + Node, + }, + SourceParser, +}; use foundry_compilers_artifacts::{ error::SourceLocation, output_selection::OutputSelection, @@ -11,7 +16,8 @@ use foundry_compilers_artifacts::{ sources::{Source, Sources}, BytecodeHash, Contract, Error, EvmVersion, Settings, Severity, SolcInput, }; -use foundry_compilers_core::error::Result; +use foundry_compilers_core::error::{Result, SolcError, SolcIoError}; +use rayon::prelude::*; use semver::Version; use serde::{Deserialize, Serialize}; use std::{ @@ 
-20,6 +26,9 @@ use std::{ ops::{Deref, DerefMut}, path::{Path, PathBuf}, }; + +pub use foundry_compilers_artifacts::SolcLanguage; + mod compiler; pub use compiler::{Solc, SOLC_EXTENSIONS}; @@ -40,7 +49,7 @@ impl Language for SolcLanguage { impl Compiler for SolcCompiler { type Input = SolcVersionedInput; type CompilationError = Error; - type ParsedSource = SolData; + type Parser = SolParser; type Settings = SolcSettings; type Language = SolcLanguage; type CompilerContract = Contract; @@ -355,6 +364,91 @@ impl CompilerSettings for SolcSettings { } } +impl SourceParser for SolParser { + type ParsedSource = SolData; + + fn new(config: &crate::ProjectPathsConfig) -> Self { + Self { + compiler: solar_sema::Compiler::new(Self::session_with_opts( + solar_sema::interface::config::Opts { + include_paths: config.include_paths.iter().cloned().collect(), + base_path: Some(config.root.clone()), + import_remappings: config + .remappings + .iter() + .map(|r| solar_sema::interface::config::ImportRemapping { + context: r.context.clone().unwrap_or_default(), + prefix: r.name.clone(), + path: r.path.clone(), + }) + .collect(), + ..Default::default() + }, + )), + } + } + + fn read(&mut self, path: &Path) -> Result> { + let mut sources = Sources::from_iter([(path.to_path_buf(), Source::read_(path)?)]); + let nodes = self.parse_sources(&mut sources)?; + debug_assert_eq!(nodes.len(), 1, "{nodes:#?}"); + Ok(nodes.into_iter().next().unwrap().1) + } + + fn parse_sources( + &mut self, + sources: &mut Sources, + ) -> Result)>> { + self.compiler_mut().enter_mut(|compiler| { + let mut pcx = compiler.parse(); + let files = sources + .par_iter() + .map(|(path, source)| { + pcx.sess + .source_map() + .new_source_file(path.clone(), source.content.as_str()) + .map_err(|e| SolcError::Io(SolcIoError::new(e, path))) + }) + .collect::>>()?; + pcx.add_files(files); + pcx.parse(); + + let parsed = sources.par_iter().map(|(path, source)| { + let sf = compiler.sess().source_map().get_file(path).unwrap(); + let (_, s) = compiler.gcx().sources.get_file(&sf).unwrap(); + let node = Node::new( + path.clone(), + source.clone(), + SolData::parse_from(compiler.gcx().sess, s), + ); + (path.clone(), node) + }); + let mut parsed = parsed.collect::>(); + + // Set error on the first successful source, if any. This doesn't really have to be + // exact, as long as at least one source has an error set it should be enough. 
+ if let Some(Err(diag)) = compiler.gcx().sess.emitted_errors() { + if let Some(idx) = parsed + .iter() + .position(|(_, node)| node.data.parse_result.is_ok()) + .or_else(|| parsed.first().map(|_| 0)) + { + let (_, node) = &mut parsed[idx]; + node.data.parse_result = Err(diag.to_string()); + } + } + + for (path, node) in &parsed { + if let Err(e) = &node.data.parse_result { + debug!("failed parsing {}: {e}", path.display()); + } + } + + Ok(parsed) + }) + } +} + impl ParsedSource for SolData { type Language = SolcLanguage; diff --git a/crates/compilers/src/compilers/vyper/mod.rs b/crates/compilers/src/compilers/vyper/mod.rs index 6d85c499a..88034cb0a 100644 --- a/crates/compilers/src/compilers/vyper/mod.rs +++ b/crates/compilers/src/compilers/vyper/mod.rs @@ -1,6 +1,7 @@ -use self::{input::VyperVersionedInput, parser::VyperParsedSource}; +use self::input::VyperVersionedInput; use super::{Compiler, CompilerOutput, Language}; pub use crate::artifacts::vyper::{VyperCompilationError, VyperInput, VyperOutput, VyperSettings}; +use crate::parser::VyperParser; use core::fmt; use foundry_compilers_artifacts::{sources::Source, Contract}; use foundry_compilers_core::error::{Result, SolcError}; @@ -26,7 +27,7 @@ pub const VYPER_EXTENSIONS: &[&str] = &["vy", "vyi"]; pub const VYPER_INTERFACE_EXTENSION: &str = "vyi"; /// Vyper language, used as [Compiler::Language] for the Vyper compiler. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)] #[non_exhaustive] pub struct VyperLanguage; @@ -201,7 +202,7 @@ impl Vyper { impl Compiler for Vyper { type Settings = VyperSettings; type CompilationError = VyperCompilationError; - type ParsedSource = VyperParsedSource; + type Parser = VyperParser; type Input = VyperVersionedInput; type Language = VyperLanguage; type CompilerContract = Contract; diff --git a/crates/compilers/src/compilers/vyper/parser.rs b/crates/compilers/src/compilers/vyper/parser.rs index 2f524ff6b..d3602b077 100644 --- a/crates/compilers/src/compilers/vyper/parser.rs +++ b/crates/compilers/src/compilers/vyper/parser.rs @@ -1,7 +1,7 @@ use super::VyperLanguage; use crate::{ compilers::{vyper::VYPER_EXTENSIONS, ParsedSource}, - ProjectPathsConfig, + ProjectPathsConfig, SourceParser, }; use foundry_compilers_core::{ error::{Result, SolcError}, @@ -26,6 +26,19 @@ pub struct VyperImport { pub final_part: Option, } +#[derive(Clone, Debug, Default)] +pub struct VyperParser { + _inner: (), +} + +impl SourceParser for VyperParser { + type ParsedSource = VyperParsedSource; + + fn new(_config: &ProjectPathsConfig) -> Self { + Self { _inner: () } + } +} + #[derive(Clone, Debug)] pub struct VyperParsedSource { path: PathBuf, diff --git a/crates/compilers/src/config.rs b/crates/compilers/src/config.rs index 45dfd8dd7..3f3b5f451 100644 --- a/crates/compilers/src/config.rs +++ b/crates/compilers/src/config.rs @@ -2,7 +2,7 @@ use crate::{ cache::SOLIDITY_FILES_CACHE_FILENAME, compilers::{multi::MultiCompilerLanguage, Language}, flatten::{collect_ordered_deps, combine_version_pragmas}, - resolver::{parse::SolData, SolImportAlias}, + resolver::{parse::SolParser, SolImportAlias}, Graph, }; use foundry_compilers_artifacts::{ @@ -110,7 +110,7 @@ impl ProjectPathsConfig { } let sources = Source::read_all_files(input_files)?; - let graph = Graph::::resolve_sources(self, sources)?; + let graph = Graph::::resolve_sources(self, sources)?; let ordered_deps = collect_ordered_deps(&flatten_target, self, &graph)?; #[cfg(windows)] @@ -549,36 +549,14 @@ impl 
ProjectPathsConfig { } } - pub fn with_language(self) -> ProjectPathsConfig { - let Self { - root, - cache, - artifacts, - build_infos, - sources, - tests, - scripts, - libraries, - remappings, - include_paths, - allowed_paths, - _l, - } = self; + pub fn with_language_ref(&self) -> &ProjectPathsConfig { + // SAFETY: `Lang` is `PhantomData`. + unsafe { std::mem::transmute(self) } + } - ProjectPathsConfig { - root, - cache, - artifacts, - build_infos, - sources, - tests, - scripts, - libraries, - remappings, - include_paths, - allowed_paths, - _l: PhantomData, - } + pub fn with_language(self) -> ProjectPathsConfig { + // SAFETY: `Lang` is `PhantomData`. + unsafe { std::mem::transmute(self) } } pub fn apply_lib_remappings(&self, mut libraries: Libraries) -> Libraries { diff --git a/crates/compilers/src/filter.rs b/crates/compilers/src/filter.rs index 979b1e5be..aa25914a7 100644 --- a/crates/compilers/src/filter.rs +++ b/crates/compilers/src/filter.rs @@ -3,7 +3,7 @@ use crate::{ compilers::{multi::MultiCompilerParsedSource, CompilerSettings, ParsedSource}, resolver::{parse::SolData, GraphEdges}, - Sources, + SourceParser, Sources, }; use foundry_compilers_artifacts::output_selection::OutputSelection; use std::{ @@ -101,11 +101,11 @@ impl<'a> SparseOutputFilter<'a> { /// /// This also takes the project's graph as input, this allows us to check if the files the /// filter matches depend on libraries that need to be linked - pub fn sparse_sources( + pub fn sparse_sources( &self, sources: &Sources, settings: &mut S, - graph: &GraphEdges, + graph: &GraphEdges
<P>
, ) -> Vec { let mut full_compilation: HashSet = sources .dirty_files() diff --git a/crates/compilers/src/flatten.rs b/crates/compilers/src/flatten.rs index de629cca7..04a4564b6 100644 --- a/crates/compilers/src/flatten.rs +++ b/crates/compilers/src/flatten.rs @@ -3,7 +3,7 @@ use crate::{ compilers::{Compiler, ParsedSource}, filter::MaybeSolData, resolver::parse::SolData, - ArtifactOutput, CompilerSettings, Graph, Project, ProjectPathsConfig, Updates, + ArtifactOutput, CompilerSettings, Graph, Project, ProjectPathsConfig, SourceParser, Updates, }; use foundry_compilers_artifacts::{ ast::{visitor::Visitor, *}, @@ -192,7 +192,7 @@ impl Flattener { target: &Path, ) -> std::result::Result where - C::ParsedSource: MaybeSolData, + C::Parser: SourceParser, { // Configure project to compile the target file and only request AST for target file. project.cached = false; @@ -210,7 +210,7 @@ impl Flattener { let output = output.compiler_output; let sources = Source::read_all_files(vec![target.to_path_buf()])?; - let graph = Graph::::resolve_sources(&project.paths, sources)?; + let graph = Graph::::resolve_sources(&project.paths, sources)?; let ordered_sources = collect_ordered_deps(target, &project.paths, &graph)?; @@ -794,10 +794,10 @@ impl Flattener { } /// Performs DFS to collect all dependencies of a target -fn collect_deps( +fn collect_deps>( path: &Path, - paths: &ProjectPathsConfig, - graph: &Graph, + paths: &ProjectPathsConfig<::Language>, + graph: &Graph
<C::Parser>
, deps: &mut HashSet, ) -> Result<()> { if deps.insert(path.to_path_buf()) { @@ -830,10 +830,10 @@ fn collect_deps( /// Instead, we sort files by the number of their dependencies (imports of any depth) in ascending /// order. If files have the same number of dependencies, we sort them alphabetically. /// Target file is always placed last. -pub fn collect_ordered_deps( +pub fn collect_ordered_deps>( path: &Path, - paths: &ProjectPathsConfig, - graph: &Graph, + paths: &ProjectPathsConfig<::Language>, + graph: &Graph
<C::Parser>
, ) -> Result> { let mut deps = HashSet::new(); collect_deps(path, paths, graph, &mut deps)?; diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs index bd4bc98ac..ad0ee4b7f 100644 --- a/crates/compilers/src/lib.rs +++ b/crates/compilers/src/lib.rs @@ -64,8 +64,10 @@ use foundry_compilers_core::error::{Result, SolcError, SolcIoError}; use output::sources::{VersionedSourceFile, VersionedSourceFiles}; use project::ProjectCompiler; use semver::Version; -use solar_parse::Parser; -use solar_sema::interface::{diagnostics::EmittedDiagnostics, source_map::FileName, Session}; +use solar_parse::{ + interface::{diagnostics::EmittedDiagnostics, source_map::FileName, Session}, + Parser, +}; use solc::SolcSettings; use std::{ collections::{BTreeMap, BTreeSet, HashMap, HashSet}, @@ -173,7 +175,7 @@ where /// Returns standard-json-input to compile the target contract pub fn standard_json_input(&self, target: &Path) -> Result { trace!(?target, "Building standard-json-input"); - let graph = Graph::::resolve(&self.paths)?; + let graph = Graph::::resolve(&self.paths)?; let target_index = graph.files().get(target).ok_or_else(|| { SolcError::msg(format!("cannot resolve file at {:?}", target.display())) })?; @@ -389,7 +391,7 @@ impl, C: Compiler> Pro T: Clone, C: Clone, { - let graph = Graph::::resolve(&self.paths)?; + let graph = Graph::::resolve(&self.paths)?; let mut contracts: HashMap> = HashMap::new(); if !graph.is_empty() { for node in &graph.nodes { @@ -923,15 +925,15 @@ pub fn replace_source_content( pub(crate) fn parse_one_source( content: &str, path: &Path, - f: impl FnOnce(solar_sema::ast::SourceUnit<'_>) -> R, + f: impl FnOnce(&Session, &solar_parse::ast::SourceUnit<'_>) -> R, ) -> Result { let sess = Session::builder().with_buffer_emitter(Default::default()).build(); - let res = sess.enter(|| -> solar_parse::interface::Result<_> { + let res = sess.enter_sequential(|| -> solar_parse::interface::Result<_> { let arena = solar_parse::ast::Arena::new(); let filename = FileName::Real(path.to_path_buf()); let mut parser = Parser::from_source_code(&sess, &arena, filename, content.to_string())?; let ast = parser.parse_file().map_err(|e| e.emit())?; - Ok(f(ast)) + Ok(f(&sess, &ast)) }); // Return if any diagnostics emitted during content parsing. @@ -946,11 +948,10 @@ pub(crate) fn parse_one_source( #[cfg(test)] #[cfg(feature = "svm-solc")] mod tests { + use super::*; use foundry_compilers_artifacts::Remapping; use foundry_compilers_core::utils::{self, mkdir_or_touch, tempdir}; - use super::*; - #[test] #[cfg_attr(windows, ignore = "<0.7 solc is flaky")] fn test_build_all_versions() { diff --git a/crates/compilers/src/project_util/mock.rs b/crates/compilers/src/project_util/mock.rs index 0e9b843fc..0964cb769 100644 --- a/crates/compilers/src/project_util/mock.rs +++ b/crates/compilers/src/project_util/mock.rs @@ -10,9 +10,8 @@ use std::{ }; use crate::{ - compilers::{multi::MultiCompilerParsedSource, Language, ParsedSource}, - resolver::GraphEdges, - Graph, ProjectPathsConfig, + compilers::Language, multi::MultiCompilerParser, resolver::GraphEdges, Graph, + ProjectPathsConfig, SourceParser, }; /// Represents the layout of a project @@ -51,9 +50,9 @@ impl MockProjectGenerator { } /// Create a skeleton of a real project - pub fn create(paths: &ProjectPathsConfig) -> Result { - fn get_libs( - edges: &GraphEdges, + pub fn create(paths: &ProjectPathsConfig) -> Result { + fn get_libs( + edges: &GraphEdges
<P>
, lib_folder: &Path, ) -> Option>> { let mut libs: HashMap<_, Vec<_>> = HashMap::new(); @@ -65,7 +64,7 @@ impl MockProjectGenerator { Some(libs) } - let graph = Graph::::resolve(paths)?; + let graph = Graph::::resolve(paths)?; let mut generated = Self::default(); let (_, edges) = graph.into_sources(); diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 52157d530..feb4abc14 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -46,18 +46,17 @@ //! which is defined on a per source file basis. use crate::{ - compilers::{Compiler, CompilerVersion, Language, ParsedSource}, + compilers::{Compiler, CompilerVersion, ParsedSource}, project::VersionedSources, - ArtifactOutput, CompilerSettings, Project, ProjectPathsConfig, + resolver::parse::SolParser, + ArtifactOutput, CompilerSettings, Project, ProjectPathsConfig, SourceParser, }; use core::fmt; use foundry_compilers_artifacts::sources::{Source, Sources}; use foundry_compilers_core::{ error::{Result, SolcError}, - utils::{self, find_case_sensitive_existing_file}, + utils, }; -use parse::SolData; -use rayon::prelude::*; use semver::{Version, VersionReq}; use std::{ collections::{BTreeSet, HashMap, HashSet, VecDeque}, @@ -89,15 +88,15 @@ pub struct ResolvedSources<'a, C: Compiler> { /// a profile suffix. pub primary_profiles: HashMap, /// Graph edges. - pub edges: GraphEdges, + pub edges: GraphEdges, } /// The underlying edges of the graph which only contains the raw relationship data. /// /// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources` /// set is determined. -#[derive(Debug)] -pub struct GraphEdges { +#[derive(Clone, Debug)] +pub struct GraphEdges { /// The indices of `edges` correspond to the `nodes`. That is, `edges[0]` /// is the set of outgoing edges for `nodes[0]`. edges: Vec>, @@ -109,8 +108,10 @@ pub struct GraphEdges { rev_indices: HashMap, /// the identified version requirement of a file versions: HashMap>, - /// the extracted data from the source file - data: HashMap, + /// the extracted data from the source files + data: Vec, + /// The parser which parsed `data`. + parser: Option
<P>
, /// with how many input files we started with, corresponds to `let input_files = /// nodes[..num_input_files]`. /// @@ -127,7 +128,34 @@ pub struct GraphEdges { resolved_solc_include_paths: BTreeSet, } -impl GraphEdges { +impl Default for GraphEdges
<P>
{ + fn default() -> Self { + Self { + edges: Default::default(), + rev_edges: Default::default(), + indices: Default::default(), + rev_indices: Default::default(), + versions: Default::default(), + data: Default::default(), + parser: Default::default(), + num_input_files: Default::default(), + unresolved_imports: Default::default(), + resolved_solc_include_paths: Default::default(), + } + } +} + +impl GraphEdges
<P>
{ + /// Returns the parser used to parse the sources. + pub fn parser(&self) -> &P { + self.parser.as_ref().unwrap() + } + + /// Returns the parser used to parse the sources. + pub fn parser_mut(&mut self) -> &mut P { + self.parser.as_mut().unwrap() + } + /// How many files are source files pub fn num_source_files(&self) -> usize { self.num_input_files @@ -212,8 +240,11 @@ impl GraphEdges { } /// Returns the parsed source data for the given file - pub fn get_parsed_source(&self, file: &Path) -> Option<&D> { - self.indices.get(file).and_then(|idx| self.data.get(idx)) + pub fn get_parsed_source(&self, file: &Path) -> Option<&P::ParsedSource> + where + P: SourceParser, + { + self.indices.get(file).and_then(|idx| self.data.get(*idx)) } } @@ -223,16 +254,23 @@ impl GraphEdges { /// /// See also #[derive(Debug)] -pub struct Graph { +pub struct Graph { /// all nodes in the project, a `Node` represents a single file - pub nodes: Vec>, + pub nodes: Vec>, /// relationship of the nodes - edges: GraphEdges, + edges: GraphEdges
<P>
, /// the root of the project this graph represents root: PathBuf, } -impl> Graph { +type L
<P> = <<P as SourceParser>
::ParsedSource as ParsedSource>::Language; + +impl Graph
<P>
{ + /// Returns the parser used to parse the sources. + pub fn parser(&self) -> &P { + self.edges.parser() + } + /// Print the graph to `StdOut` pub fn print(&self) { self.print_with_options(Default::default()) @@ -275,11 +313,11 @@ impl> Graph { /// # Panics /// /// if the `index` node id is not included in the graph - pub fn node(&self, index: usize) -> &Node { + pub fn node(&self, index: usize) -> &Node { &self.nodes[index] } - pub(crate) fn display_node(&self, index: usize) -> DisplayNode<'_, D> { + pub(crate) fn display_node(&self, index: usize) -> DisplayNode<'_, P::ParsedSource> { DisplayNode { node: self.node(index), root: &self.root } } @@ -294,11 +332,11 @@ impl> Graph { } /// Same as `Self::node_ids` but returns the actual `Node` - pub fn nodes(&self, start: usize) -> impl Iterator> + '_ { + pub fn nodes(&self, start: usize) -> impl Iterator> + '_ { self.node_ids(start).map(move |idx| self.node(idx)) } - fn split(self) -> (Vec<(PathBuf, Source)>, GraphEdges) { + fn split(self) -> (Vec<(PathBuf, Source)>, GraphEdges
<P>
) { let Self { nodes, mut edges, .. } = self; // need to move the extracted data to the edges, essentially splitting the node so we have // access to the data at a later stage in the compile pipeline @@ -306,7 +344,9 @@ impl> Graph { for (idx, node) in nodes.into_iter().enumerate() { let Node { path, source, data } = node; sources.push((path, source)); - edges.data.insert(idx, data); + let idx2 = edges.data.len(); + edges.data.push(data); + assert_eq!(idx, idx2); } (sources, edges) @@ -314,7 +354,7 @@ impl> Graph { /// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and /// returning the `nodes` converted to `Sources` - pub fn into_sources(self) -> (Sources, GraphEdges) { + pub fn into_sources(self) -> (Sources, GraphEdges
<P>
) { let (sources, edges) = self.split(); (sources.into_iter().collect(), edges) } @@ -322,7 +362,7 @@ impl> Graph { /// Returns an iterator that yields only those nodes that represent input files. /// See `Self::resolve_sources` /// This won't yield any resolved library nodes - pub fn input_nodes(&self) -> impl Iterator> { + pub fn input_nodes(&self) -> impl Iterator> { self.nodes.iter().take(self.edges.num_input_files) } @@ -334,14 +374,15 @@ impl> Graph { /// Resolves a number of sources within the given config #[instrument(name = "Graph::resolve_sources", skip_all)] pub fn resolve_sources( - paths: &ProjectPathsConfig, - sources: Sources, + paths: &ProjectPathsConfig<::Language>, + mut sources: Sources, ) -> Result { /// checks if the given target path was already resolved, if so it adds its id to the list /// of resolved imports. If it hasn't been resolved yet, it queues in the file for /// processing - fn add_node( - unresolved: &mut VecDeque<(PathBuf, Node)>, + fn add_node( + parser: &mut P, + unresolved: &mut VecDeque<(PathBuf, Node)>, index: &mut HashMap, resolved_imports: &mut Vec, target: PathBuf, @@ -350,7 +391,7 @@ impl> Graph { resolved_imports.push(idx); } else { // imported file is not part of the input files - let node = Node::read(&target)?; + let node = parser.read(&target)?; unresolved.push_back((target.clone(), node)); let idx = index.len(); index.insert(target, idx); @@ -359,16 +400,11 @@ impl> Graph { Ok(()) } + let mut parser = P::new(paths.with_language_ref()); + // we start off by reading all input files, which includes all solidity files from the // source and test folder - let mut unresolved: VecDeque<_> = sources - .0 - .into_par_iter() - .map(|(path, source)| { - let data = D::parse(source.as_ref(), &path)?; - Ok((path.clone(), Node { path, source, data })) - }) - .collect::>()?; + let mut unresolved: VecDeque<_> = parser.parse_sources(&mut sources)?.into(); // identifiers of all resolved files let mut index: HashMap<_, _> = @@ -406,9 +442,14 @@ impl> Graph { &import_path, &mut resolved_solc_include_paths, ) { - Ok(import) => { - add_node(&mut unresolved, &mut index, &mut resolved_imports, import).err() - } + Ok(import) => add_node( + &mut parser, + &mut unresolved, + &mut index, + &mut resolved_imports, + import, + ) + .err(), Err(err) => Some(err), } { unresolved_imports.insert((import_path.to_path_buf(), node.path.clone())); @@ -452,6 +493,7 @@ impl> Graph { .map(|(idx, node)| (idx, node.data.version_req().cloned())) .collect(), data: Default::default(), + parser: Some(parser), unresolved_imports, resolved_solc_include_paths, }; @@ -459,12 +501,12 @@ impl> Graph { } /// Resolves the dependencies of a project's source contracts - pub fn resolve(paths: &ProjectPathsConfig) -> Result { + pub fn resolve( + paths: &ProjectPathsConfig<::Language>, + ) -> Result { Self::resolve_sources(paths, paths.read_input_files()?) 
} -} -impl> Graph { /// Consumes the nodes of the graph and returns all input files together with their appropriate /// version and the edges of the graph /// @@ -476,7 +518,7 @@ impl> Graph { ) -> Result> where T: ArtifactOutput, - C: Compiler, + C: Compiler::Language>, { /// insert the imports of the given node into the sources map /// There can be following graph: @@ -788,7 +830,7 @@ impl> Graph { Err(msg) } - fn input_nodes_by_language(&self) -> HashMap> { + fn input_nodes_by_language(&self) -> HashMap, Vec> { let mut nodes = HashMap::new(); for idx in 0..self.edges.num_input_files { @@ -808,13 +850,14 @@ impl> Graph { /// /// This also attempts to prefer local installations over remote available. /// If `offline` is set to `true` then only already installed. + #[allow(clippy::type_complexity)] fn get_input_node_versions< - C: Compiler, + C: Compiler>, T: ArtifactOutput, >( &self, project: &Project, - ) -> Result>>> { + ) -> Result, HashMap>>> { trace!("resolving input node versions"); let mut resulted_nodes = HashMap::new(); @@ -910,13 +953,13 @@ impl> Graph { #[allow(clippy::complexity)] fn resolve_settings< - C: Compiler, + C: Compiler>, T: ArtifactOutput, >( &self, project: &Project, - input_nodes_versions: HashMap>>, - ) -> Result>>>> { + input_nodes_versions: HashMap, HashMap>>, + ) -> Result, HashMap>>>> { let mut resulted_sources = HashMap::new(); let mut errors = Vec::new(); for (language, versions) in input_nodes_versions { @@ -1034,20 +1077,20 @@ impl> Graph { /// An iterator over a node and its dependencies #[derive(Debug)] -pub struct NodesIter<'a, D> { +pub struct NodesIter<'a, P: SourceParser> { /// stack of nodes stack: VecDeque, visited: HashSet, - graph: &'a GraphEdges, + graph: &'a GraphEdges
<P>
, } -impl<'a, D> NodesIter<'a, D> { - fn new(start: usize, graph: &'a GraphEdges) -> Self { +impl<'a, P: SourceParser> NodesIter<'a, P> { + fn new(start: usize, graph: &'a GraphEdges
<P>
) -> Self { Self { stack: VecDeque::from([start]), visited: HashSet::new(), graph } } } -impl Iterator for NodesIter<'_, D> { +impl Iterator for NodesIter<'_, P> { type Item = usize; fn next(&mut self) -> Option { let node = self.stack.pop_front()?; @@ -1061,38 +1104,35 @@ impl Iterator for NodesIter<'_, D> { } #[derive(Debug)] -pub struct Node { +pub struct Node { /// path of the solidity file path: PathBuf, /// content of the solidity file source: Source, /// parsed data - pub data: D, + pub data: S, } -impl Node { +impl Node { + pub fn new(path: PathBuf, source: Source, data: S) -> Self { + Self { path, source, data } + } + + pub fn map_data(self, f: impl FnOnce(S) -> T) -> Node { + Node::new(self.path, self.source, f(self.data)) + } +} + +impl Node { /// Reads the content of the file and returns a [Node] containing relevant information pub fn read(file: &Path) -> Result { - let source = Source::read(file).map_err(|err| { - let exists = err.path().exists(); - if !exists && err.path().is_symlink() { - SolcError::ResolveBadSymlink(err) - } else { - // This is an additional check useful on OS that have case-sensitive paths, See also - if !exists { - // check if there exists a file with different case - if let Some(existing_file) = find_case_sensitive_existing_file(file) { - SolcError::ResolveCaseSensitiveFileName { error: err, existing_file } - } else { - SolcError::Resolve(err) - } - } else { - SolcError::Resolve(err) - } - } - })?; - let data = D::parse(source.as_ref(), file)?; - Ok(Self { path: file.to_path_buf(), source, data }) + let source = Source::read_(file)?; + Self::parse(file, source) + } + + pub fn parse(file: &Path, source: Source) -> Result { + let data = S::parse(source.as_ref(), file)?; + Ok(Self::new(file.to_path_buf(), source, data)) } /// Returns the path of the file. 
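The `Node` refactor above splits reading from parsing. A small hedged sketch of the two entry points (file paths are illustrative; `Source::read_` is the new helper from `sources.rs` at the top of this diff):

```rust
use foundry_compilers::{
    compilers::ParsedSource,
    resolver::{parse::SolData, Node},
};
use foundry_compilers_artifacts::sources::Source;
use foundry_compilers_core::error::Result;
use std::path::Path;

fn demo() -> Result<()> {
    // Read from disk and parse in one step; IO failures now arrive
    // pre-classified by `Source::read_` (bad symlink, wrong-case filename, ...).
    let node: Node<SolData> = Node::read(Path::new("src/Lib.sol"))?;
    println!("{}", node.path().display());
    // Or parse an in-memory `Source` without touching the filesystem.
    let source = Source::new("contract Lib {}".to_string());
    let parsed = Node::<SolData>::parse(Path::new("src/Lib.sol"), source)?;
    println!("{:?}", parsed.data.version_req());
    Ok(())
}
```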
@@ -1111,12 +1151,12 @@ impl Node { } /// Helper type for formatting a node -pub(crate) struct DisplayNode<'a, D> { - node: &'a Node, +pub(crate) struct DisplayNode<'a, S> { + node: &'a Node, root: &'a PathBuf, } -impl fmt::Display for DisplayNode<'_, D> { +impl fmt::Display for DisplayNode<'_, S> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let path = utils::source_name(&self.node.path, self.root); write!(f, "{}", path.display())?; @@ -1148,7 +1188,7 @@ mod tests { let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample"); let paths = ProjectPathsConfig::hardhat(&root).unwrap(); - let graph = Graph::::resolve(&paths).unwrap(); + let graph = Graph::::resolve(&paths).unwrap(); assert_eq!(graph.edges.num_input_files, 1); assert_eq!(graph.files().len(), 2); @@ -1167,7 +1207,7 @@ mod tests { let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); let paths = ProjectPathsConfig::dapptools(&root).unwrap(); - let graph = Graph::::resolve(&paths).unwrap(); + let graph = Graph::::resolve(&paths).unwrap(); assert_eq!(graph.edges.num_input_files, 2); assert_eq!(graph.files().len(), 3); @@ -1190,26 +1230,31 @@ mod tests { } #[test] - #[cfg(not(target_os = "windows"))] fn can_print_dapp_sample_graph() { let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); let paths = ProjectPathsConfig::dapptools(&root).unwrap(); - let graph = Graph::::resolve(&paths).unwrap(); + let graph = Graph::::resolve(&paths).unwrap(); let mut out = Vec::::new(); tree::print(&graph, &Default::default(), &mut out).unwrap(); - assert_eq!( - " + if !cfg!(windows) { + assert_eq!( + " src/Dapp.sol >=0.6.6 src/Dapp.t.sol >=0.6.6 ├── lib/ds-test/src/test.sol >=0.4.23 └── src/Dapp.sol >=0.6.6 " - .trim_start() - .as_bytes() - .to_vec(), - out - ); + .trim_start() + .as_bytes() + .to_vec(), + out + ); + } + + graph.edges.parser().compiler.enter(|c| { + assert_eq!(c.gcx().sources.len(), 3); + }); } #[test] @@ -1217,7 +1262,7 @@ src/Dapp.t.sol >=0.6.6 fn can_print_hardhat_sample_graph() { let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample"); let paths = ProjectPathsConfig::hardhat(&root).unwrap(); - let graph = Graph::::resolve(&paths).unwrap(); + let graph = Graph::::resolve(&paths).unwrap(); let mut out = Vec::::new(); tree::print(&graph, &Default::default(), &mut out).unwrap(); assert_eq!( @@ -1236,7 +1281,7 @@ src/Dapp.t.sol >=0.6.6 let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/incompatible-pragmas"); let paths = ProjectPathsConfig::dapptools(&root).unwrap(); - let graph = Graph::::resolve(&paths).unwrap(); + let graph = Graph::::resolve(&paths).unwrap(); let Err(SolcError::Message(err)) = graph.get_input_node_versions( &ProjectBuilder::::default() .paths(paths) diff --git a/crates/compilers/src/resolver/parse.rs b/crates/compilers/src/resolver/parse.rs index 8126c850f..26baf157a 100644 --- a/crates/compilers/src/resolver/parse.rs +++ b/crates/compilers/src/resolver/parse.rs @@ -1,11 +1,91 @@ use foundry_compilers_core::utils; use semver::VersionReq; use solar_parse::{ast, interface::sym}; +use solar_sema::interface; use std::{ ops::Range, path::{Path, PathBuf}, }; +/// Solidity parser. +/// +/// Holds a [`solar_sema::Compiler`] that is used to parse sources incrementally. +/// After project compilation ([`Graph::resolve`]), this will contain all sources parsed by +/// [`Graph`]. +/// +/// This state is currently lost on `Clone`. 
+/// +/// [`Graph`]: crate::Graph +/// [`Graph::resolve`]: crate::Graph::resolve +#[derive(derive_more::Debug)] +pub struct SolParser { + #[debug(ignore)] + pub(crate) compiler: solar_sema::Compiler, +} + +impl Clone for SolParser { + fn clone(&self) -> Self { + Self { + compiler: solar_sema::Compiler::new(Self::session_with_opts( + self.compiler.sess().opts.clone(), + )), + } + } +} + +impl SolParser { + /// Returns a reference to the compiler. + pub fn compiler(&self) -> &solar_sema::Compiler { + &self.compiler + } + + /// Returns a mutable reference to the compiler. + pub fn compiler_mut(&mut self) -> &mut solar_sema::Compiler { + &mut self.compiler + } + + /// Consumes the parser and returns the compiler. + pub fn into_compiler(self) -> solar_sema::Compiler { + self.compiler + } + + pub(crate) fn session_with_opts( + opts: solar_sema::interface::config::Opts, + ) -> solar_sema::interface::Session { + let sess = solar_sema::interface::Session::builder() + .with_buffer_emitter(Default::default()) + .opts(opts) + .build(); + sess.source_map().set_file_loader(FileLoader); + sess + } +} + +struct FileLoader; +impl interface::source_map::FileLoader for FileLoader { + fn canonicalize_path(&self, path: &Path) -> std::io::Result { + interface::source_map::RealFileLoader.canonicalize_path(path) + } + + fn load_stdin(&self) -> std::io::Result { + interface::source_map::RealFileLoader.load_stdin() + } + + fn load_file(&self, path: &Path) -> std::io::Result { + interface::source_map::RealFileLoader.load_file(path).map(|s| { + if s.contains('\r') { + s.replace('\r', "") + } else { + s + } + }) + } + + fn load_binary_file(&self, path: &Path) -> std::io::Result> { + interface::source_map::RealFileLoader.load_binary_file(path) + } +} + /// Represents various information about a Solidity file. 
#[derive(Clone, Debug)] #[non_exhaustive] @@ -44,111 +124,26 @@ impl SolData { /// parsing fails, we'll fall back to extract that info via regex #[instrument(name = "SolData::parse", skip_all)] pub fn parse(content: &str, file: &Path) -> Self { - let is_yul = file.extension().is_some_and(|ext| ext == "yul"); - let mut version = None; - let mut experimental = None; - let mut imports = Vec::>::new(); - let mut libraries = Vec::new(); - let mut contract_names = Vec::new(); - let mut parse_result = Ok(()); - - let result = crate::parse_one_source(content, file, |ast| { - for item in ast.items.iter() { - let loc = item.span.lo().to_usize()..item.span.hi().to_usize(); - match &item.kind { - ast::ItemKind::Pragma(pragma) => match &pragma.tokens { - ast::PragmaTokens::Version(name, req) if name.name == sym::solidity => { - version = Some(Spanned::new(req.to_string(), loc)); - } - ast::PragmaTokens::Custom(name, value) - if name.as_str() == "experimental" => - { - let value = - value.as_ref().map(|v| v.as_str().to_string()).unwrap_or_default(); - experimental = Some(Spanned::new(value, loc)); - } - _ => {} - }, - - ast::ItemKind::Import(import) => { - let path = import.path.value.to_string(); - let aliases = match &import.items { - ast::ImportItems::Plain(None) => &[][..], - ast::ImportItems::Plain(Some(alias)) - | ast::ImportItems::Glob(alias) => &[(*alias, None)][..], - ast::ImportItems::Aliases(aliases) => aliases, - }; - let sol_import = SolImport::new(PathBuf::from(path)).set_aliases( - aliases - .iter() - .map(|(id, alias)| match alias { - Some(al) => SolImportAlias::Contract( - al.name.to_string(), - id.name.to_string(), - ), - None => SolImportAlias::File(id.name.to_string()), - }) - .collect(), - ); - imports.push(Spanned::new(sol_import, loc)); - } - - ast::ItemKind::Contract(contract) => { - if contract.kind.is_library() { - libraries.push(SolLibrary { is_inlined: library_is_inlined(contract) }); - } - contract_names.push(contract.name.to_string()); - } - - _ => {} - } - } - }); - if let Err(e) = result { - let e = e.to_string(); - trace!("failed parsing {file:?}: {e}"); - parse_result = Err(e); - - if version.is_none() { - version = utils::capture_outer_and_inner( - content, - &utils::RE_SOL_PRAGMA_VERSION, - &["version"], - ) - .first() - .map(|(cap, name)| Spanned::new(name.as_str().to_owned(), cap.range())); - } - if imports.is_empty() { - imports = capture_imports(content); - } - if contract_names.is_empty() { - utils::RE_CONTRACT_NAMES.captures_iter(content).for_each(|cap| { - contract_names.push(cap[1].to_owned()); - }); + match crate::parse_one_source(content, file, |sess, ast| { + SolDataBuilder::parse(content, file, Ok((sess, ast))) + }) { + Ok(data) => data, + Err(e) => { + let e = e.to_string(); + trace!("failed parsing {file:?}: {e}"); + SolDataBuilder::parse(content, file, Err(Some(e))) } } - let license = content.lines().next().and_then(|line| { - utils::capture_outer_and_inner( - line, - &utils::RE_SOL_SDPX_LICENSE_IDENTIFIER, - &["license"], - ) - .first() - .map(|(cap, l)| Spanned::new(l.as_str().to_owned(), cap.range())) - }); - let version_req = version.as_ref().and_then(|v| Self::parse_version_req(v.data()).ok()); + } - Self { - version_req, - version, - experimental, - imports, - license, - libraries, - contract_names, - is_yul, - parse_result, - } + pub(crate) fn parse_from( + sess: &solar_sema::interface::Session, + s: &solar_sema::Source<'_>, + ) -> Self { + let content = s.file.src.as_str(); + let file = s.file.name.as_real().unwrap(); + let ast = 
s.ast.as_ref().map(|ast| (sess, ast)).ok_or(None); + SolDataBuilder::parse(content, file, ast) } /// Parses the version pragma and returns the corresponding SemVer version requirement. @@ -169,7 +164,7 @@ impl SolData { // Somehow, Solidity semver without an operator is considered to be "exact", // but lack of operator automatically marks the operator as Caret, so we need // to manually patch it? :shrug: - let exact = !matches!(&version[0..1], "*" | "^" | "=" | ">" | "<" | "~"); + let exact = !matches!(version.get(..1), Some("*" | "^" | "=" | ">" | "<" | "~")); let mut version = VersionReq::parse(&version)?; if exact { version.comparators[0].op = semver::Op::Exact; @@ -179,6 +174,141 @@ impl SolData { } } +#[derive(Default)] +struct SolDataBuilder { + version: Option>, + experimental: Option>, + imports: Vec>, + libraries: Vec, + contract_names: Vec, + parse_err: Option, +} + +impl SolDataBuilder { + fn parse( + content: &str, + file: &Path, + ast: Result< + (&solar_sema::interface::Session, &solar_parse::ast::SourceUnit<'_>), + Option, + >, + ) -> SolData { + let mut builder = Self::default(); + match ast { + Ok((sess, ast)) => builder.parse_from_ast(sess, ast), + Err(err) => { + builder.parse_from_regex(content); + if let Some(err) = err { + builder.parse_err = Some(err); + } + } + } + builder.build(content, file) + } + + fn parse_from_ast( + &mut self, + sess: &solar_sema::interface::Session, + ast: &solar_parse::ast::SourceUnit<'_>, + ) { + for item in ast.items.iter() { + let loc = sess.source_map().span_to_source(item.span).unwrap().1; + match &item.kind { + ast::ItemKind::Pragma(pragma) => match &pragma.tokens { + ast::PragmaTokens::Version(name, req) if name.name == sym::solidity => { + self.version = Some(Spanned::new(req.to_string(), loc)); + } + ast::PragmaTokens::Custom(name, value) if name.as_str() == "experimental" => { + let value = + value.as_ref().map(|v| v.as_str().to_string()).unwrap_or_default(); + self.experimental = Some(Spanned::new(value, loc)); + } + _ => {} + }, + + ast::ItemKind::Import(import) => { + let path = import.path.value.to_string(); + let aliases = match &import.items { + ast::ImportItems::Plain(None) => &[][..], + ast::ImportItems::Plain(Some(alias)) | ast::ImportItems::Glob(alias) => { + &[(*alias, None)][..] 
+ } + ast::ImportItems::Aliases(aliases) => aliases, + }; + let sol_import = SolImport::new(PathBuf::from(path)).set_aliases( + aliases + .iter() + .map(|(id, alias)| match alias { + Some(al) => SolImportAlias::Contract( + al.name.to_string(), + id.name.to_string(), + ), + None => SolImportAlias::File(id.name.to_string()), + }) + .collect(), + ); + self.imports.push(Spanned::new(sol_import, loc)); + } + + ast::ItemKind::Contract(contract) => { + if contract.kind.is_library() { + self.libraries + .push(SolLibrary { is_inlined: library_is_inlined(contract) }); + } + self.contract_names.push(contract.name.to_string()); + } + + _ => {} + } + } + } + + fn parse_from_regex(&mut self, content: &str) { + if self.version.is_none() { + self.version = utils::capture_outer_and_inner( + content, + &utils::RE_SOL_PRAGMA_VERSION, + &["version"], + ) + .first() + .map(|(cap, name)| Spanned::new(name.as_str().to_owned(), cap.range())); + } + if self.imports.is_empty() { + self.imports = capture_imports(content); + } + if self.contract_names.is_empty() { + utils::RE_CONTRACT_NAMES.captures_iter(content).for_each(|cap| { + self.contract_names.push(cap[1].to_owned()); + }); + } + } + + fn build(self, content: &str, file: &Path) -> SolData { + let Self { version, experimental, imports, libraries, contract_names, parse_err } = self; + let license = content.lines().next().and_then(|line| { + utils::capture_outer_and_inner( + line, + &utils::RE_SOL_SDPX_LICENSE_IDENTIFIER, + &["license"], + ) + .first() + .map(|(cap, l)| Spanned::new(l.as_str().to_owned(), cap.range())) + }); + let version_req = version.as_ref().and_then(|v| SolData::parse_version_req(v.data()).ok()); + SolData { + license, + version, + experimental, + imports, + version_req, + libraries, + contract_names, + is_yul: file.extension().is_some_and(|ext| ext == "yul"), + parse_result: parse_err.map(Err).unwrap_or(Ok(())), + } + } +} + #[derive(Clone, Debug)] pub struct SolImport { path: PathBuf, diff --git a/crates/compilers/src/resolver/tree.rs b/crates/compilers/src/resolver/tree.rs index ca7309852..6ffdf1f6a 100644 --- a/crates/compilers/src/resolver/tree.rs +++ b/crates/compilers/src/resolver/tree.rs @@ -1,4 +1,4 @@ -use crate::{compilers::ParsedSource, Graph}; +use crate::{Graph, SourceParser}; use std::{collections::HashSet, io, io::Write, str::FromStr}; #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] @@ -46,8 +46,8 @@ static UTF8_SYMBOLS: Symbols = Symbols { down: "│", tee: "├", ell: "└", ri static ASCII_SYMBOLS: Symbols = Symbols { down: "|", tee: "|", ell: "`", right: "-" }; -pub fn print( - graph: &Graph, +pub fn print( + graph: &Graph
<P>
, opts: &TreeOptions, out: &mut dyn Write, ) -> io::Result<()> { @@ -83,8 +83,8 @@ pub fn print( } #[allow(clippy::too_many_arguments)] -fn print_node( - graph: &Graph, +fn print_node( + graph: &Graph
<P>
, node_index: usize, symbols: &Symbols, no_dedupe: bool, @@ -137,8 +137,8 @@ fn print_node( /// Prints all the imports of a node #[allow(clippy::too_many_arguments)] -fn print_imports( - graph: &Graph, +fn print_imports( + graph: &Graph, node_index: usize, symbols: &Symbols, no_dedupe: bool, diff --git a/crates/compilers/tests/project.rs b/crates/compilers/tests/project.rs index fcae49532..e5c28b1da 100644 --- a/crates/compilers/tests/project.rs +++ b/crates/compilers/tests/project.rs @@ -5,16 +5,14 @@ use foundry_compilers::{ buildinfo::BuildInfo, cache::{CompilerCache, SOLIDITY_FILES_CACHE_FILENAME}, compilers::{ - multi::{ - MultiCompiler, MultiCompilerLanguage, MultiCompilerParsedSource, MultiCompilerSettings, - }, + multi::{MultiCompiler, MultiCompilerLanguage, MultiCompilerSettings}, solc::{Solc, SolcCompiler, SolcLanguage}, vyper::{Vyper, VyperLanguage, VyperSettings}, CompilerOutput, }, flatten::Flattener, info::ContractInfo, - multi::{MultiCompilerInput, MultiCompilerRestrictions}, + multi::{MultiCompilerInput, MultiCompilerParser, MultiCompilerRestrictions}, project::{Preprocessor, ProjectCompiler}, project_util::*, solc::{Restriction, SolcRestrictions, SolcSettings}, @@ -264,7 +262,7 @@ fn can_compile_dapp_detect_changes_in_libs() { ) .unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); assert_eq!(graph.files().clone(), HashMap::from([(src, 0), (lib, 1),])); @@ -294,7 +292,7 @@ fn can_compile_dapp_detect_changes_in_libs() { ) .unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); let compiled = project.compile().unwrap(); @@ -336,7 +334,7 @@ fn can_compile_dapp_detect_changes_in_sources() { ) .unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); assert_eq!(graph.files().clone(), HashMap::from([(base, 0), (src, 1),])); assert_eq!(graph.imported_nodes(1).to_vec(), vec![0]); @@ -373,7 +371,7 @@ fn can_compile_dapp_detect_changes_in_sources() { ", ) .unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); let compiled = project.compile().unwrap(); @@ -768,7 +766,6 @@ contract Contract { .unwrap(); let result = project.paths().clone().with_language::().flatten(target.as_path()); - assert!(result.is_err()); println!("{}", result.unwrap_err()); } @@ -3679,7 +3676,7 @@ fn can_add_basic_contract_and_library() { let lib = project.add_basic_source("Bar", "^0.8.0").unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); assert!(graph.files().contains_key(&src)); assert!(graph.files().contains_key(&lib));
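Taken together, these changes let downstream callers reach both the resolved source graph and the solar state that parsed it: via `Graph::parser()` after `Graph::resolve`, or via `ProjectCompileOutput::graph()`/`parser()` after a compile. A hedged end-to-end sketch, modeled on the updated `can_print_dapp_sample_graph` test (`root` is illustrative):

```rust
use foundry_compilers::{multi::MultiCompilerParser, Graph, ProjectPathsConfig};
use std::path::Path;

fn inspect(root: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let paths = ProjectPathsConfig::dapptools(root)?;
    // Resolving the graph drives the stateful parser: for Solidity sources
    // the parsed ASTs stay alive inside `SolParser`'s `solar_sema::Compiler`.
    let graph = Graph::<MultiCompilerParser>::resolve(&paths)?;
    graph.parser().solc().compiler().enter(|c| {
        println!("solar parsed {} sources", c.gcx().sources.len());
    });
    Ok(())
}
```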