Merged
28 commits
b683f5d
feat: add ParsedSources, store edges in output
DaniPopes Aug 14, 2025
394d1f2
chore: make it SourceParser
DaniPopes Aug 20, 2025
106770f
chore: renames, new methods
DaniPopes Aug 20, 2025
6a9a62c
chore: patch to git
DaniPopes Aug 20, 2025
0abe20f
Merge branch 'main' into dani/parsed-sources
DaniPopes Aug 20, 2025
23004cf
chore: clippy
DaniPopes Aug 20, 2025
2271483
fix: use source map, impl Clone with Default
DaniPopes Aug 20, 2025
cf3bd53
fix: set error on parse
DaniPopes Aug 20, 2025
e3c7ed1
chore: fix feature
DaniPopes Aug 20, 2025
01e3294
chore: rename assoc
DaniPopes Aug 20, 2025
cf98ace
rename
DaniPopes Aug 20, 2025
ab306d6
chore: rm unnecessary method
DaniPopes Aug 20, 2025
f592d14
docs: SolParser docs
DaniPopes Aug 21, 2025
f4f8692
push for ci
DaniPopes Aug 21, 2025
e90a85a
Update crates/compilers/src/compilers/multi.rs
DaniPopes Aug 22, 2025
c4bb4aa
bump
DaniPopes Aug 24, 2025
31458db
perf: load files in parallel
DaniPopes Aug 24, 2025
2c7d79b
feat: update, resolve imports while parsing
DaniPopes Aug 24, 2025
f1413c4
dbg
DaniPopes Aug 25, 2025
599164c
fix: strip \r
DaniPopes Aug 25, 2025
0dc50ef
undbg
DaniPopes Aug 25, 2025
a3845b0
undbg2
DaniPopes Aug 25, 2025
8f6202f
fix set error
DaniPopes Aug 25, 2025
32bab24
chore: log error
DaniPopes Aug 25, 2025
0d0bc9d
tmp: revert resolve imports
DaniPopes Aug 25, 2025
1f13864
Revert "tmp: revert resolve imports"
DaniPopes Aug 25, 2025
e404d59
chore: hide SolParser details
DaniPopes Aug 25, 2025
7d43c0a
bumpalo
DaniPopes Aug 25, 2025
10 changes: 5 additions & 5 deletions Cargo.toml
@@ -69,8 +69,8 @@ tokio = { version = "1.47", features = ["rt-multi-thread"] }

snapbox = "0.6.21"

# [patch.crates-io]
# solar-parse = { git = "https://github.com/paradigmxyz/solar", branch = "main" }
# solar-sema = { git = "https://github.com/paradigmxyz/solar", branch = "main" }
# solar-ast = { git = "https://github.com/paradigmxyz/solar", branch = "main" }
# solar-interface = { git = "https://github.com/paradigmxyz/solar", branch = "main" }
[patch.crates-io]
solar-parse = { git = "https://github.com/paradigmxyz/solar", branch = "dani/compiler-interface" }
solar-sema = { git = "https://github.com/paradigmxyz/solar", branch = "dani/compiler-interface" }
solar-ast = { git = "https://github.com/paradigmxyz/solar", branch = "dani/compiler-interface" }
solar-interface = { git = "https://github.com/paradigmxyz/solar", branch = "dani/compiler-interface" }
26 changes: 25 additions & 1 deletion crates/artifacts/solc/src/sources.rs
@@ -1,4 +1,4 @@
use foundry_compilers_core::error::SolcIoError;
use foundry_compilers_core::error::{SolcError, SolcIoError};
use serde::{Deserialize, Serialize};
use std::{
collections::BTreeMap,
@@ -137,6 +137,30 @@ impl Source {
Ok(Self::new(content))
}

/// Same as [`read`](Self::read), but maps the error to a [`SolcError`].
pub fn read_(file: &Path) -> Result<Self, SolcError> {
Self::read(file).map_err(|err| {
let exists = err.path().exists();
if !exists && err.path().is_symlink() {
return SolcError::ResolveBadSymlink(err);
}

// This is an additional check that is useful on OSes with case-sensitive paths;
// see also <https://docs.soliditylang.org/en/v0.8.17/path-resolution.html#import-callback>.
// Check whether a file with a different casing exists.
#[cfg(feature = "walkdir")]
if !exists {
if let Some(existing_file) =
foundry_compilers_core::utils::find_case_sensitive_existing_file(file)
{
return SolcError::ResolveCaseSensitiveFileName { error: err, existing_file };
}
}

SolcError::Resolve(err)
})
}

/// Returns `true` if the source should be compiled with full output selection.
pub fn is_dirty(&self) -> bool {
self.kind.is_dirty()
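A short usage sketch of the new `Source::read_` helper; the `load_source` wrapper and its call site are illustrative assumptions, not part of this diff:

```rust
use foundry_compilers_artifacts::sources::Source;
use foundry_compilers_core::error::SolcError;
use std::path::Path;

// Reads a source file, mapping IO failures to `SolcError` variants that
// distinguish bad symlinks and case-mismatched file names.
fn load_source(path: &Path) -> Result<Source, SolcError> {
    Source::read_(path)
}
```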
35 changes: 21 additions & 14 deletions crates/compilers/src/cache.rs
@@ -6,7 +6,7 @@ use crate::{
output::Builds,
resolver::GraphEdges,
ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Graph, OutputContext, Project,
ProjectPaths, ProjectPathsConfig, SourceCompilationKind,
ProjectPaths, ProjectPathsConfig, SourceCompilationKind, SourceParser,
};
use foundry_compilers_artifacts::{
sources::{Source, Sources},
@@ -658,7 +658,7 @@ pub(crate) struct ArtifactsCacheInner<
pub cached_builds: Builds<C::Language>,

/// Relationship between all the files.
pub edges: GraphEdges<C::ParsedSource>,
pub edges: GraphEdges<C::Parser>,

/// The project.
pub project: &'a Project<C, T>,
@@ -723,6 +723,7 @@ impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
/// Gets or calculates the interface representation hash for the given source file.
fn interface_repr_hash(&mut self, source: &Source, file: &Path) -> &str {
self.interface_repr_hashes.entry(file.to_path_buf()).or_insert_with(|| {
// TODO: use `interface_representation_ast` directly with `edges.parser()`.
if let Some(r) = interface_repr_hash(&source.content, file) {
return r;
}
@@ -823,10 +824,10 @@ impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>

// Walks over all cache entries, detects dirty files and removes them from cache.
fn find_and_remove_dirty(&mut self) {
fn populate_dirty_files<D>(
fn populate_dirty_files<P: SourceParser>(
file: &Path,
dirty_files: &mut HashSet<PathBuf>,
edges: &GraphEdges<D>,
edges: &GraphEdges<P>,
) {
for file in edges.importers(file) {
// If file is marked as dirty we either have already visited it or it was marked as
@@ -890,7 +891,7 @@ impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>

// Build a temporary graph for walking imports. We need this because `self.edges`
// only contains graph data for in-scope sources but we are operating on cache entries.
if let Ok(graph) = Graph::<C::ParsedSource>::resolve_sources(&self.project.paths, sources) {
if let Ok(graph) = Graph::<C::Parser>::resolve_sources(&self.project.paths, sources) {
let (sources, edges) = graph.into_sources();

// Calculate content hashes for later comparison.
@@ -1020,7 +1021,7 @@ pub(crate) enum ArtifactsCache<
C: Compiler,
> {
/// Cache nothing on disk
Ephemeral(GraphEdges<C::ParsedSource>, &'a Project<C, T>),
Ephemeral(GraphEdges<C::Parser>, &'a Project<C, T>),
/// Handles the actual cached artifacts, detects artifacts that can be reused
Cached(ArtifactsCacheInner<'a, T, C>),
}
@@ -1032,7 +1033,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
#[instrument(name = "ArtifactsCache::new", skip(project, edges))]
pub fn new(
project: &'a Project<C, T>,
edges: GraphEdges<C::ParsedSource>,
edges: GraphEdges<C::Parser>,
preprocessed: bool,
) -> Result<Self> {
/// Returns the [CompilerCache] to use
@@ -1117,7 +1118,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
}

/// Returns the graph data for this project
pub fn graph(&self) -> &GraphEdges<C::ParsedSource> {
pub fn graph(&self) -> &GraphEdges<C::Parser> {
match self {
ArtifactsCache::Ephemeral(graph, _) => graph,
ArtifactsCache::Cached(inner) => &inner.edges,
@@ -1191,18 +1192,22 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
///
/// Returns all the _cached_ artifacts.
#[instrument(name = "ArtifactsCache::consume", skip_all)]
#[allow(clippy::type_complexity)]
pub fn consume<A>(
self,
written_artifacts: &Artifacts<A>,
written_build_infos: &Vec<RawBuildInfo<C::Language>>,
write_to_disk: bool,
) -> Result<(Artifacts<A>, Builds<C::Language>)>
) -> Result<(Artifacts<A>, Builds<C::Language>, GraphEdges<C::Parser>)>
where
T: ArtifactOutput<Artifact = A>,
{
let ArtifactsCache::Cached(cache) = self else {
trace!("no cache configured, ephemeral");
return Ok(Default::default());
let cache = match self {
ArtifactsCache::Ephemeral(edges, _project) => {
trace!("no cache configured, ephemeral");
return Ok((Default::default(), Default::default(), edges));
}
ArtifactsCache::Cached(cache) => cache,
};

let ArtifactsCacheInner {
@@ -1212,7 +1217,9 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
dirty_sources,
sources_in_scope,
project,
..
edges,
content_hashes: _,
interface_repr_hashes: _,
} = cache;

// Remove cached artifacts which are out of scope, dirty or appear in `written_artifacts`.
@@ -1264,7 +1271,7 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
cache.write(project.cache_path())?;
}

Ok((cached_artifacts, cached_builds))
Ok((cached_artifacts, cached_builds, edges))
}

/// Marks the cached entry as seen by the compiler, if it's cached.
11 changes: 6 additions & 5 deletions crates/compilers/src/cache/iface.rs
@@ -1,5 +1,5 @@
use crate::{parse_one_source, replace_source_content};
use solar_sema::{
use solar_parse::{
ast::{self, Span},
interface::diagnostics::EmittedDiagnostics,
};
@@ -11,7 +11,7 @@ pub(crate) fn interface_repr_hash(content: &str, path: &Path) -> Option<String>
}

pub(crate) fn interface_repr(content: &str, path: &Path) -> Result<String, EmittedDiagnostics> {
parse_one_source(content, path, |ast| interface_representation_ast(content, &ast))
parse_one_source(content, path, |sess, ast| interface_representation_ast(content, sess, ast))
}

/// Helper function to remove parts of the contract which do not alter its interface:
@@ -21,6 +21,7 @@ pub(crate) fn interface_repr(content: &str, path: &Path) -> Result<String, Emitt
/// Preserves all libraries and interfaces.
pub(crate) fn interface_representation_ast(
content: &str,
sess: &solar_sema::interface::Session,
ast: &solar_parse::ast::SourceUnit<'_>,
) -> String {
let mut spans_to_remove: Vec<Span> = Vec::new();
@@ -57,9 +58,9 @@ pub(crate) fn interface_representation_ast(
}
}
}
let content =
replace_source_content(content, spans_to_remove.iter().map(|span| (span.to_range(), "")))
.replace("\n", "");
let updates =
spans_to_remove.iter().map(|&span| (sess.source_map().span_to_source(span).unwrap().1, ""));
let content = replace_source_content(content, updates).replace("\n", "");
crate::utils::RE_TWO_OR_MORE_SPACES.replace_all(&content, "").into_owned()
}

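For context, a minimal sketch of the span-removal step that this hunk reroutes through the session's source map; it assumes `replace_source_content` accepts `(Range<usize>, &str)` pairs, as in the pre-existing call, and the `strip_spans` helper name is hypothetical:

```rust
use std::ops::Range;

// Blanks out the given byte ranges, then strips newlines so the resulting
// interface representation is insensitive to formatting.
fn strip_spans(content: &str, ranges: &[Range<usize>]) -> String {
    let updates = ranges.iter().map(|r| (r.clone(), ""));
    crate::replace_source_content(content, updates).replace('\n', "")
}
```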
20 changes: 19 additions & 1 deletion crates/compilers/src/compile/output/mod.rs
@@ -19,6 +19,7 @@ use crate::{
compilers::{
multi::MultiCompiler, CompilationError, Compiler, CompilerContract, CompilerOutput,
},
resolver::GraphEdges,
Artifact, ArtifactId, ArtifactOutput, Artifacts, ConfigurableArtifacts,
};

@@ -62,7 +63,7 @@ impl<L> IntoIterator for Builds<L> {

/// Contains a mixture of already compiled/cached artifacts and the input set of sources that still
/// need to be compiled.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
#[derive(Clone, Debug, Default)]
pub struct ProjectCompileOutput<
C: Compiler = MultiCompiler,
T: ArtifactOutput<CompilerContract = C::CompilerContract> = ConfigurableArtifacts,
@@ -81,11 +82,23 @@ pub struct ProjectCompileOutput<
pub(crate) compiler_severity_filter: Severity,
/// all build infos that were just compiled
pub(crate) builds: Builds<C::Language>,
/// The relationship between the source files and their imports
pub(crate) edges: GraphEdges<C::Parser>,
}

impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
ProjectCompileOutput<C, T>
{
/// Returns the parser used to parse the sources.
pub fn parser(&self) -> &C::Parser {
self.edges.parser()
}

/// Returns a mutable reference to the parser used to parse the sources.
pub fn parser_mut(&mut self) -> &mut C::Parser {
self.edges.parser_mut()
}

/// Converts all `\\` separators in _all_ paths to `/`
#[instrument(skip_all)]
pub fn slash_paths(&mut self) {
@@ -460,6 +473,11 @@ impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
pub fn builds(&self) -> impl Iterator<Item = (&String, &BuildContext<C::Language>)> {
self.builds.iter()
}

/// Returns the source graph of the project.
pub fn graph(&self) -> &GraphEdges<C::Parser> {
&self.edges
}
}

impl<C: Compiler, T: ArtifactOutput<CompilerContract = C::CompilerContract>>
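A hedged usage sketch of the new `graph`/`parser` accessors; the hardhat-style project setup, the `src/Counter.sol` path, and the error type are assumptions for illustration:

```rust
use foundry_compilers::{Project, ProjectPathsConfig};

fn inspect(root: &std::path::Path) -> Result<(), Box<dyn std::error::Error>> {
    let paths = ProjectPathsConfig::hardhat(root)?;
    let project = Project::builder().paths(paths).build(Default::default())?;
    let output = project.compile()?;

    // New in this PR: the compile output keeps the resolved import graph
    // and the parser that produced it.
    let edges = output.graph();
    let _parser = output.parser();
    for importer in edges.importers(&root.join("src/Counter.sol")) {
        println!("imported by {}", importer.display());
    }
    Ok(())
}
```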
5 changes: 3 additions & 2 deletions crates/compilers/src/compile/project.rs
@@ -146,7 +146,7 @@ pub struct ProjectCompiler<
C: Compiler,
> {
/// Contains the relationship of the source files and their imports
edges: GraphEdges<C::ParsedSource>,
edges: GraphEdges<C::Parser>,
project: &'a Project<C, T>,
/// A mapping from a source file path to the primary profile name selected for it.
primary_profiles: HashMap<PathBuf, &'a str>,
@@ -381,7 +381,7 @@ impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
let skip_write_to_disk = project.no_artifacts || has_error;
trace!(has_error, project.no_artifacts, skip_write_to_disk, cache_path=?project.cache_path(),"prepare writing cache file");

let (cached_artifacts, cached_builds) =
let (cached_artifacts, cached_builds, edges) =
cache.consume(&compiled_artifacts, &output.build_infos, !skip_write_to_disk)?;

project.artifacts_handler().handle_cached_artifacts(&cached_artifacts)?;
@@ -404,6 +404,7 @@ impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
ignored_file_paths,
compiler_severity_filter,
builds,
edges,
})
}
}
30 changes: 27 additions & 3 deletions crates/compilers/src/compilers/mod.rs
@@ -1,4 +1,4 @@
use crate::ProjectPathsConfig;
use crate::{resolver::Node, ProjectPathsConfig};
use alloy_json_abi::JsonAbi;
use core::fmt;
use foundry_compilers_artifacts::{
@@ -9,6 +9,7 @@ use foundry_compilers_artifacts::{
BytecodeObject, CompactContractRef, Contract, FileToContractsMap, Severity, SourceFile,
};
use foundry_compilers_core::error::Result;
use rayon::prelude::*;
use semver::{Version, VersionReq};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use std::{
@@ -139,12 +140,35 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug {
fn strip_prefix(&mut self, base: &Path);
}

/// A source parser that produces [`ParsedSource`]s for the resolver graph.
pub trait SourceParser: Clone + Default + Debug + Send + Sync {
type ParsedSource: ParsedSource;

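/// Reads and parses the source file at `path`, returning its graph [`Node`].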
fn read(&mut self, path: &Path) -> Result<Node<Self::ParsedSource>> {
Node::read(path)
}

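/// Parses all given `sources` in parallel, returning one [`Node`] per file.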
fn parse_sources(
&mut self,
sources: &mut Sources,
) -> Result<Vec<(PathBuf, Node<Self::ParsedSource>)>> {
sources
.0
.par_iter()
.map(|(path, source)| {
let data = Self::ParsedSource::parse(source.as_ref(), path)?;
Ok((path.clone(), Node::new(path.clone(), source.clone(), data)))
})
.collect::<Result<_>>()
}
}

/// Parser of the source files which is used to identify imports and version requirements of the
/// given source.
///
/// Used by the path resolver to resolve imports and to determine the compiler versions needed to
/// compile the given sources.
pub trait ParsedSource: Debug + Sized + Send + Clone {
pub trait ParsedSource: Clone + Debug + Sized + Send {
type Language: Language;

/// Parses the content of the source file.
@@ -331,7 +355,7 @@ pub trait Compiler: Send + Sync + Clone {
/// Output data for each contract
type CompilerContract: CompilerContract;
/// Source parser used for resolving imports and version requirements.
type ParsedSource: ParsedSource<Language = Self::Language>;
type Parser: SourceParser<ParsedSource: ParsedSource<Language = Self::Language>>;
/// Compiler settings.
type Settings: CompilerSettings;
/// Enum of languages supported by the compiler.
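A minimal sketch of implementing the new trait for a custom compiler; `MyParser` and `MyParsedSource` are hypothetical, and `MyParsedSource` is assumed to already implement `ParsedSource`:

```rust
use foundry_compilers::compilers::SourceParser;

// Relies on the default `read`/`parse_sources` methods: files are read from
// disk and parsed in parallel via rayon, producing one graph `Node` per file.
#[derive(Clone, Debug, Default)]
struct MyParser;

impl SourceParser for MyParser {
    // `MyParsedSource: ParsedSource` is assumed to be defined elsewhere.
    type ParsedSource = MyParsedSource;
}
```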