diff --git a/Cargo.lock b/Cargo.lock index 1039b52714cff..36080053979ca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2637,7 +2637,7 @@ dependencies = [ "semver 1.0.26", "serde", "serde_json", - "solar-parse", + "solar-compiler", "strum 0.27.2", "time", "tracing", @@ -4065,8 +4065,7 @@ dependencies = [ "serde_json", "similar", "similar-asserts", - "solar-parse", - "solar-sema", + "solar-compiler", "soldeer-commands", "strum 0.27.2", "svm-rs", @@ -4130,11 +4129,8 @@ dependencies = [ "foundry-config", "heck", "rayon", - "solar-ast", - "solar-data-structures", + "solar-compiler", "solar-interface", - "solar-parse", - "solar-sema", "thiserror 2.0.16", ] @@ -4280,9 +4276,9 @@ dependencies = [ [[package]] name = "foundry-block-explorers" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc107bbc3b4480995fdf337ca0ddedc631728175f418d3136ead9df8f4dc465e" +checksum = "513f363cbc15c7fbf45a5ca1d0e92f93a0e4a7b622dce83d15fa5f171cd0b0e6" dependencies = [ "alloy-chains", "alloy-json-abi", @@ -4292,7 +4288,7 @@ dependencies = [ "semver 1.0.26", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.16", "tracing", ] @@ -4393,7 +4389,7 @@ dependencies = [ "rustls", "serde", "serde_json", - "solar-sema", + "solar-compiler", "strsim", "strum 0.27.2", "tempfile", @@ -4451,8 +4447,7 @@ dependencies = [ "semver 1.0.26", "serde", "serde_json", - "solar-parse", - "solar-sema", + "solar-compiler", "terminal_size", "thiserror 2.0.16", "tokio", @@ -4486,9 +4481,9 @@ dependencies = [ [[package]] name = "foundry-compilers" -version = "0.18.4" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1f8bc0a29630f1c4bbccd7a9555d72c232fef139f5ec3738fb78fff7c8992b0" +checksum = "54c6cc5c7952f069e5bf832fe82a329b297779b7d85352b14d5c2ae330172126" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -4521,9 +4516,9 @@ dependencies = [ [[package]] name = "foundry-compilers-artifacts" -version = "0.18.4" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8853af381b1a06a894a09c81e1eb1a1c3bdac063830342b091ec86b773c8b99c" +checksum = "f44f236dafbd7771d6acb9c06524caf47ac88aa85d620c894e7c1e19447f5bfa" dependencies = [ "foundry-compilers-artifacts-solc", "foundry-compilers-artifacts-vyper", @@ -4531,9 +4526,9 @@ dependencies = [ [[package]] name = "foundry-compilers-artifacts-solc" -version = "0.18.4" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7542a600e25ad18fb8782a3314f0f204b50313b14415c30f7caff5988bf7c3c" +checksum = "7d599b54d65a3a8988f4393e8184500e29ebdf73741429cbaffff8f9d63b472b" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -4554,9 +4549,9 @@ dependencies = [ [[package]] name = "foundry-compilers-artifacts-vyper" -version = "0.18.4" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f78a99e9ea726b506de49cf4efc162388a2a9968ef52ca3c25532cfaa97e77b0" +checksum = "769eda82ad26a17abe6134bc04b6d8083c06eab482db6e8a2553435935732d90" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -4569,9 +4564,9 @@ dependencies = [ [[package]] name = "foundry-compilers-core" -version = "0.18.4" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85bc378bf700fdecaacd4d1ecdc72ea80d9cb7f455184eb5cd34606570bd97ba" +checksum = "29a1357e5d8f2817d49cfa251c4bd10100bb4a88ee9755cbdf12f667d4d7831a" dependencies = [ 
"alloy-primitives", "cfg-if", @@ -4617,8 +4612,7 @@ dependencies = [ "serde", "serde_json", "similar-asserts", - "solar-interface", - "solar-parse", + "solar-compiler", "soldeer-core", "tempfile", "thiserror 2.0.16", @@ -4790,7 +4784,7 @@ dependencies = [ "revm-inspectors", "serde", "serde_json", - "solar-parse", + "solar-compiler", "tempfile", "tokio", "tracing", @@ -6353,12 +6347,6 @@ dependencies = [ "web_atoms", ] -[[package]] -name = "match_cfg" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" - [[package]] name = "match_token" version = "0.35.0" @@ -9133,37 +9121,51 @@ dependencies = [ [[package]] name = "solar-ast" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f7f30449c304fd09db4637209dc73bde7ec203e6e5c691fc9eed26b68cd105a" +checksum = "06a528b6d7fb62bb6b99db5dc4ea6d4b85ee2ff910a3e952c9c2fbc9c5b68a23" dependencies = [ "alloy-primitives", "bumpalo", "either", - "num-bigint", "num-rational", "semver 1.0.26", "solar-data-structures", "solar-interface", "solar-macros", "strum 0.27.2", - "typed-arena", +] + +[[package]] +name = "solar-compiler" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f726b10330517ecfa1e66e781ed55fb720ac02e3995453d14f6f90fe53ed8f63" +dependencies = [ + "alloy-primitives", + "solar-ast", + "solar-config", + "solar-data-structures", + "solar-interface", + "solar-macros", + "solar-parse", + "solar-sema", ] [[package]] name = "solar-config" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643ddf85ab917f5643ec47eb79ee6db6c6158cfaf7415e39840e154eecf7176f" +checksum = "0446799e12df8126895ec1721982c53df0149046e668cc7fc88efe27d1d59292" dependencies = [ "strum 0.27.2", ] [[package]] name = "solar-data-structures" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cc21b4df6061e1c825c16faf8e1f16c2341f4c46a2b2a60e03069c4453fc5ac" +checksum = "4022a9a35fb9914f7162ea96f3c6bf00d8066018cc90cf7939b360b1009ce028" dependencies = [ "bumpalo", "index_vec", @@ -9176,9 +9178,9 @@ dependencies = [ [[package]] name = "solar-interface" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2571bfb2f54c5a24688afe6876682117e96f6fd35393398dbac43e5f6f7144" +checksum = "00512f582ca82c245e6f3d512db64a3c57f4c6cf164bfb578d3b6fc879a9f531" dependencies = [ "annotate-snippets", "anstream", @@ -9189,8 +9191,8 @@ dependencies = [ "inturn", "itertools 0.14.0", "itoa", - "match_cfg", "normalize-path", + "once_map", "rayon", "scoped-tls", "serde", @@ -9205,9 +9207,9 @@ dependencies = [ [[package]] name = "solar-macros" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca57257a3b6ef16bd7995d23da3e661e89cebdae6fb9e42e4331ba0f033bd1d" +checksum = "73ecd98081d2aa15f8747a484a772e059df5359c2936e8caa8fd18ecc51e4a8e" dependencies = [ "proc-macro2", "quote", @@ -9216,9 +9218,9 @@ dependencies = [ [[package]] name = "solar-parse" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "081b4d9e2ddd3c7bb90079b3eb253b957ef9eb5c860ed6d6a4068884ec3a8b85" +checksum = "247fbfe3c9f665c8c4bcc4e1a1123ca65213dab2b7dec77960068db8d6196c76" dependencies = [ "alloy-primitives", 
"bitflags 2.9.3", @@ -9228,6 +9230,7 @@ dependencies = [ "num-bigint", "num-rational", "num-traits", + "ruint", "smallvec", "solar-ast", "solar-data-structures", @@ -9237,9 +9240,9 @@ dependencies = [ [[package]] name = "solar-sema" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93b017d4017ee8324e669c6e5e6fe9a282ed07eb6171e5384ddd17b8a854766f" +checksum = "f1cc26771a9627566194b5fda0b8b3ff0f63ab71f028c7ed0d3cc07fb6b2e1f6" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -9260,7 +9263,6 @@ dependencies = [ "strum 0.27.2", "thread_local", "tracing", - "typed-arena", ] [[package]] @@ -10306,12 +10308,6 @@ dependencies = [ "utf-8", ] -[[package]] -name = "typed-arena" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a" - [[package]] name = "typeid" version = "1.0.3" diff --git a/Cargo.toml b/Cargo.toml index 476e93b2d3ae1..163da0863791f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -203,15 +203,13 @@ foundry-wallets = { path = "crates/wallets" } foundry-linking = { path = "crates/linking" } # solc & compilation utilities -foundry-block-explorers = { version = "0.20.0", default-features = false } -foundry-compilers = { version = "0.18.3", default-features = false } +foundry-block-explorers = { version = "0.21.0", default-features = false } +foundry-compilers = { version = "0.19.0", default-features = false } foundry-fork-db = "0.18" solang-parser = { version = "=0.3.9", package = "foundry-solang-parser" } -solar-ast = { version = "=0.1.5", default-features = false } -solar-parse = { version = "=0.1.5", default-features = false } -solar-interface = { version = "=0.1.5", default-features = false } -solar-sema = { version = "=0.1.5", default-features = false } -solar-data-structures = { version = "=0.1.5", default-features = false } +solar = { package = "solar-compiler", version = "=0.1.6", default-features = false } +# TODO: remove in next solar release: https://github.com/paradigmxyz/solar/pull/444 +solar-interface = { version = "=0.1.6", default-features = false } ## alloy alloy-consensus = { version = "1.0.23", default-features = false } @@ -414,8 +412,4 @@ idna_adapter = "=1.1.0" # foundry-fork-db = { git = "https://github.com/foundry-rs/foundry-fork-db", rev = "eee6563" } ## solar -# solar-ast = { git = "https://github.com/paradigmxyz/solar.git", branch = "main" } -# solar-parse = { git = "https://github.com/paradigmxyz/solar.git", branch = "main" } -# solar-interface = { git = "https://github.com/paradigmxyz/solar.git", branch = "main" } -# solar-sema = { git = "https://github.com/paradigmxyz/solar.git", branch = "main" } -# solar-data-structures = { git = "https://github.com/paradigmxyz/solar.git", branch = "main" } +# solar = { package = "solar-compiler", git = "https://github.com/paradigmxyz/solar.git", branch = "main" } diff --git a/crates/chisel/Cargo.toml b/crates/chisel/Cargo.toml index 4695233162bed..deb89169ba364 100644 --- a/crates/chisel/Cargo.toml +++ b/crates/chisel/Cargo.toml @@ -49,7 +49,7 @@ semver.workspace = true serde_json.workspace = true serde.workspace = true solang-parser.workspace = true -solar-parse.workspace = true +solar.workspace = true strum = { workspace = true, features = ["derive"] } time = { version = "0.3", features = ["formatting"] } yansi.workspace = true diff --git a/crates/chisel/src/solidity_helper.rs b/crates/chisel/src/solidity_helper.rs index 
1c588b5e60231..d06a82ce88bef 100644 --- a/crates/chisel/src/solidity_helper.rs +++ b/crates/chisel/src/solidity_helper.rs @@ -14,7 +14,7 @@ use rustyline::{ hint::Hinter, validate::{ValidationContext, ValidationResult, Validator}, }; -use solar_parse::{ +use solar::parse::{ Lexer, interface::Session, token::{Token, TokenKind}, @@ -173,7 +173,7 @@ impl SolidityHelper { /// Enters the session. fn enter(&self, f: impl FnOnce(&Session)) { - self.sess.enter(|| f(&self.sess)); + self.sess.enter_sequential(|| f(&self.sess)); } } @@ -238,7 +238,7 @@ impl Helper for SolidityHelper {} #[expect(non_upper_case_globals)] #[deny(unreachable_patterns)] fn token_style(token: &Token) -> Style { - use solar_parse::{ + use solar::parse::{ interface::kw::*, token::{TokenKind::*, TokenLitKind::*}, }; diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 19851f24ae59e..6302be8d0ee84 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -22,7 +22,7 @@ foundry-wallets.workspace = true foundry-block-explorers.workspace = true foundry-compilers = { workspace = true, features = ["full"] } -solar-sema.workspace = true +solar.workspace = true alloy-eips.workspace = true alloy-dyn-abi.workspace = true diff --git a/crates/cli/src/opts/build/mod.rs b/crates/cli/src/opts/build/mod.rs index 53e6d2d94e88d..c6e9ce6ed64d3 100644 --- a/crates/cli/src/opts/build/mod.rs +++ b/crates/cli/src/opts/build/mod.rs @@ -9,7 +9,7 @@ mod paths; pub use self::paths::ProjectPathOpts; mod utils; -pub use self::utils::{solar_pcx_from_build_opts, solar_pcx_from_solc_project}; +pub use self::utils::{configure_pcx, configure_pcx_from_solc}; // A set of solc compiler settings that can be set via command line arguments, which are intended // to be merged into an existing `foundry_config::Config`. diff --git a/crates/cli/src/opts/build/utils.rs b/crates/cli/src/opts/build/utils.rs index a85d7866df4be..43fb685ae945b 100644 --- a/crates/cli/src/opts/build/utils.rs +++ b/crates/cli/src/opts/build/utils.rs @@ -1,30 +1,29 @@ -use crate::{opts::BuildOpts, utils::LoadConfig}; - use eyre::Result; use foundry_compilers::{ CompilerInput, Graph, Project, artifacts::{Source, Sources}, - multi::{MultiCompilerLanguage, MultiCompilerParsedSource}, + multi::{MultiCompilerLanguage, MultiCompilerParser}, solc::{SolcLanguage, SolcVersionedInput}, }; -use solar_sema::{ParsingContext, interface::Session}; +use foundry_config::Config; +use rayon::prelude::*; +use solar::sema::ParsingContext; use std::path::PathBuf; -/// Builds a Solar [`solar_sema::ParsingContext`] from [`BuildOpts`]. +/// Configures a [`ParsingContext`] from [`Config`]. /// -/// * Configures include paths, remappings and registers all in-memory sources so that solar can -/// operate without touching disk. -/// * If no `project` is provided, it will spin up a new ephemeral project. -/// * If no `target_paths` are provided, all project files are processed. -/// * Only processes the subset of sources with the most up-to-date Solitidy version. -pub fn solar_pcx_from_build_opts<'sess>( - sess: &'sess Session, - build: &BuildOpts, +/// - Configures include paths, remappings +/// - Source files are added if `add_source_file` is set +/// - If no `project` is provided, it will spin up a new ephemeral project. +/// - If no `target_paths` are provided, all project files are processed. +/// - Only processes the subset of sources with the most up-to-date Solidity version. 
+pub fn configure_pcx(
+    pcx: &mut ParsingContext<'_>,
+    config: &Config,
     project: Option<&Project>,
     target_paths: Option<&[PathBuf]>,
-) -> Result<ParsingContext<'sess>> {
+) -> Result<()> {
     // Process build options
-    let config = build.load_config()?;
     let project = match project {
         Some(project) => project,
         None => &config.ephemeral_project()?,
@@ -46,7 +45,7 @@
     };
     // Only process sources with latest Solidity version to avoid conflicts.
-    let graph = Graph::<MultiCompilerParsedSource>::resolve_sources(&project.paths, sources)?;
+    let graph = Graph::<MultiCompilerParser>::resolve_sources(&project.paths, sources)?;
     let (version, sources, _) = graph
         // resolve graph into mapping language -> version -> sources
         .into_sources_by_version(project)?
@@ -68,43 +67,48 @@
         version,
     );
-    Ok(solar_pcx_from_solc_project(sess, project, &solc, true))
+    configure_pcx_from_solc(pcx, project, &solc, true);
+
+    Ok(())
 }
-/// Builds a Solar [`solar_sema::ParsingContext`] from a [`foundry_compilers::Project`] and a
-/// [`SolcVersionedInput`].
+/// Configures a [`ParsingContext`] from a [`Project`] and [`SolcVersionedInput`].
 ///
-/// * Configures include paths, remappings.
-/// * Source files can be manually added if the param `add_source_file` is set to `false`.
+/// - Configures include paths, remappings.
+/// - Source files are added if `add_source_files` is set
-pub fn solar_pcx_from_solc_project<'sess>(
-    sess: &'sess Session,
+pub fn configure_pcx_from_solc(
+    pcx: &mut ParsingContext<'_>,
     project: &Project,
-    solc: &SolcVersionedInput,
+    vinput: &SolcVersionedInput,
     add_source_files: bool,
-) -> ParsingContext<'sess> {
-    // Configure the parsing context with the paths, remappings and sources
-    let mut pcx = ParsingContext::new(sess);
+) {
+    configure_pcx_from_solc_cli(pcx, project, &vinput.cli_settings);
+    if add_source_files {
+        let sources = vinput
+            .input
+            .sources
+            .par_iter()
+            .filter_map(|(path, source)| {
+                pcx.sess.source_map().new_source_file(path.clone(), source.content.as_str()).ok()
+            })
+            .collect::<Vec<_>>();
+        pcx.add_files(sources);
+    }
+}
+fn configure_pcx_from_solc_cli(
+    pcx: &mut ParsingContext<'_>,
+    project: &Project,
+    cli_settings: &foundry_compilers::solc::CliSettings,
+) {
     pcx.file_resolver
-        .set_current_dir(solc.cli_settings.base_path.as_ref().unwrap_or(&project.paths.root));
+        .set_current_dir(cli_settings.base_path.as_ref().unwrap_or(&project.paths.root));
     for remapping in &project.paths.remappings {
-        pcx.file_resolver.add_import_remapping(solar_sema::interface::config::ImportRemapping {
+        pcx.file_resolver.add_import_remapping(solar::sema::interface::config::ImportRemapping {
             context: remapping.context.clone().unwrap_or_default(),
             prefix: remapping.name.clone(),
             path: remapping.path.clone(),
         });
     }
-    pcx.file_resolver.add_include_paths(solc.cli_settings.include_paths.iter().cloned());
-
-    if add_source_files {
-        for (path, source) in &solc.input.sources {
-            if let Ok(src_file) =
-                sess.source_map().new_source_file(path.clone(), source.content.as_str())
-            {
-                pcx.add_file(src_file);
-            }
-        }
-    }
-
-    pcx
+    pcx.file_resolver.add_include_paths(cli_settings.include_paths.iter().cloned());
 }
diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml
index 16e858b2ccc54..506d375977a1e 100644
--- a/crates/common/Cargo.toml
+++ b/crates/common/Cargo.toml
@@ -44,8 +44,7 @@
 alloy-transport.workspace = true
 alloy-consensus = { workspace = true, features = ["k256"] }
 alloy-network.workspace = true
-solar-parse.workspace = true
-solar-sema.workspace = true
+solar.workspace =
true tower.workspace = true diff --git a/crates/common/src/comments/comment.rs b/crates/common/src/comments/comment.rs index b05a7bbdd1251..e299234a2ee10 100644 --- a/crates/common/src/comments/comment.rs +++ b/crates/common/src/comments/comment.rs @@ -1,6 +1,6 @@ //! Modified from [`rustc_ast::util::comments`](https://github.com/rust-lang/rust/blob/07d3fd1d9b9c1f07475b96a9d168564bf528db68/compiler/rustc_ast/src/util/comments.rs). -use solar_parse::{ +use solar::parse::{ ast::{CommentKind, Span}, interface::BytePos, }; diff --git a/crates/common/src/comments/comments.rs b/crates/common/src/comments/comments.rs index fb13654f811fc..28896cc77426d 100644 --- a/crates/common/src/comments/comments.rs +++ b/crates/common/src/comments/comments.rs @@ -1,5 +1,5 @@ use super::comment::{Comment, CommentStyle}; -use solar_parse::{ +use solar::parse::{ ast::{CommentKind, Span}, interface::{source_map::SourceFile, BytePos, CharPos, SourceMap}, lexer::token::RawTokenKind as TokenKind, @@ -87,7 +87,7 @@ fn gather_comments(sf: &SourceFile) -> Vec { } */ - for token in solar_parse::Cursor::new(&text[pos..]) { + for token in solar::parse::Cursor::new(&text[pos..]) { let token_range = pos..pos + token.len as usize; let span = make_span(token_range.clone()); let token_text = &text[token_range]; diff --git a/crates/common/src/comments/mod.rs b/crates/common/src/comments/mod.rs index 6ef9767e13883..b644b858b3f31 100644 --- a/crates/common/src/comments/mod.rs +++ b/crates/common/src/comments/mod.rs @@ -1,7 +1,7 @@ mod comment; use comment::{Comment, CommentStyle}; -use solar_parse::{ +use solar::parse::{ ast::{CommentKind, Span}, interface::{BytePos, CharPos, SourceMap, source_map::SourceFile}, lexer::token::RawTokenKind as TokenKind, @@ -89,7 +89,7 @@ fn gather_comments(sf: &SourceFile) -> Vec { } */ - for token in solar_parse::Cursor::new(&text[pos..]) { + for token in solar::parse::Cursor::new(&text[pos..]) { let token_range = pos..pos + token.len as usize; let span = make_span(token_range.clone()); let token_text = &text[token_range]; diff --git a/crates/common/src/preprocessor/data.rs b/crates/common/src/preprocessor/data.rs index 6cc988137e167..d76c025a9f63a 100644 --- a/crates/common/src/preprocessor/data.rs +++ b/crates/common/src/preprocessor/data.rs @@ -1,9 +1,9 @@ use super::span_to_range; use foundry_compilers::artifacts::{Source, Sources}; use path_slash::PathExt; -use solar_parse::interface::{Session, SourceMap}; -use solar_sema::{ - hir::{Contract, ContractId, Hir}, +use solar::sema::{ + Gcx, + hir::{Contract, ContractId}, interface::source_map::FileName, }; use std::{ @@ -17,21 +17,19 @@ pub type PreprocessorData = BTreeMap; /// Collects preprocessor data from referenced contracts. 
pub(crate) fn collect_preprocessor_data( - sess: &Session, - hir: &Hir<'_>, + gcx: Gcx<'_>, referenced_contracts: &HashSet, ) -> PreprocessorData { let mut data = PreprocessorData::default(); for contract_id in referenced_contracts { - let contract = hir.contract(*contract_id); - let source = hir.source(contract.source); + let contract = gcx.hir.contract(*contract_id); + let source = gcx.hir.source(contract.source); let FileName::Real(path) = &source.file.name else { continue; }; - let contract_data = - ContractData::new(hir, *contract_id, contract, path, source, sess.source_map()); + let contract_data = ContractData::new(gcx, *contract_id, contract, path, source); data.insert(*contract_id, contract_data); } data @@ -77,19 +75,18 @@ pub(crate) struct ContractData { impl ContractData { fn new( - hir: &Hir<'_>, + gcx: Gcx<'_>, contract_id: ContractId, contract: &Contract<'_>, path: &Path, - source: &solar_sema::hir::Source<'_>, - source_map: &SourceMap, + source: &solar::sema::hir::Source<'_>, ) -> Self { let artifact = format!("{}:{}", path.to_slash_lossy(), contract.name); // Process data for contracts with constructor and parameters. let constructor_data = contract .ctor - .map(|ctor_id| hir.function(ctor_id)) + .map(|ctor_id| gcx.hir.function(ctor_id)) .filter(|ctor| !ctor.parameters.is_empty()) .map(|ctor| { let mut abi_encode_args = vec![]; @@ -97,9 +94,10 @@ impl ContractData { let mut arg_index = 0; for param_id in ctor.parameters { let src = source.file.src.as_str(); - let loc = span_to_range(source_map, hir.variable(*param_id).span); + let loc = + span_to_range(gcx.sess.source_map(), gcx.hir.variable(*param_id).span); let mut new_src = src[loc].replace(" memory ", " ").replace(" calldata ", " "); - if let Some(ident) = hir.variable(*param_id).name { + if let Some(ident) = gcx.hir.variable(*param_id).name { abi_encode_args.push(format!("args.{}", ident.name)); } else { // Generate an unique name if constructor arg doesn't have one. diff --git a/crates/common/src/preprocessor/deps.rs b/crates/common/src/preprocessor/deps.rs index 01d90f3a0380e..a5c25cbe9ab13 100644 --- a/crates/common/src/preprocessor/deps.rs +++ b/crates/common/src/preprocessor/deps.rs @@ -4,9 +4,9 @@ use super::{ }; use foundry_compilers::Updates; use itertools::Itertools; -use solar_parse::interface::Session; -use solar_sema::{ - hir::{CallArgs, ContractId, Expr, ExprKind, Hir, NamedArg, Stmt, StmtKind, TypeKind, Visit}, +use solar::sema::{ + Gcx, Hir, + hir::{CallArgs, ContractId, Expr, ExprKind, NamedArg, Stmt, StmtKind, TypeKind, Visit}, interface::{SourceMap, data_structures::Never, source_map::FileName}, }; use std::{ @@ -25,8 +25,7 @@ pub(crate) struct PreprocessorDependencies { impl PreprocessorDependencies { pub fn new( - sess: &Session, - hir: &Hir<'_>, + gcx: Gcx<'_>, paths: &[PathBuf], src_dir: &Path, root_dir: &Path, @@ -34,9 +33,9 @@ impl PreprocessorDependencies { ) -> Self { let mut preprocessed_contracts = BTreeMap::new(); let mut referenced_contracts = HashSet::new(); - for contract_id in hir.contract_ids() { - let contract = hir.contract(contract_id); - let source = hir.source(contract.source); + for contract_id in gcx.hir.contract_ids() { + let contract = gcx.hir.contract(contract_id); + let source = gcx.hir.source(contract.source); let FileName::Real(path) = &source.file.name else { continue; @@ -52,8 +51,8 @@ impl PreprocessorDependencies { // Do not collect dependencies for mock contracts. Walk through base contracts and // check if they're from src dir. 
if contract.linearized_bases.iter().any(|base_contract_id| { - let base_contract = hir.contract(*base_contract_id); - let FileName::Real(path) = &hir.source(base_contract.source).file.name else { + let base_contract = gcx.hir.contract(*base_contract_id); + let FileName::Real(path) = &gcx.hir.source(base_contract.source).file.name else { return false; }; path.starts_with(src_dir) @@ -69,12 +68,8 @@ impl PreprocessorDependencies { mocks.remove(&root_dir.join(path)); } - let mut deps_collector = BytecodeDependencyCollector::new( - sess.source_map(), - hir, - source.file.src.as_str(), - src_dir, - ); + let mut deps_collector = + BytecodeDependencyCollector::new(gcx, source.file.src.as_str(), src_dir); // Analyze current contract. let _ = deps_collector.walk_contract(contract); // Ignore empty test contracts declared in source files with other contracts. @@ -122,44 +117,30 @@ pub(crate) struct BytecodeDependency { } /// Walks over contract HIR and collects [`BytecodeDependency`]s and referenced contracts. -struct BytecodeDependencyCollector<'hir> { +struct BytecodeDependencyCollector<'gcx, 'src> { /// Source map, used for determining contract item locations. - source_map: &'hir SourceMap, - /// Parsed HIR. - hir: &'hir Hir<'hir>, + gcx: Gcx<'gcx>, /// Source content of current contract. - src: &'hir str, + src: &'src str, /// Project source dir, used to determine if referenced contract is a source contract. - src_dir: &'hir Path, + src_dir: &'src Path, /// Dependencies collected for current contract. dependencies: Vec, /// Unique HIR ids of contracts referenced from current contract. referenced_contracts: HashSet, } -impl<'hir> BytecodeDependencyCollector<'hir> { - fn new( - source_map: &'hir SourceMap, - hir: &'hir Hir<'hir>, - src: &'hir str, - src_dir: &'hir Path, - ) -> Self { - Self { - source_map, - hir, - src, - src_dir, - dependencies: vec![], - referenced_contracts: HashSet::default(), - } +impl<'gcx, 'src> BytecodeDependencyCollector<'gcx, 'src> { + fn new(gcx: Gcx<'gcx>, src: &'src str, src_dir: &'src Path) -> Self { + Self { gcx, src, src_dir, dependencies: vec![], referenced_contracts: HashSet::default() } } /// Collects reference identified as bytecode dependency of analyzed contract. /// Discards any reference that is not in project src directory (e.g. external /// libraries or mock contracts that extend source contracts). 
     fn collect_dependency(&mut self, dependency: BytecodeDependency) {
-        let contract = self.hir.contract(dependency.referenced_contract);
-        let source = self.hir.source(contract.source);
+        let contract = self.gcx.hir.contract(dependency.referenced_contract);
+        let source = self.gcx.hir.source(contract.source);
         let FileName::Real(path) = &source.file.name else {
             return;
         };
@@ -175,19 +156,19 @@
     }
 }
-impl<'hir> Visit<'hir> for BytecodeDependencyCollector<'hir> {
+impl<'gcx> Visit<'gcx> for BytecodeDependencyCollector<'gcx, '_> {
     type BreakValue = Never;
-    fn hir(&self) -> &'hir Hir<'hir> {
-        self.hir
+    fn hir(&self) -> &'gcx Hir<'gcx> {
+        &self.gcx.hir
     }
-    fn visit_expr(&mut self, expr: &'hir Expr<'hir>) -> ControlFlow<Self::BreakValue> {
+    fn visit_expr(&mut self, expr: &'gcx Expr<'gcx>) -> ControlFlow<Self::BreakValue> {
         match &expr.kind {
             ExprKind::Call(call_expr, call_args, named_args) => {
                 if let Some(dependency) = handle_call_expr(
                     self.src,
-                    self.source_map,
+                    self.gcx.sess.source_map(),
                     expr,
                     call_expr,
                     call_args,
@@ -205,7 +186,7 @@
                 {
                     self.collect_dependency(BytecodeDependency {
                         kind: BytecodeDependencyKind::CreationCode,
-                        loc: span_to_range(self.source_map, expr.span),
+                        loc: span_to_range(self.gcx.sess.source_map(), expr.span),
                         referenced_contract: contract_id,
                     });
                 }
@@ -215,12 +196,12 @@
         self.walk_expr(expr)
     }
-    fn visit_stmt(&mut self, stmt: &'hir Stmt<'hir>) -> ControlFlow<Self::BreakValue> {
+    fn visit_stmt(&mut self, stmt: &'gcx Stmt<'gcx>) -> ControlFlow<Self::BreakValue> {
         if let StmtKind::Try(stmt_try) = stmt.kind
             && let ExprKind::Call(call_expr, call_args, named_args) = &stmt_try.expr.kind
             && let Some(dependency) = handle_call_expr(
                 self.src,
-                self.source_map,
+                self.gcx.sess.source_map(),
                 &stmt_try.expr,
                 call_expr,
                 call_args,
@@ -304,14 +285,14 @@
 /// Goes over all test/script files and replaces bytecode dependencies with cheatcode
 /// invocations.
pub(crate) fn remove_bytecode_dependencies( - hir: &Hir<'_>, + gcx: Gcx<'_>, deps: &PreprocessorDependencies, data: &PreprocessorData, ) -> Updates { let mut updates = Updates::default(); for (contract_id, deps) in &deps.preprocessed_contracts { - let contract = hir.contract(*contract_id); - let source = hir.source(contract.source); + let contract = gcx.hir.contract(*contract_id); + let source = gcx.hir.source(contract.source); let FileName::Real(path) = &source.file.name else { continue; }; diff --git a/crates/common/src/preprocessor/mod.rs b/crates/common/src/preprocessor/mod.rs index a0257f94f7893..e890093bd2cb7 100644 --- a/crates/common/src/preprocessor/mod.rs +++ b/crates/common/src/preprocessor/mod.rs @@ -1,17 +1,17 @@ use foundry_compilers::{ - Compiler, Language, ProjectPathsConfig, apply_updates, + Compiler, ProjectPathsConfig, SourceParser, apply_updates, artifacts::SolcLanguage, error::Result, multi::{MultiCompiler, MultiCompilerInput, MultiCompilerLanguage}, project::Preprocessor, solc::{SolcCompiler, SolcVersionedInput}, }; -use solar_parse::{ - ast::Span, - interface::{Session, SourceMap}, +use solar::parse::{ast::Span, interface::SourceMap}; +use std::{ + collections::HashSet, + ops::{ControlFlow, Range}, + path::PathBuf, }; -use solar_sema::{ParsingContext, thread_local::ThreadLocal}; -use std::{collections::HashSet, ops::Range, path::PathBuf}; mod data; use data::{collect_preprocessor_data, create_deploy_helpers}; @@ -49,10 +49,11 @@ impl Preprocessor for DynamicTestLinkingPreprocessor { return Ok(()); } - let sess = solar_session_from_solc(input); - let _ = sess.enter_parallel(|| -> solar_parse::interface::Result { - // Set up the parsing context with the project paths. - let mut parsing_context = solar_pcx_from_solc_no_sources(&sess, input, paths); + let mut compiler = + foundry_compilers::resolver::parse::SolParser::new(paths.with_language_ref()) + .into_compiler(); + let _ = compiler.enter_mut(|compiler| -> solar::parse::interface::Result { + let mut pcx = compiler.parse(); // Add the sources into the context. // Include all sources in the source map so as to not re-load them from disk, but only @@ -60,44 +61,44 @@ impl Preprocessor for DynamicTestLinkingPreprocessor { let mut preprocessed_paths = vec![]; let sources = &mut input.input.sources; for (path, source) in sources.iter() { - if let Ok(src_file) = - sess.source_map().new_source_file(path.clone(), source.content.as_str()) + if let Ok(src_file) = compiler + .sess() + .source_map() + .new_source_file(path.clone(), source.content.as_str()) && paths.is_test_or_script(path) { - parsing_context.add_file(src_file); + pcx.add_file(src_file); preprocessed_paths.push(path.clone()); } } // Parse and preprocess. - let hir_arena = ThreadLocal::new(); - if let Some(gcx) = parsing_context.parse_and_lower(&hir_arena)? { - let hir = &gcx.get().hir; - // Collect tests and scripts dependencies and identify mock contracts. - let deps = PreprocessorDependencies::new( - &sess, - hir, - &preprocessed_paths, - &paths.paths_relative().sources, - &paths.root, - mocks, - ); - // Collect data of source contracts referenced in tests and scripts. - let data = collect_preprocessor_data(&sess, hir, &deps.referenced_contracts); - - // Extend existing sources with preprocessor deploy helper sources. - sources.extend(create_deploy_helpers(&data)); - - // Generate and apply preprocessor source updates. 
- apply_updates(sources, remove_bytecode_dependencies(hir, &deps, &data)); - } + pcx.parse(); + let ControlFlow::Continue(()) = compiler.lower_asts()? else { return Ok(()) }; + let gcx = compiler.gcx(); + // Collect tests and scripts dependencies and identify mock contracts. + let deps = PreprocessorDependencies::new( + gcx, + &preprocessed_paths, + &paths.paths_relative().sources, + &paths.root, + mocks, + ); + // Collect data of source contracts referenced in tests and scripts. + let data = collect_preprocessor_data(gcx, &deps.referenced_contracts); + + // Extend existing sources with preprocessor deploy helper sources. + sources.extend(create_deploy_helpers(&data)); + + // Generate and apply preprocessor source updates. + apply_updates(sources, remove_bytecode_dependencies(gcx, &deps, &data)); Ok(()) }); // Warn if any diagnostics emitted during content parsing. - if let Err(err) = sess.emitted_errors().unwrap() { - warn!("failed preprocessing {err}"); + if let Err(err) = compiler.sess().emitted_errors().unwrap() { + warn!("failed preprocessing:\n{err}"); } Ok(()) @@ -121,42 +122,3 @@ impl Preprocessor for DynamicTestLinkingPreprocessor { self.preprocess(solc, input, &paths, mocks) } } - -fn solar_session_from_solc(solc: &SolcVersionedInput) -> Session { - use solar_parse::interface::config; - - Session::builder() - .with_buffer_emitter(Default::default()) - .opts(config::Opts { - language: match solc.input.language { - SolcLanguage::Solidity => config::Language::Solidity, - SolcLanguage::Yul => config::Language::Yul, - _ => unimplemented!(), - }, - - // TODO: ... - /* - evm_version: solc.input.settings.evm_version, - */ - ..Default::default() - }) - .build() -} - -fn solar_pcx_from_solc_no_sources<'sess>( - sess: &'sess Session, - solc: &SolcVersionedInput, - paths: &ProjectPathsConfig, -) -> ParsingContext<'sess> { - let mut pcx = ParsingContext::new(sess); - pcx.file_resolver.set_current_dir(solc.cli_settings.base_path.as_ref().unwrap_or(&paths.root)); - for remapping in &paths.remappings { - pcx.file_resolver.add_import_remapping(solar_sema::interface::config::ImportRemapping { - context: remapping.context.clone().unwrap_or_default(), - prefix: remapping.name.clone(), - path: remapping.path.clone(), - }); - } - pcx.file_resolver.add_include_paths(solc.cli_settings.include_paths.iter().cloned()); - pcx -} diff --git a/crates/config/Cargo.toml b/crates/config/Cargo.toml index ae743a751a39c..14501769887c0 100644 --- a/crates/config/Cargo.toml +++ b/crates/config/Cargo.toml @@ -22,8 +22,7 @@ alloy-primitives = { workspace = true, features = ["serde"] } revm.workspace = true -solar-parse.workspace = true -solar-interface.workspace = true +solar.workspace = true dirs.workspace = true dunce.workspace = true diff --git a/crates/config/src/inline/natspec.rs b/crates/config/src/inline/natspec.rs index 400637d1cf45e..0c9218dbd18f1 100644 --- a/crates/config/src/inline/natspec.rs +++ b/crates/config/src/inline/natspec.rs @@ -6,13 +6,7 @@ use foundry_compilers::{ }; use itertools::Itertools; use serde_json::Value; -use solar_parse::{ - Parser, - ast::{ - Arena, CommentKind, Item, ItemKind, - interface::{self, Session}, - }, -}; +use solar::parse::ast; use std::{collections::BTreeMap, path::Path}; /// Convenient struct to hold in-line per-test configurations @@ -32,11 +26,12 @@ impl NatSpec { /// Factory function that extracts a vector of [`NatSpec`] instances from /// a solc compiler output. The root path is to express contract base dirs. 
/// That is essential to match per-test configs at runtime. + #[instrument(name = "NatSpec::parse", skip_all)] pub fn parse(output: &ProjectCompileOutput, root: &Path) -> Vec { let mut natspecs: Vec = vec![]; - let solc = SolcParser::new(); let solar = SolarParser::new(); + let solc = SolcParser::new(); for (id, artifact) in output.artifact_ids() { let abs_path = id.source.as_path(); let path = abs_path.strip_prefix(root).unwrap_or(abs_path); @@ -44,16 +39,22 @@ impl NatSpec { // `id.identifier` but with the stripped path. let contract = format!("{}:{}", path.display(), id.name); - let mut used_solc_ast = false; - if let Some(ast) = &artifact.ast + let mut used_solar = false; + let compiler = output.parser().solc().compiler(); + compiler.enter(|compiler| { + if let Some((_, source)) = compiler.gcx().get_ast_source(abs_path) + && let Some(ast) = &source.ast + { + solar.parse_ast(&mut natspecs, ast, &contract, contract_name); + used_solar = true; + } + }); + + if !used_solar + && let Some(ast) = &artifact.ast && let Some(node) = solc.contract_root_node(&ast.nodes, &contract) { solc.parse(&mut natspecs, &contract, node, true); - used_solc_ast = true; - } - - if !used_solc_ast && let Ok(src) = std::fs::read_to_string(abs_path) { - solar.parse(&mut natspecs, &src, &contract, contract_name); } } @@ -211,19 +212,14 @@ impl SolarParser { Self { _private: () } } - fn parse( + fn parse_ast( &self, natspecs: &mut Vec, - src: &str, + source_unit: &ast::SourceUnit<'_>, contract_id: &str, contract_name: &str, ) { - // Fast path to avoid parsing the file. - if !src.contains(INLINE_CONFIG_PREFIX) { - return; - } - - let mut handle_docs = |item: &Item<'_>| { + let mut handle_docs = |item: &ast::Item<'_>| { if item.docs.is_empty() { return; } @@ -236,8 +232,8 @@ impl SolarParser { return None; } match d.kind { - CommentKind::Line => Some(s.trim().to_string()), - CommentKind::Block => Some( + ast::CommentKind::Line => Some(s.trim().to_string()), + ast::CommentKind::Block => Some( s.lines() .filter(|line| line.contains(INLINE_CONFIG_PREFIX)) .map(|line| line.trim_start().trim_start_matches('*').trim()) @@ -254,7 +250,7 @@ impl SolarParser { item.docs.iter().map(|doc| doc.span).reduce(|a, b| a.to(b)).unwrap_or_default(); natspecs.push(NatSpec { contract: contract_id.to_string(), - function: if let ItemKind::Function(f) = &item.kind { + function: if let ast::ItemKind::Function(f) = &item.kind { Some( f.header .name @@ -269,6 +265,42 @@ impl SolarParser { }); }; + for item in source_unit.items.iter() { + let ast::ItemKind::Contract(c) = &item.kind else { continue }; + if c.name.as_str() != contract_name { + continue; + } + + // Handle contract level doc comments. + handle_docs(item); + + // Handle function level doc comments. + for item in c.body.iter() { + let ast::ItemKind::Function(_) = &item.kind else { continue }; + handle_docs(item); + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + use solar::parse::{ + Parser, + ast::{ + Arena, + interface::{self, Session}, + }, + }; + + fn parse(natspecs: &mut Vec, src: &str, contract_id: &str, contract_name: &str) { + // Fast path to avoid parsing the file. 
+ if !src.contains(INLINE_CONFIG_PREFIX) { + return; + } + let sess = Session::builder() .with_silent_emitter(Some("Inline config parsing failed".to_string())) .build(); @@ -284,31 +316,11 @@ impl SolarParser { let source_unit = parser.parse_file().map_err(|e| e.emit())?; - for item in source_unit.items.iter() { - let ItemKind::Contract(c) = &item.kind else { continue }; - if c.name.as_str() != contract_name { - continue; - } - - // Handle contract level doc comments. - handle_docs(item); - - // Handle function level doc comments. - for item in c.body.iter() { - let ItemKind::Function(_) = &item.kind else { continue }; - handle_docs(item); - } - } + SolarParser::new().parse_ast(natspecs, &source_unit, contract_id, contract_name); Ok(()) }); } -} - -#[cfg(test)] -mod tests { - use super::*; - use serde_json::json; #[test] fn can_reject_invalid_profiles() { @@ -366,8 +378,7 @@ function f2() {} /** forge-config: default.fuzz.runs = 800 */ function f3() {} "; let mut natspecs = vec![]; let id = || "path.sol:C".to_string(); - let solar_parser = SolarParser::new(); - solar_parser.parse(&mut natspecs, src, &id(), "C"); + parse(&mut natspecs, src, &id(), "C"); assert_eq!( natspecs, [ @@ -422,9 +433,8 @@ contract FuzzInlineConf is DSTest { } "#; let mut natspecs = vec![]; - let solar = SolarParser::new(); let id = || "inline/FuzzInlineConf.t.sol:FuzzInlineConf".to_string(); - solar.parse(&mut natspecs, src, &id(), "FuzzInlineConf"); + parse(&mut natspecs, src, &id(), "FuzzInlineConf"); assert_eq!( natspecs, [ @@ -511,9 +521,8 @@ contract FuzzInlineConf2 is DSTest { } "#; let mut natspecs = vec![]; - let solar = SolarParser::new(); let id = || "inline/FuzzInlineConf.t.sol:FuzzInlineConf".to_string(); - solar.parse(&mut natspecs, src, &id(), "FuzzInlineConf"); + parse(&mut natspecs, src, &id(), "FuzzInlineConf"); assert_eq!( natspecs, [NatSpec { @@ -526,7 +535,7 @@ contract FuzzInlineConf2 is DSTest { let mut natspecs = vec![]; let id = || "inline/FuzzInlineConf2.t.sol:FuzzInlineConf2".to_string(); - solar.parse(&mut natspecs, src, &id(), "FuzzInlineConf2"); + parse(&mut natspecs, src, &id(), "FuzzInlineConf2"); assert_eq!( natspecs, [NatSpec { @@ -555,9 +564,8 @@ contract FuzzInlineConf is DSTest { function testInlineConfFuzz2() {} }"#; let mut natspecs = vec![]; - let solar = SolarParser::new(); let id = || "inline/FuzzInlineConf.t.sol:FuzzInlineConf".to_string(); - solar.parse(&mut natspecs, src, &id(), "FuzzInlineConf"); + parse(&mut natspecs, src, &id(), "FuzzInlineConf"); assert_eq!( natspecs, [ diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index b126069c90770..e7ec1854d4dc0 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -36,7 +36,7 @@ use foundry_compilers::{ vyper::{Vyper, VyperSettings}, }, error::SolcError, - multi::{MultiCompilerParsedSource, MultiCompilerRestrictions}, + multi::{MultiCompilerParser, MultiCompilerRestrictions}, solc::{CliSettings, SolcSettings}, }; use regex::Regex; @@ -130,6 +130,8 @@ pub use compilation::{CompilationRestrictions, SettingsOverrides}; pub mod extend; use extend::Extends; +pub use semver; + /// Foundry configuration /// /// # Defaults @@ -983,7 +985,7 @@ impl Config { return Ok(BTreeMap::new()); } - let graph = Graph::::resolve(paths)?; + let graph = Graph::::resolve(paths)?; let (sources, _) = graph.into_sources(); for res in &self.compilation_restrictions { diff --git a/crates/config/src/lint.rs b/crates/config/src/lint.rs index 472bf3dd79266..a7e0d9f80f118 100644 --- a/crates/config/src/lint.rs +++ 
b/crates/config/src/lint.rs @@ -3,7 +3,7 @@ use clap::ValueEnum; use core::fmt; use serde::{Deserialize, Deserializer, Serialize}; -use solar_interface::diagnostics::Level; +use solar::interface::diagnostics::Level; use std::str::FromStr; use yansi::Paint; diff --git a/crates/evm/traces/Cargo.toml b/crates/evm/traces/Cargo.toml index f555d619fa228..6a2ba17b23372 100644 --- a/crates/evm/traces/Cargo.toml +++ b/crates/evm/traces/Cargo.toml @@ -41,7 +41,7 @@ tokio = { workspace = true, features = ["time", "macros"] } tracing.workspace = true tempfile.workspace = true rayon.workspace = true -solar-parse.workspace = true +solar.workspace = true revm.workspace = true [dev-dependencies] diff --git a/crates/evm/traces/src/debug/sources.rs b/crates/evm/traces/src/debug/sources.rs index 579291ea6fd98..48a4b4bae7342 100644 --- a/crates/evm/traces/src/debug/sources.rs +++ b/crates/evm/traces/src/debug/sources.rs @@ -11,7 +11,7 @@ use foundry_compilers::{ use foundry_evm_core::ic::PcIcMap; use foundry_linking::Linker; use rayon::prelude::*; -use solar_parse::{Parser, interface::Session}; +use solar::parse::{Parser, interface::Session}; use std::{ collections::{BTreeMap, HashMap, HashSet}, fmt::Write, @@ -43,14 +43,14 @@ impl SourceData { } MultiCompilerLanguage::Solc(_) => { let sess = Session::builder().with_silent_emitter(None).build(); - let _ = sess.enter(|| -> solar_parse::interface::Result<()> { - let arena = solar_parse::ast::Arena::new(); + let _ = sess.enter(|| -> solar::parse::interface::Result<()> { + let arena = solar::parse::ast::Arena::new(); let filename = path.clone().into(); let mut parser = Parser::from_source_code(&sess, &arena, filename, source.to_string())?; let ast = parser.parse_file().map_err(|e| e.emit())?; for item in ast.items { - if let solar_parse::ast::ItemKind::Contract(contract) = &item.kind { + if let solar::parse::ast::ItemKind::Contract(contract) = &item.kind { let range = item.span.lo().to_usize()..item.span.hi().to_usize(); contract_definitions.push((contract.name.to_string(), range)); } diff --git a/crates/forge/Cargo.toml b/crates/forge/Cargo.toml index 59420cd7ce2e3..d7582ac321ed8 100644 --- a/crates/forge/Cargo.toml +++ b/crates/forge/Cargo.toml @@ -77,8 +77,7 @@ semver.workspace = true serde_json.workspace = true similar = { version = "2", features = ["inline"] } solang-parser.workspace = true -solar-parse.workspace = true -solar-sema.workspace = true +solar.workspace = true strum = { workspace = true, features = ["derive"] } thiserror.workspace = true tokio = { workspace = true, features = ["time"] } diff --git a/crates/forge/src/cmd/bind_json.rs b/crates/forge/src/cmd/bind_json.rs index 59c5f741658b8..3ffa650f77d67 100644 --- a/crates/forge/src/cmd/bind_json.rs +++ b/crates/forge/src/cmd/bind_json.rs @@ -2,14 +2,14 @@ use super::eip712::Resolver; use clap::{Parser, ValueHint}; use eyre::Result; use foundry_cli::{ - opts::{BuildOpts, solar_pcx_from_solc_project}, + opts::{BuildOpts, configure_pcx_from_solc}, utils::LoadConfig, }; use foundry_common::{TYPE_BINDING_PREFIX, fs}; use foundry_compilers::{ CompilerInput, Graph, Project, artifacts::{Source, Sources}, - multi::{MultiCompilerLanguage, MultiCompilerParsedSource}, + multi::{MultiCompilerLanguage, MultiCompilerParser}, solc::{SolcLanguage, SolcVersionedInput}, }; use foundry_config::Config; @@ -17,12 +17,11 @@ use itertools::Itertools; use path_slash::PathExt; use rayon::prelude::*; use semver::Version; -use solar_parse::{ +use solar::parse::{ Parser as SolarParser, ast::{self, Arena, FunctionKind, Span, 
VarMut, interface::source_map::FileName, visit::Visit}, interface::Session, }; -use solar_sema::thread_local::ThreadLocal; use std::{ collections::{BTreeMap, BTreeSet, HashSet}, fmt::Write, @@ -54,7 +53,7 @@ impl BindJsonArgs { // Step 1: Read and preprocess sources let sources = project.paths.read_input_files()?; - let graph = Graph::::resolve_sources(&project.paths, sources)?; + let graph = Graph::::resolve_sources(&project.paths, sources)?; // We only generate bindings for a single Solidity version to avoid conflicts. let (version, mut sources, _) = graph @@ -104,7 +103,7 @@ impl BindJsonArgs { /// in most of the cases. fn preprocess_sources(&self, sources: &mut Sources) -> Result<()> { let sess = Session::builder().with_stderr_emitter().build(); - let result = sess.enter_parallel(|| -> solar_parse::interface::Result<()> { + let result = sess.enter(|| -> solar::parse::interface::Result<()> { sources.0.par_iter_mut().try_for_each(|(path, source)| { let mut content = Arc::try_unwrap(std::mem::take(&mut source.content)).unwrap(); @@ -147,12 +146,14 @@ impl BindJsonArgs { let mut sess = Session::builder().with_stderr_emitter().build(); sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false); + let mut compiler = solar::sema::Compiler::new(sess); let mut structs_to_write = Vec::new(); - sess.enter_parallel(|| -> Result<()> { + compiler.enter_mut(|compiler| -> Result<()> { // Set up the parsing context with the project paths, without adding the source files - let mut parsing_context = solar_pcx_from_solc_project(&sess, project, &input, false); + let mut pcx = compiler.parse(); + configure_pcx_from_solc(&mut pcx, project, &input, false); let mut target_files = HashSet::new(); for (path, source) in &input.input.sources { @@ -171,51 +172,50 @@ impl BindJsonArgs { continue; } - if let Ok(src_file) = - sess.source_map().new_source_file(path.clone(), source.content.as_str()) + if let Ok(src_file) = compiler + .sess() + .source_map() + .new_source_file(path.clone(), source.content.as_str()) { target_files.insert(src_file.stable_id); - parsing_context.add_file(src_file); + pcx.add_file(src_file); } } // Parse and resolve - let hir_arena = ThreadLocal::new(); - if let Ok(Some(gcx)) = parsing_context.parse_and_lower(&hir_arena) { - let hir = &gcx.get().hir; - let resolver = Resolver::new(gcx); - for id in resolver.struct_ids() { - if let Some(schema) = resolver.resolve_struct_eip712(id) { - let def = hir.strukt(id); - let source = hir.source(def.source); - - if !target_files.contains(&source.file.stable_id) { - continue; - } - - if let FileName::Real(ref path) = source.file.name { - structs_to_write.push(StructToWrite { - name: def.name.as_str().into(), - contract_name: def - .contract - .map(|id| hir.contract(id).name.as_str().into()), - path: path - .strip_prefix(root) - .unwrap_or_else(|_| path) - .to_path_buf(), - schema, - // will be filled later - import_alias: None, - name_in_fns: String::new(), - }); - } + pcx.parse(); + let Ok(ControlFlow::Continue(())) = compiler.lower_asts() else { return Ok(()) }; + let gcx = compiler.gcx(); + let hir = &gcx.hir; + let resolver = Resolver::new(gcx); + for id in resolver.struct_ids() { + if let Some(schema) = resolver.resolve_struct_eip712(id) { + let def = hir.strukt(id); + let source = hir.source(def.source); + + if !target_files.contains(&source.file.stable_id) { + continue; + } + + if let FileName::Real(path) = &source.file.name { + structs_to_write.push(StructToWrite { + name: def.name.as_str().into(), + contract_name: def + .contract + 
.map(|id| hir.contract(id).name.as_str().into()), + path: path.strip_prefix(root).unwrap_or(path).to_path_buf(), + schema, + // will be filled later + import_alias: None, + name_in_fns: String::new(), + }); } } } Ok(()) })?; - eyre::ensure!(sess.dcx.has_errors().is_ok(), "errors occurred"); + eyre::ensure!(compiler.sess().dcx.has_errors().is_ok(), "errors occurred"); // Resolve import aliases and function names self.resolve_conflicts(&mut structs_to_write); @@ -426,7 +426,7 @@ impl PreprocessorVisitor { } impl<'ast> Visit<'ast> for PreprocessorVisitor { - type BreakValue = solar_parse::interface::data_structures::Never; + type BreakValue = solar::parse::interface::data_structures::Never; fn visit_item_function( &mut self, diff --git a/crates/forge/src/cmd/build.rs b/crates/forge/src/cmd/build.rs index 35354fd7a72b0..936384a352f80 100644 --- a/crates/forge/src/cmd/build.rs +++ b/crates/forge/src/cmd/build.rs @@ -3,7 +3,7 @@ use clap::Parser; use eyre::{Result, eyre}; use forge_lint::{linter::Linter, sol::SolidityLinter}; use foundry_cli::{ - opts::{BuildOpts, solar_pcx_from_build_opts}, + opts::{BuildOpts, configure_pcx}, utils::{LoadConfig, cache_local_signatures}, }; use foundry_common::{compile::ProjectCompiler, shell}; @@ -168,23 +168,15 @@ impl BuildArgs { .collect::>(); if !input_files.is_empty() { - let sess = linter.init(); - - let pcx = solar_pcx_from_build_opts( - &sess, - &self.build, - Some(project), - Some(&input_files), - )?; - linter.early_lint(&input_files, pcx); - - let pcx = solar_pcx_from_build_opts( - &sess, - &self.build, - Some(project), - Some(&input_files), - )?; - linter.late_lint(&input_files, pcx); + let mut compiler = linter.init(); + compiler.enter_mut(|compiler| -> Result<()> { + let mut pcx = compiler.parse(); + configure_pcx(&mut pcx, config, Some(project), Some(&input_files))?; + pcx.parse(); + let _ = compiler.lower_asts(); + Ok(()) + })?; + linter.lint(&input_files, &mut compiler); } } diff --git a/crates/forge/src/cmd/eip712.rs b/crates/forge/src/cmd/eip712.rs index 3c72e0416f512..d227406f9e77d 100644 --- a/crates/forge/src/cmd/eip712.rs +++ b/crates/forge/src/cmd/eip712.rs @@ -1,18 +1,23 @@ use alloy_primitives::{B256, keccak256}; use clap::{Parser, ValueHint}; use eyre::Result; -use foundry_cli::opts::{BuildOpts, solar_pcx_from_build_opts}; +use foundry_cli::{ + opts::{BuildOpts, configure_pcx}, + utils::LoadConfig, +}; use serde::Serialize; -use solar_parse::interface::Session; -use solar_sema::{ - GcxWrapper, Hir, - hir::StructId, - thread_local::ThreadLocal, - ty::{Ty, TyKind}, +use solar::{ + parse::interface::Session, + sema::{ + Gcx, Hir, + hir::StructId, + ty::{Ty, TyKind}, + }, }; use std::{ collections::BTreeMap, fmt::{Display, Formatter, Result as FmtResult, Write}, + ops::ControlFlow, path::{Path, PathBuf}, slice, }; @@ -52,23 +57,21 @@ impl Display for Eip712Output { impl Eip712Args { pub fn run(self) -> Result<()> { + let config = self.build.load_config()?; + let mut sess = Session::builder().with_stderr_emitter().build(); sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false); + let mut compiler = solar::sema::Compiler::new(sess); - sess.enter_parallel(|| -> Result<()> { + compiler.enter_mut(|compiler| -> Result<()> { // Set up the parsing context with the project paths and sources. 
- let parsing_context = solar_pcx_from_build_opts( - &sess, - &self.build, - None, - Some(slice::from_ref(&self.target_path)), - )?; + let mut pcx = compiler.parse(); + configure_pcx(&mut pcx, &config, None, Some(slice::from_ref(&self.target_path)))?; // Parse and resolve - let hir_arena = ThreadLocal::new(); - let Ok(Some(gcx)) = parsing_context.parse_and_lower(&hir_arena) else { - return Err(eyre::eyre!("failed parsing")); - }; + pcx.parse(); + let Ok(ControlFlow::Continue(())) = compiler.lower_asts() else { return Ok(()) }; + let gcx = compiler.gcx(); let resolver = Resolver::new(gcx); let outputs = resolver @@ -94,7 +97,7 @@ impl Eip712Args { Ok(()) })?; - eyre::ensure!(sess.dcx.has_errors().is_ok(), "errors occurred"); + eyre::ensure!(compiler.sess().dcx.has_errors().is_ok(), "errors occurred"); Ok(()) } @@ -103,19 +106,19 @@ impl Eip712Args { /// Generates the EIP-712 `encodeType` string for a given struct. /// /// Requires a reference to the source HIR. -pub struct Resolver<'hir> { - gcx: GcxWrapper<'hir>, +pub struct Resolver<'gcx> { + gcx: Gcx<'gcx>, } -impl<'hir> Resolver<'hir> { +impl<'gcx> Resolver<'gcx> { /// Constructs a new [`Resolver`] for the supplied [`Hir`] instance. - pub fn new(gcx: GcxWrapper<'hir>) -> Self { + pub fn new(gcx: Gcx<'gcx>) -> Self { Self { gcx } } #[inline] - fn hir(&self) -> &'hir Hir<'hir> { - &self.gcx.get().hir + fn hir(&self) -> &'gcx Hir<'gcx> { + &self.gcx.hir } /// Returns the [`StructId`]s of every user-defined struct in source order. @@ -128,7 +131,7 @@ impl<'hir> Resolver<'hir> { let strukt = self.hir().strukt(id).name.as_str(); match self.hir().strukt(id).contract { Some(cid) => { - let full_name = self.gcx.get().contract_fully_qualified_name(cid).to_string(); + let full_name = self.gcx.contract_fully_qualified_name(cid).to_string(); let relevant = Path::new(&full_name) .file_name() .and_then(|s| s.to_str()) @@ -166,7 +169,7 @@ impl<'hir> Resolver<'hir> { for (idx, field_id) in def.fields.iter().enumerate() { let field = self.hir().variable(*field_id); - let ty = self.resolve_type(self.gcx.get().type_of_hir_ty(&field.ty), subtypes)?; + let ty = self.resolve_type(self.gcx.type_of_hir_ty(&field.ty), subtypes)?; write!(result, "{ty} {name}", name = field.name?.as_str()).ok()?; @@ -196,7 +199,7 @@ impl<'hir> Resolver<'hir> { fn resolve_type( &self, - ty: Ty<'hir>, + ty: Ty<'gcx>, subtypes: &mut BTreeMap, ) -> Option { let ty = ty.peel_refs(); @@ -229,7 +232,7 @@ impl<'hir> Resolver<'hir> { // Recursively resolve fields to populate subtypes for &field_id in def.fields { - let field_ty = self.gcx.get().type_of_item(field_id.into()); + let field_ty = self.gcx.type_of_item(field_id.into()); self.resolve_type(field_ty, subtypes)?; } name diff --git a/crates/forge/src/cmd/lint.rs b/crates/forge/src/cmd/lint.rs index d4c5cb43f959b..dc317ecd60073 100644 --- a/crates/forge/src/cmd/lint.rs +++ b/crates/forge/src/cmd/lint.rs @@ -5,7 +5,7 @@ use forge_lint::{ sol::{SolLint, SolLintError, SolidityLinter}, }; use foundry_cli::{ - opts::{BuildOpts, solar_pcx_from_build_opts}, + opts::{BuildOpts, configure_pcx}, utils::{FoundryPathExt, LoadConfig}, }; use foundry_compilers::{solc::SolcLanguage, utils::SOLC_EXTENSIONS}; @@ -97,7 +97,7 @@ impl LintArgs { // Override default severity config with user-defined severity let severity = match self.severity { Some(target) => target, - None => config.lint.severity, + None => config.lint.severity.clone(), }; if project.compiler.solc.is_none() { @@ -112,13 +112,15 @@ impl LintArgs { .with_severity(if severity.is_empty() { 
None } else { Some(severity) }) .with_mixed_case_exceptions(&config.lint.mixed_case_exceptions); - let sess = linter.init(); - - let pcx = solar_pcx_from_build_opts(&sess, &self.build, Some(&project), Some(&input))?; - linter.early_lint(&input, pcx); - - let pcx = solar_pcx_from_build_opts(&sess, &self.build, Some(&project), Some(&input))?; - linter.late_lint(&input, pcx); + let mut compiler = linter.init(); + compiler.enter_mut(|compiler| -> Result<()> { + let mut pcx = compiler.parse(); + configure_pcx(&mut pcx, &config, Some(&project), Some(&input))?; + pcx.parse(); + let _ = compiler.lower_asts(); + Ok(()) + })?; + linter.lint(&input, &mut compiler); Ok(()) } diff --git a/crates/forge/src/cmd/tree.rs b/crates/forge/src/cmd/tree.rs index 30184af5d138e..84c4ea7b36d54 100644 --- a/crates/forge/src/cmd/tree.rs +++ b/crates/forge/src/cmd/tree.rs @@ -3,7 +3,7 @@ use eyre::Result; use foundry_cli::{opts::ProjectPathOpts, utils::LoadConfig}; use foundry_compilers::{ Graph, - resolver::{Charset, TreeOptions, parse::SolData}, + resolver::{Charset, TreeOptions}, }; /// CLI arguments for `forge tree`. @@ -28,7 +28,7 @@ foundry_config::impl_figment_convert!(TreeArgs, project_paths); impl TreeArgs { pub fn run(self) -> Result<()> { let config = self.load_config()?; - let graph = Graph::::resolve(&config.project_paths())?; + let graph = ::resolve(&config.project_paths())?; let opts = TreeOptions { charset: self.charset, no_dedupe: self.no_dedupe }; graph.print_with_options(opts); diff --git a/crates/lint/Cargo.toml b/crates/lint/Cargo.toml index c48dcfb85e0ed..da634dc771d4f 100644 --- a/crates/lint/Cargo.toml +++ b/crates/lint/Cargo.toml @@ -19,11 +19,9 @@ foundry-common.workspace = true foundry-compilers.workspace = true foundry-config.workspace = true -solar-parse.workspace = true -solar-ast.workspace = true +solar.workspace = true +# TODO: remove in next solar release: https://github.com/paradigmxyz/solar/pull/444 solar-interface = { workspace = true, features = ["json"] } -solar-data-structures.workspace = true -solar-sema.workspace = true heck.workspace = true rayon.workspace = true diff --git a/crates/lint/src/inline_config.rs b/crates/lint/src/inline_config.rs index b7512c060216e..f62d7182b60c8 100644 --- a/crates/lint/src/inline_config.rs +++ b/crates/lint/src/inline_config.rs @@ -1,7 +1,9 @@ -use solar_ast::{Item, SourceUnit, visit::Visit as VisitAst}; -use solar_interface::SourceMap; -use solar_parse::ast::Span; -use solar_sema::hir::{self, Visit as VisitHir}; +use solar::{ + ast::{Item, SourceUnit, visit::Visit as VisitAst}, + interface::SourceMap, + parse::ast::Span, + sema::hir::{self, Visit as VisitHir}, +}; use std::{collections::HashMap, fmt, marker::PhantomData, ops::ControlFlow}; /// An inline config item diff --git a/crates/lint/src/lib.rs b/crates/lint/src/lib.rs index 14d34cc30b306..e09fea757fb4b 100644 --- a/crates/lint/src/lib.rs +++ b/crates/lint/src/lib.rs @@ -1,6 +1,10 @@ #![doc = include_str!("../README.md")] #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![allow(elided_lifetimes_in_paths)] + +// Feature. 
+use solar_interface as _; pub mod inline_config; pub mod linter; diff --git a/crates/lint/src/linter/early.rs b/crates/lint/src/linter/early.rs index 99a4812638f9f..dae64bb409657 100644 --- a/crates/lint/src/linter/early.rs +++ b/crates/lint/src/linter/early.rs @@ -1,45 +1,41 @@ -use solar_ast::{self as ast, visit::Visit}; -use solar_interface::data_structures::Never; -use std::ops::ControlFlow; - use super::LintContext; +use solar::{ + ast::{self as ast, visit::Visit}, + interface::data_structures::Never, +}; +use std::ops::ControlFlow; /// Trait for lints that operate directly on the AST. /// Its methods mirror `ast::visit::Visit`, with the addition of `LintCotext`. pub trait EarlyLintPass<'ast>: Send + Sync { - fn check_expr(&mut self, _ctx: &LintContext<'_>, _expr: &'ast ast::Expr<'ast>) {} - fn check_item_struct(&mut self, _ctx: &LintContext<'_>, _struct: &'ast ast::ItemStruct<'ast>) {} - fn check_item_function( - &mut self, - _ctx: &LintContext<'_>, - _func: &'ast ast::ItemFunction<'ast>, - ) { - } + fn check_expr(&mut self, _ctx: &LintContext, _expr: &'ast ast::Expr<'ast>) {} + fn check_item_struct(&mut self, _ctx: &LintContext, _struct: &'ast ast::ItemStruct<'ast>) {} + fn check_item_function(&mut self, _ctx: &LintContext, _func: &'ast ast::ItemFunction<'ast>) {} fn check_variable_definition( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _var: &'ast ast::VariableDefinition<'ast>, ) { } fn check_import_directive( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _import: &'ast ast::ImportDirective<'ast>, ) { } fn check_using_directive( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _using: &'ast ast::UsingDirective<'ast>, ) { } fn check_item_contract( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _contract: &'ast ast::ItemContract<'ast>, ) { } - fn check_doc_comment(&mut self, _ctx: &LintContext<'_>, _cmnt: &'ast ast::DocComment) {} + fn check_doc_comment(&mut self, _ctx: &LintContext, _cmnt: &'ast ast::DocComment) {} // TODO: Add methods for each required AST node type /// Should be called after the source unit has been visited. 
Enables lints that require @@ -63,7 +59,7 @@ pub trait EarlyLintPass<'ast>: Send + Sync { /// ``` fn check_full_source_unit( &mut self, - _ctx: &LintContext<'ast>, + _ctx: &LintContext<'ast, '_>, _ast: &'ast ast::SourceUnit<'ast>, ) { } @@ -71,7 +67,7 @@ pub trait EarlyLintPass<'ast>: Send + Sync { /// Visitor struct for `EarlyLintPass`es pub struct EarlyLintVisitor<'a, 's, 'ast> { - pub ctx: &'a LintContext<'s>, + pub ctx: &'a LintContext<'s, 'a>, pub passes: &'a mut [Box + 's>], } @@ -80,7 +76,7 @@ where 's: 'ast, { pub fn new( - ctx: &'a LintContext<'s>, + ctx: &'a LintContext<'s, 'a>, passes: &'a mut [Box + 's>], ) -> Self { Self { ctx, passes } @@ -175,6 +171,6 @@ where self.walk_item_contract(contract) } - // TODO: Add methods for each required AST node type, mirroring `solar_ast::visit::Visit` method - // sigs + adding `LintContext` + // TODO: Add methods for each required AST node type, mirroring `solar::ast::visit::Visit` + // method sigs + adding `LintContext` } diff --git a/crates/lint/src/linter/late.rs b/crates/lint/src/linter/late.rs index 597f228b92e34..1e967a4130a65 100644 --- a/crates/lint/src/linter/late.rs +++ b/crates/lint/src/linter/late.rs @@ -1,5 +1,4 @@ -use solar_interface::data_structures::Never; -use solar_sema::hir; +use solar::{interface::data_structures::Never, sema::hir}; use std::ops::ControlFlow; use super::LintContext; @@ -9,98 +8,98 @@ use super::LintContext; pub trait LateLintPass<'hir>: Send + Sync { fn check_nested_source( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _id: hir::SourceId, ) { } fn check_nested_item( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _id: &'hir hir::ItemId, ) { } fn check_nested_contract( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _id: &'hir hir::ContractId, ) { } fn check_nested_function( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _id: &'hir hir::FunctionId, ) { } fn check_nested_var( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _id: &'hir hir::VariableId, ) { } fn check_item( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _item: hir::Item<'hir, 'hir>, ) { } fn check_contract( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _contract: &'hir hir::Contract<'hir>, ) { } fn check_function( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _func: &'hir hir::Function<'hir>, ) { } fn check_modifier( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _mod: &'hir hir::Modifier<'hir>, ) { } fn check_var( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _var: &'hir hir::Variable<'hir>, ) { } fn check_expr( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _expr: &'hir hir::Expr<'hir>, ) { } fn check_call_args( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _args: &'hir hir::CallArgs<'hir>, ) { } fn check_stmt( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _stmt: &'hir hir::Stmt<'hir>, ) { } fn check_ty( &mut self, - _ctx: &LintContext<'_>, + _ctx: &LintContext, _hir: &'hir hir::Hir<'hir>, _ty: &'hir hir::Type<'hir>, ) { @@ -109,7 +108,7 @@ pub trait LateLintPass<'hir>: Send + Sync { /// Visitor struct for `LateLintPass`es pub 
struct LateLintVisitor<'a, 's, 'hir> { - ctx: &'a LintContext<'s>, + ctx: &'a LintContext<'s, 'a>, passes: &'a mut [Box + 's>], hir: &'hir hir::Hir<'hir>, } @@ -119,7 +118,7 @@ where 's: 'hir, { pub fn new( - ctx: &'a LintContext<'s>, + ctx: &'a LintContext<'s, 'a>, passes: &'a mut [Box + 's>], hir: &'hir hir::Hir<'hir>, ) -> Self { diff --git a/crates/lint/src/linter/mod.rs b/crates/lint/src/linter/mod.rs index 552bb88008b8c..bd2a11fac9057 100644 --- a/crates/lint/src/linter/mod.rs +++ b/crates/lint/src/linter/mod.rs @@ -6,40 +6,42 @@ pub use late::{LateLintPass, LateLintVisitor}; use foundry_compilers::Language; use foundry_config::lint::Severity; -use solar_interface::{ - Session, Span, - diagnostics::{DiagBuilder, DiagId, DiagMsg, MultiSpan, Style}, +use solar::{ + interface::{ + Session, Span, + diagnostics::{DiagBuilder, DiagId, DiagMsg, MultiSpan, Style}, + }, + sema::Compiler, }; -use solar_sema::ParsingContext; use std::path::PathBuf; use crate::inline_config::InlineConfig; /// Trait representing a generic linter for analyzing and reporting issues in smart contract source -/// code files. A linter can be implemented for any smart contract language supported by Foundry. -/// -/// # Type Parameters -/// -/// - `Language`: Represents the target programming language. Must implement the [`Language`] trait. -/// - `Lint`: Represents the types of lints performed by the linter. Must implement the [`Lint`] -/// trait. -/// -/// # Required Methods -/// -/// - `init`: Creates a new solar `Session` with the appropriate linter configuration. -/// - `early_lint`: Scans the source files (using the AST) emitting a diagnostic for lints found. -/// - `late_lint`: Scans the source files (using the HIR) emitting a diagnostic for lints found. +/// code files. /// -/// # Note: -/// -/// - For `early_lint` and `late_lint`, the `ParsingContext` should have the sources pre-loaded. +/// A linter can be implemented for any smart contract language supported by Foundry. pub trait Linter: Send + Sync { + /// The target [`Language`]. type Language: Language; + /// The [`Lint`] type. type Lint: Lint; - fn init(&self) -> Session; - fn early_lint<'sess>(&self, input: &[PathBuf], pcx: ParsingContext<'sess>); - fn late_lint<'sess>(&self, input: &[PathBuf], pcx: ParsingContext<'sess>); + /// Build a solar [`Compiler`] from the given linter config. + fn init(&self) -> Compiler { + let mut compiler = Compiler::new(Session::builder().with_stderr_emitter().build()); + self.configure(&mut compiler); + compiler + } + + /// Configure a solar [`Compiler`] from the given linter config. + fn configure(&self, compiler: &mut Compiler); + + /// Run all lints. + /// + /// The `compiler` should have already been configured with all the sources necessary, + /// as well as having performed parsing and lowering. 
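    // For illustration, this is roughly how the updated `forge lint` command (earlier
    // in this diff) satisfies that contract before calling `lint`; `config`, `project`,
    // `input`, and the `configure_pcx` helper come from that command's scope:
    //
    //     let mut compiler = linter.init();
    //     compiler.enter_mut(|compiler| -> Result<()> {
    //         let mut pcx = compiler.parse();
    //         configure_pcx(&mut pcx, &config, Some(&project), Some(&input))?;
    //         pcx.parse();
    //         let _ = compiler.lower_asts();
    //         Ok(())
    //     })?;
    //     linter.lint(&input, &mut compiler);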
+ fn lint(&self, input: &[PathBuf], compiler: &mut Compiler); } pub trait Lint { @@ -49,10 +51,10 @@ pub trait Lint { fn help(&self) -> &'static str; } -pub struct LintContext<'s> { +pub struct LintContext<'s, 'c> { sess: &'s Session, with_description: bool, - pub config: LinterConfig<'s>, + pub config: LinterConfig<'c>, active_lints: Vec<&'static str>, } @@ -61,11 +63,11 @@ pub struct LinterConfig<'s> { pub mixed_case_exceptions: &'s [String], } -impl<'s> LintContext<'s> { +impl<'s, 'c> LintContext<'s, 'c> { pub fn new( sess: &'s Session, with_description: bool, - config: LinterConfig<'s>, + config: LinterConfig<'c>, active_lints: Vec<&'static str>, ) -> Self { Self { sess, with_description, config, active_lints } @@ -192,7 +194,7 @@ pub enum Snippet { } impl Snippet { - pub fn to_note(self, ctx: &LintContext<'_>) -> Vec<(DiagMsg, Style)> { + pub fn to_note(self, ctx: &LintContext) -> Vec<(DiagMsg, Style)> { let mut output = if let Some(desc) = self.desc() { vec![(DiagMsg::from(desc), Style::NoStyle), (DiagMsg::from("\n\n"), Style::NoStyle)] } else { diff --git a/crates/lint/src/sol/codesize/unwrapped_modifier_logic.rs b/crates/lint/src/sol/codesize/unwrapped_modifier_logic.rs index 85714780cda25..cfb4fea29799f 100644 --- a/crates/lint/src/sol/codesize/unwrapped_modifier_logic.rs +++ b/crates/lint/src/sol/codesize/unwrapped_modifier_logic.rs @@ -3,8 +3,10 @@ use crate::{ linter::{LateLintPass, LintContext, Snippet}, sol::{Severity, SolLint}, }; -use solar_ast::{self as ast, Span}; -use solar_sema::hir::{self, Res}; +use solar::{ + ast::{self as ast, Span}, + sema::hir::{self, Res}, +}; declare_forge_lint!( UNWRAPPED_MODIFIER_LOGIC, @@ -16,7 +18,7 @@ declare_forge_lint!( impl<'hir> LateLintPass<'hir> for UnwrappedModifierLogic { fn check_function( &mut self, - ctx: &LintContext<'_>, + ctx: &LintContext, hir: &'hir hir::Hir<'hir>, func: &'hir hir::Function<'hir>, ) { @@ -92,7 +94,7 @@ impl UnwrappedModifierLogic { fn get_snippet<'a>( &self, - ctx: &LintContext<'_>, + ctx: &LintContext, hir: &hir::Hir<'_>, func: &hir::Function<'_>, before: &'a [hir::Stmt<'a>], diff --git a/crates/lint/src/sol/gas/keccak.rs b/crates/lint/src/sol/gas/keccak.rs index 0bb129aef50aa..cb942510bbb49 100644 --- a/crates/lint/src/sol/gas/keccak.rs +++ b/crates/lint/src/sol/gas/keccak.rs @@ -3,9 +3,11 @@ use crate::{ linter::{LateLintPass, LintContext}, sol::{Severity, SolLint}, }; -use solar_ast::{self as ast, Span}; -use solar_interface::kw; -use solar_sema::hir::{self}; +use solar::{ + ast::{self as ast, Span}, + interface::kw, + sema::hir::{self}, +}; declare_forge_lint!( ASM_KECCAK256, @@ -17,7 +19,7 @@ declare_forge_lint!( impl<'hir> LateLintPass<'hir> for AsmKeccak256 { fn check_stmt( &mut self, - ctx: &LintContext<'_>, + ctx: &LintContext, hir: &'hir hir::Hir<'hir>, stmt: &'hir hir::Stmt<'hir>, ) { @@ -61,7 +63,7 @@ impl AsmKeccak256 { /// Emits lints (when possible with fix suggestions) for inefficient `keccak256` calls. 
fn emit_lint( &self, - ctx: &LintContext<'_>, + ctx: &LintContext, _hir: &hir::Hir<'_>, _stmt_span: Span, call: &hir::Expr<'_>, diff --git a/crates/lint/src/sol/high/incorrect_shift.rs b/crates/lint/src/sol/high/incorrect_shift.rs index dbc8327138a98..2ad60b9a793f7 100644 --- a/crates/lint/src/sol/high/incorrect_shift.rs +++ b/crates/lint/src/sol/high/incorrect_shift.rs @@ -3,7 +3,7 @@ use crate::{ linter::{EarlyLintPass, LintContext}, sol::{Severity, SolLint}, }; -use solar_ast::{BinOp, BinOpKind, Expr, ExprKind}; +use solar::ast::{BinOp, BinOpKind, Expr, ExprKind}; declare_forge_lint!( INCORRECT_SHIFT, @@ -13,7 +13,7 @@ declare_forge_lint!( ); impl<'ast> EarlyLintPass<'ast> for IncorrectShift { - fn check_expr(&mut self, ctx: &LintContext<'_>, expr: &'ast Expr<'ast>) { + fn check_expr(&mut self, ctx: &LintContext, expr: &'ast Expr<'ast>) { if let ExprKind::Binary( left_expr, BinOp { kind: BinOpKind::Shl | BinOpKind::Shr, .. }, diff --git a/crates/lint/src/sol/high/unchecked_calls.rs b/crates/lint/src/sol/high/unchecked_calls.rs index 65eef5fee6cb2..5d6a9bb713e9c 100644 --- a/crates/lint/src/sol/high/unchecked_calls.rs +++ b/crates/lint/src/sol/high/unchecked_calls.rs @@ -3,8 +3,10 @@ use crate::{ linter::{EarlyLintPass, LintContext}, sol::{Severity, SolLint}, }; -use solar_ast::{Expr, ExprKind, ItemFunction, Stmt, StmtKind, visit::Visit}; -use solar_interface::kw; +use solar::{ + ast::{Expr, ExprKind, ItemFunction, Stmt, StmtKind, visit::Visit}, + interface::kw, +}; use std::ops::ControlFlow; declare_forge_lint!( @@ -26,7 +28,7 @@ declare_forge_lint!( /// WARN: can issue false positives. It does not check that the contract being called is an ERC20. /// TODO: re-implement using `LateLintPass` so that it can't issue false positives. impl<'ast> EarlyLintPass<'ast> for UncheckedTransferERC20 { - fn check_item_function(&mut self, ctx: &LintContext<'_>, func: &'ast ItemFunction<'ast>) { + fn check_item_function(&mut self, ctx: &LintContext, func: &'ast ItemFunction<'ast>) { if let Some(body) = &func.body { let mut checker = UncheckedTransferERC20Checker { ctx }; let _ = checker.visit_block(body); @@ -40,7 +42,7 @@ impl<'ast> EarlyLintPass<'ast> for UncheckedTransferERC20 { /// When a transfer's return value is used (in require, assignment, etc.), it's part /// of a larger expression and won't be flagged. struct UncheckedTransferERC20Checker<'a, 's> { - ctx: &'a LintContext<'s>, + ctx: &'a LintContext<'s, 'a>, } impl<'ast> Visit<'ast> for UncheckedTransferERC20Checker<'_, '_> { @@ -77,7 +79,7 @@ fn is_erc20_transfer_call(expr: &Expr<'_>) -> bool { // -- UNCKECKED LOW-LEVEL CALLS ------------------------------------------------------------------- impl<'ast> EarlyLintPass<'ast> for UncheckedCall { - fn check_item_function(&mut self, ctx: &LintContext<'_>, func: &'ast ItemFunction<'ast>) { + fn check_item_function(&mut self, ctx: &LintContext, func: &'ast ItemFunction<'ast>) { if let Some(body) = &func.body { let mut checker = UncheckedCallChecker { ctx }; let _ = checker.visit_block(body); @@ -91,7 +93,7 @@ impl<'ast> EarlyLintPass<'ast> for UncheckedCall { /// statements. When the success value is checked (in require, if, etc.), the call /// is part of a larger expression and won't be flagged. 
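// A minimal sketch of a custom `EarlyLintPass`, showing the shape shared by the
// passes in this file under the new `&LintContext` signatures. `FlagAllCalls` and
// `MY_LINT` are hypothetical: `MY_LINT` stands in for a `SolLint` static declared
// elsewhere with `declare_forge_lint!`, whose arguments are not shown in this diff
// and are therefore omitted here.
//
//     use crate::linter::{EarlyLintPass, LintContext};
//     use solar::ast::{Expr, ExprKind};
//
//     struct FlagAllCalls;
//
//     impl<'ast> EarlyLintPass<'ast> for FlagAllCalls {
//         fn check_expr(&mut self, ctx: &LintContext, expr: &'ast Expr<'ast>) {
//             // Placeholder condition: report every call expression at its span.
//             if matches!(expr.kind, ExprKind::Call(..)) {
//                 ctx.emit(&MY_LINT, expr.span);
//             }
//         }
//     }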
struct UncheckedCallChecker<'a, 's> { - ctx: &'a LintContext<'s>, + ctx: &'a LintContext<'s, 'a>, } impl<'ast> Visit<'ast> for UncheckedCallChecker<'_, '_> { diff --git a/crates/lint/src/sol/info/imports.rs b/crates/lint/src/sol/info/imports.rs index 7736b4e8f0f21..99729404ecd56 100644 --- a/crates/lint/src/sol/info/imports.rs +++ b/crates/lint/src/sol/info/imports.rs @@ -1,6 +1,8 @@ -use solar_ast::{self as ast, SourceUnit, Span, Symbol, visit::Visit}; -use solar_data_structures::map::FxIndexSet; -use solar_interface::SourceMap; +use solar::{ + ast::{self as ast, SourceUnit, Span, Symbol, visit::Visit}, + data_structures::map::FxIndexSet, + interface::SourceMap, +}; use std::ops::ControlFlow; use super::Imports; @@ -26,7 +28,7 @@ declare_forge_lint!( impl<'ast> EarlyLintPass<'ast> for Imports { fn check_import_directive( &mut self, - ctx: &LintContext<'_>, + ctx: &LintContext, import: &'ast ast::ImportDirective<'ast>, ) { // Non-aliased plain imports like `import "File.sol";`. @@ -37,7 +39,7 @@ impl<'ast> EarlyLintPass<'ast> for Imports { } } - fn check_full_source_unit(&mut self, ctx: &LintContext<'ast>, ast: &'ast SourceUnit<'ast>) { + fn check_full_source_unit(&mut self, ctx: &LintContext<'ast, '_>, ast: &'ast SourceUnit<'ast>) { // Despite disabled lints are filtered inside `ctx.emit()`, we explicitly check // upfront to avoid the expensive full source unit traversal when unnecessary. if ctx.is_lint_enabled(UNUSED_IMPORT.id) { @@ -70,7 +72,7 @@ impl<'ast> UnusedChecker<'ast> { } /// Check for unused imports and emit warnings. - fn check_unused_imports(&self, ast: &SourceUnit<'_>, ctx: &LintContext<'_>) { + fn check_unused_imports(&self, ast: &SourceUnit<'_>, ctx: &LintContext) { for item in ast.items.iter() { let span = item.span; let ast::ItemKind::Import(import) = &item.kind else { continue }; @@ -94,13 +96,13 @@ impl<'ast> UnusedChecker<'ast> { } } - fn unused_import(&self, ctx: &LintContext<'_>, span: Span) { + fn unused_import(&self, ctx: &LintContext, span: Span) { ctx.emit(&UNUSED_IMPORT, span); } } impl<'ast> Visit<'ast> for UnusedChecker<'ast> { - type BreakValue = solar_data_structures::Never; + type BreakValue = solar::data_structures::Never; fn visit_item(&mut self, item: &'ast ast::Item<'ast>) -> ControlFlow { if let ast::ItemKind::Import(_) = &item.kind { @@ -128,20 +130,6 @@ impl<'ast> Visit<'ast> for UnusedChecker<'ast> { self.walk_using_directive(using) } - fn visit_function_header( - &mut self, - header: &'ast solar_ast::FunctionHeader<'ast>, - ) -> ControlFlow { - // temporary workaround until solar also visits `override` and its paths . 
- if let Some(ref override_) = header.override_ { - for path in override_.paths.iter() { - _ = self.visit_path(path); - } - } - - self.walk_function_header(header) - } - fn visit_expr(&mut self, expr: &'ast ast::Expr<'ast>) -> ControlFlow { if let ast::ExprKind::Ident(id) = expr.kind { self.mark_symbol_used(id.name); @@ -168,7 +156,7 @@ impl<'ast> Visit<'ast> for UnusedChecker<'ast> { fn visit_doc_comment( &mut self, - cmnt: &'ast solar_ast::DocComment, + cmnt: &'ast solar::ast::DocComment, ) -> ControlFlow { if let Ok(snip) = self.source_map.span_to_snippet(cmnt.span) { for line in snip.lines() { diff --git a/crates/lint/src/sol/info/mixed_case.rs b/crates/lint/src/sol/info/mixed_case.rs index 9063e08ce097c..c390b357a7af8 100644 --- a/crates/lint/src/sol/info/mixed_case.rs +++ b/crates/lint/src/sol/info/mixed_case.rs @@ -3,7 +3,7 @@ use crate::{ linter::{EarlyLintPass, LintContext}, sol::{Severity, SolLint, info::screaming_snake_case::is_screaming_snake_case}, }; -use solar_ast::{FunctionHeader, ItemFunction, VariableDefinition, Visibility}; +use solar::ast::{FunctionHeader, ItemFunction, VariableDefinition, Visibility}; declare_forge_lint!( MIXED_CASE_FUNCTION, @@ -13,7 +13,7 @@ declare_forge_lint!( ); impl<'ast> EarlyLintPass<'ast> for MixedCaseFunction { - fn check_item_function(&mut self, ctx: &LintContext<'_>, func: &'ast ItemFunction<'ast>) { + fn check_item_function(&mut self, ctx: &LintContext, func: &'ast ItemFunction<'ast>) { if let Some(name) = func.header.name && !is_mixed_case(name.as_str(), true, ctx.config.mixed_case_exceptions) && !is_constant_getter(&func.header) @@ -33,7 +33,7 @@ declare_forge_lint!( impl<'ast> EarlyLintPass<'ast> for MixedCaseVariable { fn check_variable_definition( &mut self, - ctx: &LintContext<'_>, + ctx: &LintContext, var: &'ast VariableDefinition<'ast>, ) { if var.mutability.is_none() diff --git a/crates/lint/src/sol/info/pascal_case.rs b/crates/lint/src/sol/info/pascal_case.rs index b8e719aa0d762..bdb8b649896e9 100644 --- a/crates/lint/src/sol/info/pascal_case.rs +++ b/crates/lint/src/sol/info/pascal_case.rs @@ -3,7 +3,7 @@ use crate::{ linter::{EarlyLintPass, LintContext}, sol::{Severity, SolLint}, }; -use solar_ast::ItemStruct; +use solar::ast::ItemStruct; declare_forge_lint!( PASCAL_CASE_STRUCT, @@ -13,7 +13,7 @@ declare_forge_lint!( ); impl<'ast> EarlyLintPass<'ast> for PascalCaseStruct { - fn check_item_struct(&mut self, ctx: &LintContext<'_>, strukt: &'ast ItemStruct<'ast>) { + fn check_item_struct(&mut self, ctx: &LintContext, strukt: &'ast ItemStruct<'ast>) { let name = strukt.name.as_str(); if name.len() > 1 && !is_pascal_case(name) { ctx.emit(&PASCAL_CASE_STRUCT, strukt.name.span); diff --git a/crates/lint/src/sol/info/screaming_snake_case.rs b/crates/lint/src/sol/info/screaming_snake_case.rs index 28ca4c54c7b26..17ea949dd0a34 100644 --- a/crates/lint/src/sol/info/screaming_snake_case.rs +++ b/crates/lint/src/sol/info/screaming_snake_case.rs @@ -3,7 +3,7 @@ use crate::{ linter::{EarlyLintPass, LintContext}, sol::{Severity, SolLint}, }; -use solar_ast::{VarMut, VariableDefinition}; +use solar::ast::{VarMut, VariableDefinition}; declare_forge_lint!( SCREAMING_SNAKE_CASE_CONSTANT, @@ -22,7 +22,7 @@ declare_forge_lint!( impl<'ast> EarlyLintPass<'ast> for ScreamingSnakeCase { fn check_variable_definition( &mut self, - ctx: &LintContext<'_>, + ctx: &LintContext, var: &'ast VariableDefinition<'ast>, ) { if let (Some(name), Some(mutability)) = (var.name, var.mutability) { diff --git a/crates/lint/src/sol/info/unsafe_cheatcodes.rs 
b/crates/lint/src/sol/info/unsafe_cheatcodes.rs index 3b48129b82a9a..f5f6efcb75a53 100644 --- a/crates/lint/src/sol/info/unsafe_cheatcodes.rs +++ b/crates/lint/src/sol/info/unsafe_cheatcodes.rs @@ -3,7 +3,7 @@ use crate::{ linter::{EarlyLintPass, LintContext}, sol::{Severity, SolLint}, }; -use solar_ast::{Expr, ExprKind}; +use solar::ast::{Expr, ExprKind}; declare_forge_lint!( UNSAFE_CHEATCODE_USAGE, @@ -25,7 +25,7 @@ const UNSAFE_CHEATCODES: [&str; 9] = [ ]; impl<'ast> EarlyLintPass<'ast> for UnsafeCheatcodes { - fn check_expr(&mut self, ctx: &LintContext<'_>, expr: &'ast Expr<'ast>) { + fn check_expr(&mut self, ctx: &LintContext, expr: &'ast Expr<'ast>) { if let ExprKind::Call(lhs, _args) = &expr.kind && let ExprKind::Member(_lhs, member) = &lhs.kind && UNSAFE_CHEATCODES.iter().any(|&c| c == member.as_str()) diff --git a/crates/lint/src/sol/med/div_mul.rs b/crates/lint/src/sol/med/div_mul.rs index 3f8e25b9b8bb8..6640a1ea1f9f8 100644 --- a/crates/lint/src/sol/med/div_mul.rs +++ b/crates/lint/src/sol/med/div_mul.rs @@ -3,7 +3,7 @@ use crate::{ linter::{EarlyLintPass, LintContext}, sol::{Severity, SolLint}, }; -use solar_ast::{BinOp, BinOpKind, Expr, ExprKind}; +use solar::ast::{BinOp, BinOpKind, Expr, ExprKind}; declare_forge_lint!( DIVIDE_BEFORE_MULTIPLY, @@ -13,7 +13,7 @@ declare_forge_lint!( ); impl<'ast> EarlyLintPass<'ast> for DivideBeforeMultiply { - fn check_expr(&mut self, ctx: &LintContext<'_>, expr: &'ast Expr<'ast>) { + fn check_expr(&mut self, ctx: &LintContext, expr: &'ast Expr<'ast>) { if let ExprKind::Binary(left_expr, BinOp { kind: BinOpKind::Mul, .. }, _) = &expr.kind && contains_division(left_expr) { diff --git a/crates/lint/src/sol/med/unsafe_typecast.rs b/crates/lint/src/sol/med/unsafe_typecast.rs index 6bced97d5c108..69afbc94fc7bb 100644 --- a/crates/lint/src/sol/med/unsafe_typecast.rs +++ b/crates/lint/src/sol/med/unsafe_typecast.rs @@ -3,8 +3,10 @@ use crate::{ linter::{LateLintPass, LintContext, Snippet}, sol::{Severity, SolLint}, }; -use solar_ast::{LitKind, StrKind}; -use solar_sema::hir::{self, ElementaryType, ExprKind, ItemId, Res, TypeKind}; +use solar::{ + ast::{LitKind, StrKind}, + sema::hir::{self, ElementaryType, ExprKind, ItemId, Res, TypeKind}, +}; declare_forge_lint!( UNSAFE_TYPECAST, @@ -16,7 +18,7 @@ declare_forge_lint!( impl<'hir> LateLintPass<'hir> for UnsafeTypecast { fn check_expr( &mut self, - ctx: &LintContext<'_>, + ctx: &LintContext, hir: &'hir hir::Hir<'hir>, expr: &'hir hir::Expr<'hir>, ) { diff --git a/crates/lint/src/sol/mod.rs b/crates/lint/src/sol/mod.rs index 56d4883f3a570..8e1d9bfcdb700 100644 --- a/crates/lint/src/sol/mod.rs +++ b/crates/lint/src/sol/mod.rs @@ -8,20 +8,22 @@ use crate::{ use foundry_common::comments::Comments; use foundry_compilers::{ProjectPathsConfig, solc::SolcLanguage}; use foundry_config::lint::Severity; -use rayon::iter::{ParallelBridge, ParallelIterator}; -use solar_ast::{self as ast, visit::Visit as VisitAST}; -use solar_interface::{ - Session, SourceMap, - diagnostics::{self, DiagCtxt, JsonEmitter}, - source_map::{FileName, SourceFile}, -}; -use solar_sema::{ - ParsingContext, - hir::{self, Visit as VisitHIR}, +use rayon::prelude::*; +use solar::{ + ast::{self as ast, visit::Visit as VisitAST}, + interface::{ + Session, + diagnostics::{self, HumanEmitter, JsonEmitter}, + source_map::{FileName, SourceFile}, + }, + sema::{ + Compiler, Gcx, + hir::{self, Visit as VisitHIR}, + }, }; use std::{ path::{Path, PathBuf}, - sync::{Arc, LazyLock}, + sync::LazyLock, }; use thiserror::Error; @@ -110,10 +112,10 @@ impl<'a> 
SolidityLinter<'a> { && !self.lints_excluded.as_ref().is_some_and(|excl| excl.contains(&lint)) } - fn process_source_ast<'ast>( - &'ast self, - sess: &'ast Session, - ast: &'ast ast::SourceUnit<'ast>, + fn process_source_ast<'gcx>( + &self, + sess: &'gcx Session, + ast: &'gcx ast::SourceUnit<'gcx>, file: &SourceFile, path: &Path, ) -> Result<(), diagnostics::ErrorGuaranteed> { @@ -159,12 +161,11 @@ impl<'a> SolidityLinter<'a> { Ok(()) } - fn process_source_hir<'hir>( + fn process_source_hir<'gcx>( &self, - sess: &Session, - gcx: &solar_sema::ty::Gcx<'hir>, + gcx: Gcx<'gcx>, source_id: hir::SourceId, - file: &'hir SourceFile, + file: &'gcx SourceFile, ) -> Result<(), diagnostics::ErrorGuaranteed> { // Declare all available passes and lints let mut passes_and_lints = Vec::new(); @@ -173,7 +174,7 @@ impl<'a> SolidityLinter<'a> { passes_and_lints.extend(info::create_late_lint_passes()); // Do not apply 'gas' and 'codesize' severity rules on tests and scripts - if let FileName::Real(ref path) = file.name + if let FileName::Real(path) = &file.name && !self.path_config.is_test_or_script(path) { passes_and_lints.extend(gas::create_late_lint_passes()); @@ -199,15 +200,19 @@ impl<'a> SolidityLinter<'a> { // Process the inline-config let comments = Comments::new(file); - let inline_config = - parse_inline_config(sess, &comments, InlineConfigSource::Hir((&gcx.hir, source_id))); + let inline_config = parse_inline_config( + gcx.sess, + &comments, + InlineConfigSource::Hir((&gcx.hir, source_id)), + ); // Run late lint visitor - let ctx = LintContext::new(sess, self.with_description, self.config(inline_config), lints); + let ctx = + LintContext::new(gcx.sess, self.with_description, self.config(inline_config), lints); let mut late_visitor = LateLintVisitor::new(&ctx, &mut passes, &gcx.hir); // Visit this specific source - _ = late_visitor.visit_nested_source(source_id); + let _ = late_visitor.visit_nested_source(source_id); Ok(()) } @@ -217,73 +222,40 @@ impl<'a> Linter for SolidityLinter<'a> { type Language = SolcLanguage; type Lint = SolLint; - /// Build solar session based on the linter config - fn init(&self) -> Session { - let mut builder = Session::builder(); - if self.with_json_emitter { - let map = Arc::::default(); - let json_emitter = JsonEmitter::new(Box::new(std::io::stderr()), map.clone()) - .rustc_like(true) - .ui_testing(false); - - builder = builder.dcx(DiagCtxt::new(Box::new(json_emitter))).source_map(map); + fn configure(&self, compiler: &mut Compiler) { + let dcx = compiler.dcx_mut(); + let sm = dcx.source_map_mut().unwrap().clone(); + dcx.set_emitter(if self.with_json_emitter { + let writer = Box::new(std::io::BufWriter::new(std::io::stderr())); + let json_emitter = JsonEmitter::new(writer, sm).rustc_like(true).ui_testing(false); + Box::new(json_emitter) } else { - builder = builder.with_stderr_emitter(); - }; - - // Create a single session for all files - let mut sess = builder.build(); - sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false); - sess + Box::new(HumanEmitter::stderr(Default::default()).source_map(Some(sm))) + }); + dcx.set_flags_mut(|f| f.track_diagnostics = false); } - /// Run AST-based lints. - /// - /// Note: the `ParsingContext` should already have the sources loaded. 
- fn early_lint<'sess>(&self, input: &[PathBuf], pcx: ParsingContext<'sess>) { - let sess = pcx.sess; - _ = sess.enter_parallel(|| -> Result<(), diagnostics::ErrorGuaranteed> { - // Parse the sources - let ast_arena = solar_sema::thread_local::ThreadLocal::new(); - let ast_result = pcx.parse(&ast_arena); - - // Process each source in parallel - ast_result.sources.iter().par_bridge().for_each(|source| { + fn lint(&self, input: &[PathBuf], compiler: &mut Compiler) { + compiler.enter_mut(|compiler| { + let gcx = compiler.gcx(); + + // Early lints. + gcx.sources.raw.par_iter().for_each(|source| { if let (FileName::Real(path), Some(ast)) = (&source.file.name, &source.ast) && input.iter().any(|input_path| path.ends_with(input_path)) { - _ = self.process_source_ast(sess, ast, &source.file, path) + let _ = self.process_source_ast(gcx.sess, ast, &source.file, path); } }); - Ok(()) - }); - } - - /// Run HIR-based lints. - /// - /// Note: the `ParsingContext` should already have the sources loaded. - fn late_lint<'sess>(&self, input: &[PathBuf], pcx: ParsingContext<'sess>) { - let sess = pcx.sess; - _ = sess.enter_parallel(|| -> Result<(), diagnostics::ErrorGuaranteed> { - // Parse and lower to HIR - let hir_arena = solar_sema::thread_local::ThreadLocal::new(); - let hir_result = pcx.parse_and_lower(&hir_arena); - - if let Ok(Some(gcx_wrapper)) = hir_result { - let gcx = gcx_wrapper.get(); - - // Process each source in parallel - gcx.hir.sources_enumerated().par_bridge().for_each(|(source_id, source)| { - if let FileName::Real(ref path) = source.file.name - && input.iter().any(|input_path| path.ends_with(input_path)) - { - _ = self.process_source_hir(sess, &gcx, source_id, &source.file); - } - }); - } - - Ok(()) + // Late lints. + gcx.hir.par_sources_enumerated().for_each(|(source_id, source)| { + if let FileName::Real(path) = &source.file.name + && input.iter().any(|input_path| path.ends_with(input_path)) + { + let _ = self.process_source_hir(gcx, source_id, &source.file); + } + }); }); } } diff --git a/crates/verify/src/provider.rs b/crates/verify/src/provider.rs index 95546011aa4a8..8b9e1d91e4ba5 100644 --- a/crates/verify/src/provider.rs +++ b/crates/verify/src/provider.rs @@ -10,8 +10,8 @@ use foundry_common::compile::ProjectCompiler; use foundry_compilers::{ Graph, Project, artifacts::{Metadata, Source, output_selection::OutputSelection}, - compilers::{multi::MultiCompilerParsedSource, solc::SolcCompiler}, - multi::MultiCompilerSettings, + compilers::solc::SolcCompiler, + multi::{MultiCompilerParser, MultiCompilerSettings}, solc::Solc, }; use foundry_config::Config; @@ -88,8 +88,7 @@ impl VerificationContext { pub fn get_target_imports(&self) -> Result> { let mut sources = self.project.paths.read_input_files()?; sources.insert(self.target_path.clone(), Source::read(&self.target_path)?); - let graph = - Graph::::resolve_sources(&self.project.paths, sources)?; + let graph = Graph::::resolve_sources(&self.project.paths, sources)?; Ok(graph.imports(&self.target_path).into_iter().map(Into::into).collect()) }
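A minimal sketch of the same pattern `get_target_imports` uses above, with the new parser type from `foundry_compilers::multi`. The `Graph` type parameter and the returned element type are elided in the hunk above; `MultiCompilerParser` and `PathBuf` are assumptions based on the updated imports, and `eyre::Result` is assumed for error handling:

use foundry_compilers::{Graph, Project, artifacts::Source, multi::MultiCompilerParser};
use std::path::{Path, PathBuf};

/// Resolves the project's import graph and returns the files imported by `target`.
fn imports_of(project: &Project, target: &Path) -> eyre::Result<Vec<PathBuf>> {
    // Read all project inputs and make sure the target itself is part of the set.
    let mut sources = project.paths.read_input_files()?;
    sources.insert(target.to_path_buf(), Source::read(target)?);
    // Resolve the graph with the new parser type, then collect the target's imports.
    let graph = Graph::<MultiCompilerParser>::resolve_sources(&project.paths, sources)?;
    Ok(graph.imports(target).into_iter().map(Into::into).collect())
}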