diff --git a/crates/artifacts/solc/src/contract.rs b/crates/artifacts/solc/src/contract.rs index 58931952f..a2ae0a6dc 100644 --- a/crates/artifacts/solc/src/contract.rs +++ b/crates/artifacts/solc/src/contract.rs @@ -455,7 +455,7 @@ pub struct CompactContractRefSome<'a> { pub bin_runtime: &'a BytecodeObject, } -impl<'a> CompactContractRefSome<'a> { +impl CompactContractRefSome<'_> { /// Returns the individual parts of this contract. /// /// If the values are `None`, then `Default` is returned. diff --git a/crates/artifacts/solc/src/lib.rs b/crates/artifacts/solc/src/lib.rs index 1fb8b3c74..b3c92595a 100644 --- a/crates/artifacts/solc/src/lib.rs +++ b/crates/artifacts/solc/src/lib.rs @@ -1150,7 +1150,7 @@ impl<'de> Deserialize<'de> for LosslessMetadata { { struct LosslessMetadataVisitor; - impl<'de> Visitor<'de> for LosslessMetadataVisitor { + impl Visitor<'_> for LosslessMetadataVisitor { type Value = LosslessMetadata; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { diff --git a/crates/artifacts/solc/src/sourcemap.rs b/crates/artifacts/solc/src/sourcemap.rs index 3260472d6..aab1e5f85 100644 --- a/crates/artifacts/solc/src/sourcemap.rs +++ b/crates/artifacts/solc/src/sourcemap.rs @@ -92,7 +92,7 @@ enum Token<'a> { Regular, } -impl<'a> fmt::Debug for Token<'a> { +impl fmt::Debug for Token<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Token::Number(s) => write!(f, "NUMBER({s:?})"), @@ -105,7 +105,7 @@ impl<'a> fmt::Debug for Token<'a> { } } -impl<'a> fmt::Display for Token<'a> { +impl fmt::Display for Token<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Token::Number(_) => write!(f, "number"), @@ -531,7 +531,7 @@ impl<'input> Parser<'input> { } } -impl<'input> Iterator for Parser<'input> { +impl Iterator for Parser<'_> { type Item = Result; fn next(&mut self) -> Option { diff --git a/crates/compilers/src/artifact_output/mod.rs b/crates/compilers/src/artifact_output/mod.rs index 681cd45af..493fe98b3 100644 --- a/crates/compilers/src/artifact_output/mod.rs +++ b/crates/compilers/src/artifact_output/mod.rs @@ -32,7 +32,7 @@ mod hh; pub use hh::*; use crate::{ - cache::{CachedArtifact, CompilerCache}, + cache::{CachedArtifacts, CompilerCache}, output::{ contracts::VersionedContracts, sources::{VersionedSourceFile, VersionedSourceFiles}, @@ -52,6 +52,7 @@ pub struct ArtifactId { pub version: Version, /// `solc` build id pub build_id: String, + pub profile: String, } impl ArtifactId { @@ -119,6 +120,7 @@ pub struct ArtifactFile { /// `solc` version that produced this artifact pub version: Version, pub build_id: String, + pub profile: String, } impl ArtifactFile { @@ -298,6 +300,7 @@ impl Artifacts { source: source.clone(), version: artifact.version.clone(), build_id: artifact.build_id.clone(), + profile: artifact.profile.clone(), } .with_slashed_paths(), &artifact.artifact, @@ -324,6 +327,7 @@ impl Artifacts { source: source.clone(), version: artifact.version, build_id: artifact.build_id.clone(), + profile: artifact.profile.clone(), } .with_slashed_paths(), artifact.artifact, @@ -642,14 +646,22 @@ pub trait ArtifactOutput { /// Returns the file name for the contract's artifact /// `Greeter.json` - fn output_file_name(name: &str) -> PathBuf { - format!("{name}.json").into() - } - - /// Returns the file name for the contract's artifact and the given version - /// `Greeter.0.8.11.json` - fn output_file_name_versioned(name: &str, version: &Version) -> PathBuf { - format!("{}.{}.{}.{}.json", name, version.major, 
version.minor, version.patch).into() + fn output_file_name( + name: &str, + version: &Version, + profile: &str, + with_version: bool, + with_profile: bool, + ) -> PathBuf { + let mut name = name.to_string(); + if with_version { + name.push_str(&format!(".{}.{}.{}", version.major, version.minor, version.patch)); + } + if with_profile { + name.push_str(&format!(".{profile}")); + } + name.push_str(".json"); + name.into() } /// Returns the appropriate file name for the conflicting file. @@ -724,24 +736,23 @@ pub trait ArtifactOutput { /// Returns the path to the contract's artifact location based on the contract's file and name /// /// This returns `contract.sol/contract.json` by default - fn output_file(contract_file: &Path, name: &str) -> PathBuf { - contract_file - .file_name() - .map(Path::new) - .map(|p| p.join(Self::output_file_name(name))) - .unwrap_or_else(|| Self::output_file_name(name)) - } - - /// Returns the path to the contract's artifact location based on the contract's file, name and - /// version - /// - /// This returns `contract.sol/contract.0.8.11.json` by default - fn output_file_versioned(contract_file: &Path, name: &str, version: &Version) -> PathBuf { + fn output_file( + contract_file: &Path, + name: &str, + version: &Version, + profile: &str, + with_version: bool, + with_profile: bool, + ) -> PathBuf { contract_file .file_name() .map(Path::new) - .map(|p| p.join(Self::output_file_name_versioned(name, version))) - .unwrap_or_else(|| Self::output_file_name_versioned(name, version)) + .map(|p| { + p.join(Self::output_file_name(name, version, profile, with_version, with_profile)) + }) + .unwrap_or_else(|| { + Self::output_file_name(name, version, profile, with_version, with_profile) + }) } /// The inverse of `contract_file_name` @@ -752,11 +763,6 @@ pub trait ArtifactOutput { file.file_stem().and_then(|s| s.to_str().map(|s| s.to_string())) } - /// Whether the corresponding artifact of the given contract file and name exists - fn output_exists(contract_file: &Path, name: &str, root: &Path) -> bool { - root.join(Self::output_file(contract_file, name)).exists() - } - /// Read the artifact that's stored at the given path /// /// # Errors @@ -800,6 +806,7 @@ pub trait ArtifactOutput { /// Generates a path for an artifact based on already taken paths by either cached or compiled /// artifacts. 
+ #[allow(clippy::too_many_arguments)] fn get_artifact_path( ctx: &OutputContext<'_>, already_taken: &HashSet<String>, file: &Path, name: &str, artifacts_folder: &Path, version: &Version, - versioned: bool, + profile: &str, + with_version: bool, + with_profile: bool, ) -> PathBuf { // if an artifact for the contract already exists (from a previous compile job) // we reuse the path, this will make sure that even if there are conflicting // files (files for which `T::output_file()` would return the same path) we use // consistent output paths - if let Some(existing_artifact) = ctx.existing_artifact(file, name, version) { + if let Some(existing_artifact) = ctx.existing_artifact(file, name, version, profile) { trace!("use existing artifact file {:?}", existing_artifact,); existing_artifact.to_path_buf() } else { - let path = if versioned { - Self::output_file_versioned(file, name, version) - } else { - Self::output_file(file, name) - }; + let path = Self::output_file(file, name, version, profile, with_version, with_profile); let path = artifacts_folder.join(path); @@ -854,7 +859,9 @@ pub trait ArtifactOutput { let mut taken_paths_lowercase = ctx .existing_artifacts .values() - .flat_map(|artifacts| artifacts.values().flat_map(|artifacts| artifacts.values())) + .flat_map(|artifacts| artifacts.values()) + .flat_map(|artifacts| artifacts.values()) + .flat_map(|artifacts| artifacts.values()) .map(|a| a.path.to_slash_lossy().to_lowercase()) .collect::<HashSet<_>>(); @@ -865,14 +872,16 @@ pub trait ArtifactOutput { }); for file in files { for (name, versioned_contracts) in &contracts[file] { + let unique_versions = + versioned_contracts.iter().map(|c| &c.version).collect::<HashSet<_>>(); + let unique_profiles = + versioned_contracts.iter().map(|c| &c.profile).collect::<HashSet<_>>(); for contract in versioned_contracts { + non_standalone_sources.insert(file); + // track `SourceFile`s that can be mapped to contracts let source_file = sources.find_file_and_version(file, &contract.version); - if let Some(source) = source_file { - non_standalone_sources.insert((source.id, &contract.version)); - } - let artifact_path = Self::get_artifact_path( &ctx, &taken_paths_lowercase, @@ -880,7 +889,9 @@ pub trait ArtifactOutput { name, layout.artifacts.as_path(), &contract.version, - versioned_contracts.len() > 1, + &contract.profile, + unique_versions.len() > 1, + unique_profiles.len() > 1, ); taken_paths_lowercase.insert(artifact_path.to_slash_lossy().to_lowercase()); @@ -904,6 +915,7 @@ pub trait ArtifactOutput { file: artifact_path, version: contract.version.clone(), build_id: contract.build_id.clone(), + profile: contract.profile.clone(), }; artifacts @@ -921,8 +933,10 @@ pub trait ArtifactOutput { // any contract definition, which are not included in the `CompilerOutput` but we want to // create Artifacts for them regardless for (file, sources) in sources.as_ref().iter() { + let unique_versions = sources.iter().map(|s| &s.version).collect::<HashSet<_>>(); + let unique_profiles = sources.iter().map(|s| &s.profile).collect::<HashSet<_>>(); for source in sources { - if !non_standalone_sources.contains(&(source.source_file.id, &source.version)) { + if !non_standalone_sources.contains(file) { // scan the ast as a safe measure to ensure this file does not include any // source units // there's also no need to create a standalone artifact for source files that @@ -945,26 +959,26 @@ pub trait ArtifactOutput { name, &layout.artifacts, &source.version, - sources.len() > 1, + &source.profile, + unique_versions.len() > 1, + unique_profiles.len() >
1, ); - let entries = artifacts + taken_paths_lowercase + .insert(artifact_path.to_slash_lossy().to_lowercase()); + + artifacts .entry(file.clone()) .or_default() .entry(name.to_string()) - .or_default(); - - if entries.iter().all(|entry| entry.version != source.version) { - taken_paths_lowercase - .insert(artifact_path.to_slash_lossy().to_lowercase()); - - entries.push(ArtifactFile { + .or_default() + .push(ArtifactFile { artifact, file: artifact_path, version: source.version.clone(), build_id: source.build_id.clone(), + profile: source.profile.clone(), }); - } } } } @@ -1015,8 +1029,7 @@ pub struct OutputContext<'a> { /// └── inner /// └── a.sol /// ``` - pub existing_artifacts: - BTreeMap<&'a Path, &'a BTreeMap>>, + pub existing_artifacts: BTreeMap<&'a Path, &'a CachedArtifacts>, } // === impl OutputContext @@ -1042,13 +1055,14 @@ impl<'a> OutputContext<'a> { file: &Path, contract: &str, version: &Version, + profile: &str, ) -> Option<&Path> { - self.existing_artifacts.get(file).and_then(|contracts| { - contracts - .get(contract) - .and_then(|versions| versions.get(version)) - .map(|a| a.path.as_path()) - }) + self.existing_artifacts + .get(file) + .and_then(|contracts| contracts.get(contract)) + .and_then(|versions| versions.get(version)) + .and_then(|profiles| profiles.get(profile)) + .map(|a| a.path.as_path()) } } diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs index 0d5d16139..21a738d7e 100644 --- a/crates/compilers/src/cache.rs +++ b/crates/compilers/src/cache.rs @@ -30,7 +30,7 @@ use std::{ /// `ethers-solc` uses a different format version id, but the actual format is consistent with /// hardhat This allows ethers-solc to detect if the cache file was written by hardhat or /// `ethers-solc` -const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-3"; +const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-4"; /// The file name of the default cache file pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json"; @@ -42,13 +42,20 @@ pub struct CompilerCache { pub format: String, /// contains all directories used for the project pub paths: ProjectPaths, - pub files: BTreeMap>, + pub files: BTreeMap, pub builds: BTreeSet, + pub profiles: BTreeMap, } impl CompilerCache { pub fn new(format: String, paths: ProjectPaths) -> Self { - Self { format, paths, files: Default::default(), builds: Default::default() } + Self { + format, + paths, + files: Default::default(), + builds: Default::default(), + profiles: Default::default(), + } } } @@ -57,13 +64,8 @@ impl CompilerCache { self.files.is_empty() } - /// Returns `true` if the cache contains any artifacts for the given file and version. 
- pub fn contains(&self, file: &Path, version: &Version) -> bool { - self.files.get(file).map_or(true, |entry| !entry.contains_version(version)) - } - /// Removes entry for the given file - pub fn remove(&mut self, file: &Path) -> Option> { + pub fn remove(&mut self, file: &Path) -> Option { self.files.remove(file) } @@ -78,17 +80,17 @@ impl CompilerCache { } /// Returns an iterator over all `CacheEntry` this cache contains - pub fn entries(&self) -> impl Iterator> { + pub fn entries(&self) -> impl Iterator { self.files.values() } /// Returns the corresponding `CacheEntry` for the file if it exists - pub fn entry(&self, file: &Path) -> Option<&CacheEntry> { + pub fn entry(&self, file: &Path) -> Option<&CacheEntry> { self.files.get(file) } /// Returns the corresponding `CacheEntry` for the file if it exists - pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry> { + pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry> { self.files.get_mut(file) } @@ -156,6 +158,7 @@ impl CompilerCache { .entries() .flat_map(|e| e.artifacts.values()) .flat_map(|a| a.values()) + .flat_map(|a| a.values()) .any(|a| a.build_id == *build_id) { outdated.push(build_id.to_owned()); @@ -373,6 +376,7 @@ impl Default for CompilerCache { builds: Default::default(), files: Default::default(), paths: Default::default(), + profiles: Default::default(), } } } @@ -393,6 +397,8 @@ pub struct CachedArtifact { pub build_id: String, } +pub type CachedArtifacts = BTreeMap>>; + /// A `CacheEntry` in the cache file represents a solidity file /// /// A solidity file can contain several contracts, for every contract a separate `Artifact` is @@ -400,15 +406,13 @@ pub struct CachedArtifact { /// `solc` versions generating version specific artifacts. #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct CacheEntry { +pub struct CacheEntry { /// the last modification time of this file pub last_modification_date: u64, /// hash to identify whether the content of the file changed pub content_hash: String, /// identifier name see [`foundry_compilers_core::utils::source_name()`] pub source_name: PathBuf, - /// what config was set when compiling this file - pub compiler_settings: S, /// fully resolved imports of the file /// /// all paths start relative from the project's root: `src/importedFile.sol` @@ -422,9 +426,9 @@ pub struct CacheEntry { /// file `C` would be compiled twice, with `0.8.10` and `0.8.11`, producing two different /// artifacts. /// - /// This map tracks the artifacts by `name -> (Version -> PathBuf)`. + /// This map tracks the artifacts by `name -> (Version -> profile -> PathBuf)`. /// This mimics the default artifacts directory structure - pub artifacts: BTreeMap>, + pub artifacts: CachedArtifacts, /// Whether this file was compiled at least once. /// /// If this is true and `artifacts` are empty, it means that given version of the file does @@ -435,7 +439,7 @@ pub struct CacheEntry { pub seen_by_compiler: bool, } -impl CacheEntry { +impl CacheEntry { /// Returns the last modified timestamp `Duration` pub fn last_modified(&self) -> Duration { Duration::from_millis(self.last_modification_date) @@ -456,7 +460,12 @@ impl CacheEntry { /// # } /// ``` pub fn find_artifact_path(&self, contract_name: &str) -> Option<&Path> { - self.artifacts.get(contract_name)?.iter().next().map(|(_, p)| p.path.as_path()) + self.artifacts + .get(contract_name)? 
+ .iter() + .next() + .and_then(|(_, a)| a.iter().next()) + .map(|(_, p)| p.path.as_path()) } /// Reads the last modification date from the file's metadata @@ -481,13 +490,16 @@ impl CacheEntry { for (artifact_name, versioned_files) in self.artifacts.iter() { let mut files = Vec::with_capacity(versioned_files.len()); for (version, cached_artifact) in versioned_files { - let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?; - files.push(ArtifactFile { - artifact, - file: cached_artifact.path.clone(), - version: version.clone(), - build_id: cached_artifact.build_id.clone(), - }); + for (profile, cached_artifact) in cached_artifact { + let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?; + files.push(ArtifactFile { + artifact, + file: cached_artifact.path.clone(), + version: version.clone(), + build_id: cached_artifact.build_id.clone(), + profile: profile.clone(), + }); + } } artifacts.insert(artifact_name.clone(), files); } @@ -501,30 +513,48 @@ impl CacheEntry { { for (name, artifacts) in artifacts.into_iter() { for artifact in artifacts { - self.artifacts.entry(name.clone()).or_default().insert( - artifact.version.clone(), - CachedArtifact { - build_id: artifact.build_id.clone(), - path: artifact.file.clone(), - }, - ); + self.artifacts + .entry(name.clone()) + .or_default() + .entry(artifact.version.clone()) + .or_default() + .insert( + artifact.profile.clone(), + CachedArtifact { + build_id: artifact.build_id.clone(), + path: artifact.file.clone(), + }, + ); } } } /// Returns `true` if the artifacts set contains the given version - pub fn contains_version(&self, version: &Version) -> bool { - self.artifacts_versions().any(|(v, _)| v == version) + pub fn contains(&self, version: &Version, profile: &str) -> bool { + self.artifacts.values().any(|artifacts| { + artifacts.get(version).and_then(|artifacts| artifacts.get(profile)).is_some() + }) } /// Iterator that yields all artifact files and their version - pub fn artifacts_versions(&self) -> impl Iterator { - self.artifacts.values().flatten() + pub fn artifacts_versions(&self) -> impl Iterator { + self.artifacts + .values() + .flatten() + .flat_map(|(v, a)| a.iter().map(move |(p, a)| (v, p.as_str(), a))) } /// Returns the artifact file for the contract and version pair - pub fn find_artifact(&self, contract: &str, version: &Version) -> Option<&CachedArtifact> { - self.artifacts.get(contract).and_then(|files| files.get(version)) + pub fn find_artifact( + &self, + contract: &str, + version: &Version, + profile: &str, + ) -> Option<&CachedArtifact> { + self.artifacts + .get(contract) + .and_then(|files| files.get(version)) + .and_then(|files| files.get(profile)) } /// Iterator that yields all artifact files and their version @@ -532,17 +562,17 @@ impl CacheEntry { &'a self, version: &'a Version, ) -> impl Iterator + 'a { - self.artifacts_versions().filter_map(move |(ver, file)| (ver == version).then_some(file)) + self.artifacts_versions().filter_map(move |(ver, _, file)| (ver == version).then_some(file)) } /// Iterator that yields all artifact files pub fn artifacts(&self) -> impl Iterator { - self.artifacts.values().flat_map(BTreeMap::values) + self.artifacts.values().flat_map(BTreeMap::values).flat_map(BTreeMap::values) } /// Mutable iterator over all artifact files pub fn artifacts_mut(&mut self) -> impl Iterator { - self.artifacts.values_mut().flat_map(BTreeMap::values_mut) + self.artifacts.values_mut().flat_map(BTreeMap::values_mut).flat_map(BTreeMap::values_mut) } /// Checks if all artifact files 
exist @@ -622,7 +652,7 @@ pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput, C: Compiler> { pub content_hashes: HashMap, } -impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { +impl ArtifactsCacheInner<'_, T, C> { /// Creates a new cache entry for the file fn create_cache_entry(&mut self, file: PathBuf, source: &Source) { let imports = self @@ -633,11 +663,10 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { .collect(); let entry = CacheEntry { - last_modification_date: CacheEntry::::read_last_modification_date(&file) + last_modification_date: CacheEntry::read_last_modification_date(&file) .unwrap_or_default(), content_hash: source.content_hash(), source_name: strip_prefix(&file, self.project.root()).into(), - compiler_settings: self.project.settings.clone(), imports, version_requirement: self.edges.version_requirement(&file).map(|v| v.to_string()), // artifacts remain empty until we received the compiler output @@ -658,7 +687,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { /// 2. [SourceCompilationKind::Optimized] - the file is not dirty, but is imported by a dirty /// file and thus will be processed by solc. For such files we don't need full data, so we /// are marking them as clean to optimize output selection later. - fn filter(&mut self, sources: &mut Sources, version: &Version) { + fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) { // sources that should be passed to compiler. let mut compile_complete = HashSet::new(); let mut compile_optimized = HashSet::new(); @@ -667,7 +696,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { self.sources_in_scope.insert(file.clone(), version.clone()); // If we are missing artifact for file, compile it. - if self.is_missing_artifacts(file, version) { + if self.is_missing_artifacts(file, version, profile) { compile_complete.insert(file.clone()); } @@ -701,7 +730,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { /// Returns whether we are missing artifacts for the given file and version. 
#[instrument(level = "trace", skip(self))] - fn is_missing_artifacts(&self, file: &Path, version: &Version) -> bool { + fn is_missing_artifacts(&self, file: &Path, version: &Version, profile: &str) -> bool { let Some(entry) = self.cache.entry(file) else { trace!("missing cache entry"); return true; @@ -715,7 +744,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { return false; } - if !entry.contains_version(version) { + if !entry.contains(version, profile) { trace!("missing linked artifacts"); return true; } @@ -750,6 +779,44 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { } } + let existing_profiles = self.project.settings_profiles().collect::>(); + + let mut dirty_profiles = HashSet::new(); + for (profile, settings) in &self.cache.profiles { + if !existing_profiles + .get(profile.as_str()) + .map_or(false, |p| p.can_use_cached(settings)) + { + trace!("dirty profile: {}", profile); + dirty_profiles.insert(profile.clone()); + } + } + + for profile in &dirty_profiles { + self.cache.profiles.remove(profile); + } + + self.cache.files.retain(|_, entry| { + // keep entries which already had no artifacts + if entry.artifacts.is_empty() { + return true; + } + entry.artifacts.retain(|_, artifacts| { + artifacts.retain(|_, artifacts| { + artifacts.retain(|profile, _| !dirty_profiles.contains(profile)); + !artifacts.is_empty() + }); + !artifacts.is_empty() + }); + !entry.artifacts.is_empty() + }); + + for (profile, settings) in existing_profiles { + if !self.cache.profiles.contains_key(profile) { + self.cache.profiles.insert(profile.to_string(), settings.clone()); + } + } + // Iterate over existing cache entries. let files = self.cache.files.keys().cloned().collect::>(); @@ -811,11 +878,6 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { return true; } - if !self.project.settings.can_use_cached(&entry.compiler_settings) { - trace!("solc config not compatible"); - return true; - } - // If any requested extra files are missing for any artifact, mark source as dirty to // generate them for artifacts in self.cached_artifacts.values() { @@ -974,10 +1036,10 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { } /// Filters out those sources that don't need to be compiled - pub fn filter(&mut self, sources: &mut Sources, version: &Version) { + pub fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) { match self { ArtifactsCache::Ephemeral(..) 
=> {} - ArtifactsCache::Cached(cache) => cache.filter(sources, version), + ArtifactsCache::Cached(cache) => cache.filter(sources, version, profile), } } diff --git a/crates/compilers/src/compile/output/contracts.rs b/crates/compilers/src/compile/output/contracts.rs index 99a0e0bd7..004c7d035 100644 --- a/crates/compilers/src/compile/output/contracts.rs +++ b/crates/compilers/src/compile/output/contracts.rs @@ -262,6 +262,7 @@ pub struct VersionedContract { pub contract: Contract, pub version: Version, pub build_id: String, + pub profile: String, } /// A mapping of `ArtifactId` and their `CompactContractBytecode` diff --git a/crates/compilers/src/compile/output/info.rs b/crates/compilers/src/compile/output/info.rs index 0947b2888..ad7e1c1f5 100644 --- a/crates/compilers/src/compile/output/info.rs +++ b/crates/compilers/src/compile/output/info.rs @@ -88,7 +88,7 @@ pub struct ContractInfoRef<'a> { pub name: Cow<'a, str>, } -impl<'a> From for ContractInfoRef<'a> { +impl From for ContractInfoRef<'_> { fn from(info: ContractInfo) -> Self { ContractInfoRef { path: info.path.map(Into::into), name: info.name.into() } } @@ -102,7 +102,7 @@ impl<'a> From<&'a ContractInfo> for ContractInfoRef<'a> { } } } -impl<'a> From for ContractInfoRef<'a> { +impl From for ContractInfoRef<'_> { fn from(info: FullContractInfo) -> Self { ContractInfoRef { path: Some(info.path.into()), name: info.name.into() } } diff --git a/crates/compilers/src/compile/output/mod.rs b/crates/compilers/src/compile/output/mod.rs index 06b5224da..abbf31b9d 100644 --- a/crates/compilers/src/compile/output/mod.rs +++ b/crates/compilers/src/compile/output/mod.rs @@ -564,6 +564,7 @@ impl AggregatedCompilerOutput { &mut self, version: Version, build_info: RawBuildInfo, + profile: &str, output: CompilerOutput, ) { let build_id = build_info.id.clone(); @@ -578,17 +579,19 @@ impl AggregatedCompilerOutput { source_file, version: version.clone(), build_id: build_id.clone(), + profile: profile.to_string(), }); } for (file_name, new_contracts) in contracts { - let contracts = self.contracts.as_mut().entry(file_name).or_default(); + let contracts = self.contracts.0.entry(file_name).or_default(); for (contract_name, contract) in new_contracts { let versioned = contracts.entry(contract_name).or_default(); versioned.push(VersionedContract { contract, version: version.clone(), build_id: build_id.clone(), + profile: profile.to_string(), }); } } @@ -888,7 +891,7 @@ pub struct OutputDiagnostics<'a, C: Compiler> { compiler_severity_filter: Severity, } -impl<'a, C: Compiler> OutputDiagnostics<'a, C> { +impl OutputDiagnostics<'_, C> { /// Returns true if there is at least one error of high severity pub fn has_error(&self) -> bool { self.compiler_output.has_error( @@ -904,7 +907,7 @@ impl<'a, C: Compiler> OutputDiagnostics<'a, C> { } } -impl<'a, C: Compiler> fmt::Display for OutputDiagnostics<'a, C> { +impl fmt::Display for OutputDiagnostics<'_, C> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("Compiler run ")?; if self.has_error() { diff --git a/crates/compilers/src/compile/output/sources.rs b/crates/compilers/src/compile/output/sources.rs index e34fb4643..063c09b92 100644 --- a/crates/compilers/src/compile/output/sources.rs +++ b/crates/compilers/src/compile/output/sources.rs @@ -224,4 +224,5 @@ pub struct VersionedSourceFile { pub source_file: SourceFile, pub version: Version, pub build_id: String, + pub profile: String, } diff --git a/crates/compilers/src/compile/project.rs b/crates/compilers/src/compile/project.rs index 
b76c47a5a..4d634e302 100644 --- a/crates/compilers/src/compile/project.rs +++ b/crates/compilers/src/compile/project.rs @@ -117,7 +117,7 @@ use semver::Version; use std::{collections::HashMap, path::PathBuf, time::Instant}; /// A set of different Solc installations with their version and the sources to be compiled -pub(crate) type VersionedSources<L> = HashMap<L, HashMap<Version, Sources>>; +pub(crate) type VersionedSources<'a, L, S> = HashMap<L, Vec<(Version, Sources, (&'a str, &'a S))>>; #[derive(Debug)] pub struct ProjectCompiler<'a, T: ArtifactOutput, C: Compiler> { @@ -125,7 +125,7 @@ edges: GraphEdges<C::ParsedSource>, project: &'a Project<C, T>, /// how to compile all the sources - sources: CompilerSources<C::Language>, + sources: CompilerSources<'a, C::Language, C::Settings>, } impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { @@ -146,11 +146,7 @@ sources.retain(|f, _| filter.is_match(f)) } let graph = Graph::resolve_sources(&project.paths, sources)?; - let (sources, edges) = graph.into_sources_by_version( - project.offline, - &project.locked_versions, - &project.compiler, - )?; + let (sources, edges) = graph.into_sources_by_version(project)?; // If there are multiple different versions, and we can use multiple jobs we can compile // them in parallel. @@ -217,7 +213,7 @@ #[derive(Debug)] struct PreprocessedState<'a, T: ArtifactOutput, C: Compiler> { /// Contains all the sources to compile. - sources: CompilerSources<C::Language>, + sources: CompilerSources<'a, C::Language, C::Settings>, /// Cache that holds `CacheEntry` objects if caching is enabled and the project is recompiled cache: ArtifactsCache<'a, T, C>, @@ -313,7 +309,7 @@ struct ArtifactsState<'a, T: ArtifactOutput, C: Compiler> { compiled_artifacts: Artifacts<T::Artifact>, } -impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { +impl<T: ArtifactOutput, C: Compiler> ArtifactsState<'_, T, C> { /// Writes the cache file /// /// this concludes the [`Project::compile()`] state machine @@ -357,14 +353,14 @@ /// Determines how the `solc <-> sources` pairs are executed. #[derive(Debug, Clone)] -struct CompilerSources<L> { +struct CompilerSources<'a, L, S> { /// The sources to compile. - sources: VersionedSources<L>, + sources: VersionedSources<'a, L, S>, /// The number of jobs to use for parallel compilation. jobs: Option<usize>, } -impl<L: Language> CompilerSources<L> { +impl<L: Language, S: CompilerSettings> CompilerSources<'_, L, S> { /// Converts all `\\` separators to `/`.
/// /// This effectively ensures that `solc` can find imported files like `/src/Cheats.sol` in the /// @@ -375,7 +371,7 @@ use path_slash::PathBufExt; self.sources.values_mut().for_each(|versioned_sources| { - versioned_sources.values_mut().for_each(|sources| { + versioned_sources.iter_mut().for_each(|(_, sources, _)| { *sources = std::mem::take(sources) .into_iter() .map(|(path, source)| { @@ -394,9 +390,9 @@ ) { cache.remove_dirty_sources(); for versioned_sources in self.sources.values_mut() { - for (version, sources) in versioned_sources { + for (version, sources, (profile, _)) in versioned_sources { trace!("Filtering {} sources for {}", sources.len(), version); - cache.filter(sources, version); + cache.filter(sources, version, profile); trace!( "Detected {} sources to compile {:?}", sources.dirty().count(), @@ -407,7 +403,7 @@ } /// Compiles all the files with `Solc` - fn compile<C: Compiler<Language = L>, T: ArtifactOutput>( + fn compile<C: Compiler<Language = L, Settings = S>, T: ArtifactOutput>( self, cache: &mut ArtifactsCache<'_, T, C>, ) -> Result<AggregatedCompilerOutput<C>> { @@ -424,7 +420,8 @@ let mut jobs = Vec::new(); for (language, versioned_sources) in self.sources { - for (version, sources) in versioned_sources { + for (version, sources, (profile, opt_settings)) in versioned_sources { + let mut opt_settings = opt_settings.clone(); if sources.is_empty() { // nothing to compile trace!("skip {} for empty sources set", version); @@ -433,7 +430,6 @@ // depending on the composition of the filtered sources, the output selection can be // optimized - let mut opt_settings = project.settings.clone(); let actually_dirty = sparse_output.sparse_sources(&sources, &mut opt_settings, graph); @@ -456,7 +452,7 @@ input.strip_prefix(project.paths.root.as_path()); - jobs.push((input, actually_dirty)); + jobs.push((input, profile, actually_dirty)); } } @@ -468,7 +464,7 @@ let mut aggregated = AggregatedCompilerOutput::default(); - for (input, mut output, actually_dirty) in results { + for (input, mut output, profile, actually_dirty) in results { let version = input.version(); // Mark all files as seen by the compiler @@ -485,22 +481,22 @@ ); output.join_all(project.paths.root.as_path()); - aggregated.extend(version.clone(), build_info, output); + aggregated.extend(version.clone(), build_info, profile, output); } Ok(aggregated) } } -type CompilationResult<I, E> = Result<Vec<(I, CompilerOutput<E>, Vec<PathBuf>)>>; +type CompilationResult<'a, I, E> = Result<Vec<(I, CompilerOutput<E>, &'a str, Vec<PathBuf>)>>; /// Compiles the input set sequentially and returns a [Vec] of outputs.
-fn compile_sequential<C: Compiler>( +fn compile_sequential<'a, C: Compiler>( compiler: &C, - jobs: Vec<(C::Input, Vec<PathBuf>)>, -) -> CompilationResult<C::Input, C::CompilationError> { + jobs: Vec<(C::Input, &'a str, Vec<PathBuf>)>, +) -> CompilationResult<'a, C::Input, C::CompilationError> { jobs.into_iter() - .map(|(input, actually_dirty)| { + .map(|(input, profile, actually_dirty)| { let start = Instant::now(); report::compiler_spawn( &input.compiler_name(), @@ -510,17 +506,17 @@ let output = compiler.compile(&input)?; report::compiler_success(&input.compiler_name(), input.version(), &start.elapsed()); - Ok((input, output, actually_dirty)) + Ok((input, output, profile, actually_dirty)) }) .collect() } /// compiles the input set using `num_jobs` threads -fn compile_parallel<C: Compiler>( +fn compile_parallel<'a, C: Compiler>( compiler: &C, - jobs: Vec<(C::Input, Vec<PathBuf>)>, + jobs: Vec<(C::Input, &'a str, Vec<PathBuf>)>, num_jobs: usize, -) -> CompilationResult<C::Input, C::CompilationError> { +) -> CompilationResult<'a, C::Input, C::CompilationError> { // need to get the currently installed reporter before installing the pool, otherwise each new // thread in the pool will get initialized with the default value of the `thread_local!`'s // localkey. This way we keep access to the reporter in the rayon pool @@ -531,7 +527,7 @@ pool.install(move || { jobs.into_par_iter() - .map(move |(input, actually_dirty)| { + .map(move |(input, profile, actually_dirty)| { // set the reporter on this thread let _guard = report::set_scoped(&scoped_report); @@ -547,7 +543,7 @@ input.version(), &start.elapsed(), ); - (input, output, actually_dirty) + (input, output, profile, actually_dirty) }) }) .collect() @@ -678,7 +674,7 @@ mod tests { // single solc assert_eq!(len, 1); - let filtered = &sources.values().next().unwrap().values().next().unwrap(); + let filtered = &sources.values().next().unwrap()[0].1; // 3 contracts total assert_eq!(filtered.0.len(), 3); diff --git a/crates/compilers/src/compilers/mod.rs b/crates/compilers/src/compilers/mod.rs index 506ea7ca6..a887c2687 100644 --- a/crates/compilers/src/compilers/mod.rs +++ b/crates/compilers/src/compilers/mod.rs @@ -24,6 +24,9 @@ pub mod solc; pub mod vyper; pub use vyper::*; +mod restrictions; +pub use restrictions::{CompilerSettingsRestrictions, RestrictionsWithVersion}; + /// A compiler version is either installed (available locally) or can be downloaded, from the remote /// endpoint #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] @@ -65,6 +68,10 @@ impl fmt::Display for CompilerVersion { pub trait CompilerSettings: Default + Serialize + DeserializeOwned + Clone + Debug + Send + Sync + 'static { + /// We allow configuring settings restrictions which might optionally contain specific + /// requirements for compiler configuration, e.g. min/max evm_version, optimizer runs. + type Restrictions: CompilerSettingsRestrictions; + /// Executes given fn with mutable reference to configured [OutputSelection]. fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy); @@ -97,6 +104,9 @@ fn with_include_paths(self, _include_paths: &BTreeSet<PathBuf>) -> Self { self } + + /// Returns whether current settings satisfy given restrictions. + fn satisfies_restrictions(&self, restrictions: &Self::Restrictions) -> bool; } /// Input of a compiler, including sources and settings used for their compilation.
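Taken together, the changes above add one more dimension to every artifact collection: lookups go contract name → compiler version → profile name, and artifact file names can now carry both the version and the profile (e.g. `Greeter.json`, `Greeter.0.8.11.json`, or `Greeter.0.8.11.paris.json` for a hypothetical `paris` profile). A minimal self-contained sketch of that shape and the triple-key lookup, mirroring `CacheEntry::find_artifact` above — the reduced `CachedArtifact` here is illustrative, not the crate's full type:

```rust
use semver::Version;
use std::{collections::BTreeMap, path::PathBuf};

// Reduced, illustrative stand-in for the crate's `CachedArtifact`.
#[derive(Debug)]
struct CachedArtifact {
    path: PathBuf,
    build_id: String,
}

// The new cache shape: contract name -> compiler version -> profile name.
type CachedArtifacts = BTreeMap<String, BTreeMap<Version, BTreeMap<String, CachedArtifact>>>;

// Mirrors `CacheEntry::find_artifact`: a cache hit now needs all three keys.
fn find_artifact<'a>(
    artifacts: &'a CachedArtifacts,
    contract: &str,
    version: &Version,
    profile: &str,
) -> Option<&'a CachedArtifact> {
    artifacts
        .get(contract)
        .and_then(|versions| versions.get(version))
        .and_then(|profiles| profiles.get(profile))
}

fn main() {
    let mut cache = CachedArtifacts::new();
    cache
        .entry("Greeter".to_string())
        .or_default()
        .entry(Version::new(0, 8, 11))
        .or_default()
        .insert(
            "default".to_string(),
            CachedArtifact { path: "out/Greeter.sol/Greeter.json".into(), build_id: "abc".into() },
        );
    assert!(find_artifact(&cache, "Greeter", &Version::new(0, 8, 11), "default").is_some());
    assert!(find_artifact(&cache, "Greeter", &Version::new(0, 8, 11), "paris").is_none());
}
```

Keying by profile last keeps artifacts for different settings side by side under the same version, which is what lets a single cache serve several settings profiles at once.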
diff --git a/crates/compilers/src/compilers/multi.rs b/crates/compilers/src/compilers/multi.rs index 832cdf623..78ab9381c 100644 --- a/crates/compilers/src/compilers/multi.rs +++ b/crates/compilers/src/compilers/multi.rs @@ -1,5 +1,6 @@ use super::{ - solc::{SolcCompiler, SolcVersionedInput, SOLC_EXTENSIONS}, + restrictions::CompilerSettingsRestrictions, + solc::{SolcCompiler, SolcSettings, SolcVersionedInput, SOLC_EXTENSIONS}, vyper::{ input::VyperVersionedInput, parser::VyperParsedSource, Vyper, VyperLanguage, VYPER_EXTENSIONS, @@ -10,7 +11,8 @@ use super::{ use crate::{ artifacts::vyper::{VyperCompilationError, VyperSettings}, resolver::parse::SolData, - solc::SolcSettings, + settings::VyperRestrictions, + solc::SolcRestrictions, }; use foundry_compilers_artifacts::{ error::SourceLocation, @@ -129,6 +131,18 @@ impl fmt::Display for MultiCompilerError { } } +#[derive(Clone, Copy, Debug, Default)] +pub struct MultiCompilerRestrictions { + pub solc: SolcRestrictions, + pub vyper: VyperRestrictions, +} + +impl CompilerSettingsRestrictions for MultiCompilerRestrictions { + fn merge(self, other: Self) -> Option<Self> { + Some(Self { solc: self.solc.merge(other.solc)?, vyper: self.vyper.merge(other.vyper)? }) + } +} + /// Settings for the [MultiCompiler]. Includes settings for both Solc and Vyper compilers. #[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] pub struct MultiCompilerSettings { @@ -137,6 +151,8 @@ pub struct MultiCompilerSettings { } impl CompilerSettings for MultiCompilerSettings { + type Restrictions = MultiCompilerRestrictions; + fn can_use_cached(&self, other: &Self) -> bool { self.solc.can_use_cached(&other.solc) && self.vyper.can_use_cached(&other.vyper) } @@ -173,6 +189,11 @@ impl CompilerSettings for MultiCompilerSettings { vyper: self.vyper.with_remappings(remappings), } } + + fn satisfies_restrictions(&self, restrictions: &Self::Restrictions) -> bool { + self.solc.satisfies_restrictions(&restrictions.solc) + && self.vyper.satisfies_restrictions(&restrictions.vyper) + } } impl From<MultiCompilerSettings> for SolcSettings { diff --git a/crates/compilers/src/compilers/restrictions.rs b/crates/compilers/src/compilers/restrictions.rs new file mode 100644 index 000000000..acfcf29b1 --- /dev/null +++ b/crates/compilers/src/compilers/restrictions.rs @@ -0,0 +1,47 @@ +use std::{ + fmt::Debug, + ops::{Deref, DerefMut}, +}; + +use semver::VersionReq; + +/// Abstraction over a set of restrictions for a given [`crate::Compiler::Settings`]. +pub trait CompilerSettingsRestrictions: Copy + Debug + Sync + Send + Clone + Default { + /// Combines this restriction with another one. Returns `None` if restrictions are incompatible. + fn merge(self, other: Self) -> Option<Self>; +} + +/// Combines [CompilerSettingsRestrictions] with restrictions on compiler versions for a given +/// source file.
+#[derive(Debug, Clone, Default)] +pub struct RestrictionsWithVersion<T> { + pub version: Option<VersionReq>, + pub restrictions: T, +} + +impl<T: CompilerSettingsRestrictions> RestrictionsWithVersion<T> { + pub fn merge(&mut self, other: Self) { + if let Some(version) = other.version { + if let Some(self_version) = self.version.as_mut() { + self_version.comparators.extend(version.comparators); + } else { + self.version = Some(version.clone()); + } + } + self.restrictions.merge(other.restrictions); + } +} + +impl<T> Deref for RestrictionsWithVersion<T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.restrictions + } +} + +impl<T> DerefMut for RestrictionsWithVersion<T> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.restrictions + } +} diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index 0bdb6f42b..d317e67a2 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -1,6 +1,6 @@ use super::{ - CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, CompilerVersion, - Language, ParsedSource, + restrictions::CompilerSettingsRestrictions, CompilationError, Compiler, CompilerInput, + CompilerOutput, CompilerSettings, CompilerVersion, Language, ParsedSource, }; use crate::resolver::parse::SolData; pub use foundry_compilers_artifacts::SolcLanguage; @@ -9,7 +9,7 @@ use foundry_compilers_artifacts::{ output_selection::OutputSelection, remappings::Remapping, sources::{Source, Sources}, - Error, Settings, Severity, SolcInput, + BytecodeHash, Error, EvmVersion, Settings, Severity, SolcInput, }; use foundry_compilers_core::error::Result; use itertools::Itertools; @@ -190,7 +190,91 @@ impl DerefMut for SolcSettings { } } +/// Abstraction over min/max restrictions on some value. +#[derive(Debug, Clone, Copy, Eq, Default, PartialEq)] +pub struct Restriction<V> { + pub min: Option<V>, + pub max: Option<V>, +} + +impl<V: Ord + Copy> Restriction<V> { + /// Returns true if the given value satisfies the restrictions + /// + /// If given None, only returns true if no restrictions are set + pub fn satisfies(&self, value: Option<V>) -> bool { + self.min.map_or(true, |min| value.map_or(false, |v| v >= min)) + && self.max.map_or(true, |max| value.map_or(false, |v| v <= max)) + } + + /// Combines two restrictions into a new one + pub fn merge(self, other: Self) -> Option<Self> { + let Self { mut min, mut max } = self; + let Self { min: other_min, max: other_max } = other; + + min = min.map_or(other_min, |this_min| { + Some(other_min.map_or(this_min, |other_min| this_min.max(other_min))) + }); + max = max.map_or(other_max, |this_max| { + Some(other_max.map_or(this_max, |other_max| this_max.min(other_max))) + }); + + if let (Some(min), Some(max)) = (min, max) { + if min > max { + return None; + } + } + + Some(Self { min, max }) + } + + pub fn apply(&self, value: Option<V>) -> Option<V> { + match (value, self.min, self.max) { + (None, Some(min), _) => Some(min), + (None, None, Some(max)) => Some(max), + (Some(cur), Some(min), _) if cur < min => Some(min), + (Some(cur), _, Some(max)) if cur > max => Some(max), + _ => value, + } + } +} + +/// Restrictions on settings for the solc compiler.
+#[derive(Debug, Clone, Copy, Default)] +pub struct SolcRestrictions { + pub evm_version: Restriction<EvmVersion>, + pub via_ir: Option<bool>, + pub optimizer_runs: Restriction<usize>, + pub bytecode_hash: Option<BytecodeHash>, +} + +impl CompilerSettingsRestrictions for SolcRestrictions { + fn merge(self, other: Self) -> Option<Self> { + if let (Some(via_ir), Some(other_via_ir)) = (self.via_ir, other.via_ir) { + if via_ir != other_via_ir { + return None; + } + } + + if let (Some(bytecode_hash), Some(other_bytecode_hash)) = + (self.bytecode_hash, other.bytecode_hash) + { + if bytecode_hash != other_bytecode_hash { + return None; + } + } + + Some(Self { + evm_version: self.evm_version.merge(other.evm_version)?, + via_ir: self.via_ir.or(other.via_ir), + optimizer_runs: self.optimizer_runs.merge(other.optimizer_runs)?, + bytecode_hash: self.bytecode_hash.or(other.bytecode_hash), + }) + } +} + impl CompilerSettings for SolcSettings { + type Restrictions = SolcRestrictions; + fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy) { f(&mut self.settings.output_selection) } @@ -247,6 +331,26 @@ impl CompilerSettings for SolcSettings { self.cli_settings.include_paths.clone_from(include_paths); self } + + fn satisfies_restrictions(&self, restrictions: &Self::Restrictions) -> bool { + let mut satisfies = true; + + let SolcRestrictions { evm_version, via_ir, optimizer_runs, bytecode_hash } = restrictions; + + satisfies &= evm_version.satisfies(self.evm_version); + satisfies &= via_ir.map_or(true, |via_ir| via_ir == self.via_ir.unwrap_or_default()); + satisfies &= bytecode_hash.map_or(true, |bytecode_hash| { + self.metadata.as_ref().and_then(|m| m.bytecode_hash) == Some(bytecode_hash) + }); + satisfies &= optimizer_runs.satisfies(self.optimizer.runs); + + // Ensure that we either don't have min optimizer runs set or that the optimizer is enabled + satisfies &= optimizer_runs + .min + .map_or(true, |min| min == 0 || self.optimizer.enabled.unwrap_or_default()); + + satisfies + } } impl ParsedSource for SolData { @@ -361,7 +465,7 @@ mod tests { ); let build_info = RawBuildInfo::new(&input, &out_converted, true).unwrap(); let mut aggregated = AggregatedCompilerOutput::<SolcCompiler>::default(); - aggregated.extend(v, build_info, out_converted); + aggregated.extend(v, build_info, "default", out_converted); assert!(!aggregated.is_unchanged()); } } diff --git a/crates/compilers/src/compilers/vyper/settings.rs b/crates/compilers/src/compilers/vyper/settings.rs index 21ae4526b..2a815d623 100644 --- a/crates/compilers/src/compilers/vyper/settings.rs +++ b/crates/compilers/src/compilers/vyper/settings.rs @@ -1,10 +1,26 @@ use std::{collections::BTreeSet, path::PathBuf}; pub use crate::artifacts::vyper::VyperSettings; -use crate::compilers::CompilerSettings; -use foundry_compilers_artifacts::output_selection::OutputSelection; +use crate::{ + compilers::{restrictions::CompilerSettingsRestrictions, CompilerSettings}, + solc::Restriction, +}; +use foundry_compilers_artifacts::{output_selection::OutputSelection, EvmVersion}; + +#[derive(Clone, Copy, Debug, Default)] +pub struct VyperRestrictions { + pub evm_version: Restriction<EvmVersion>, +} + +impl CompilerSettingsRestrictions for VyperRestrictions { + fn merge(self, other: Self) -> Option<Self> { + Some(Self { evm_version: self.evm_version.merge(other.evm_version)?
}) + } +} impl CompilerSettings for VyperSettings { + type Restrictions = VyperRestrictions; + fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection)) { f(&mut self.output_selection) } @@ -30,4 +46,8 @@ self.search_paths = Some(include_paths.clone()); self } + + fn satisfies_restrictions(&self, restrictions: &Self::Restrictions) -> bool { + restrictions.evm_version.satisfies(self.evm_version) + } } diff --git a/crates/compilers/src/filter.rs b/crates/compilers/src/filter.rs index 7ab20d802..9038e50c6 100644 --- a/crates/compilers/src/filter.rs +++ b/crates/compilers/src/filter.rs @@ -170,7 +170,7 @@ impl<'a> SparseOutputFilter<'a> { } } -impl<'a> fmt::Debug for SparseOutputFilter<'a> { +impl fmt::Debug for SparseOutputFilter<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { SparseOutputFilter::Optimized => f.write_str("Optimized"), diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs index fc2f8c3ad..ea6fa1b3d 100644 --- a/crates/compilers/src/lib.rs +++ b/crates/compilers/src/lib.rs @@ -71,12 +71,19 @@ use std::{ #[derivative(Debug)] pub struct Project<C: Compiler = MultiCompiler, T: ArtifactOutput = ConfigurableArtifacts> { pub compiler: C, - /// Compiler versions locked for specific languages. - pub locked_versions: HashMap<C::Language, Version>, /// The layout of the project pub paths: ProjectPathsConfig<C::Language>, /// The compiler settings pub settings: C::Settings, + /// Additional settings for cases when default compiler settings are not enough to cover all + /// possible restrictions. + pub additional_settings: BTreeMap<String, C::Settings>, + /// Mapping from file path to requirements on settings to compile it. + /// + /// A file will only be included in compiler inputs with profiles which satisfy the + /// restrictions. + pub restrictions: + BTreeMap<PathBuf, RestrictionsWithVersion<<C::Settings as CompilerSettings>::Restrictions>>, /// Whether caching is enabled pub cached: bool, /// Whether to output build information with each solc call. @@ -142,6 +149,11 @@ impl Project { pub fn artifacts_handler(&self) -> &T { &self.artifacts } + + pub fn settings_profiles(&self) -> impl Iterator<Item = (&str, &C::Settings)> { + std::iter::once(("default", &self.settings)) + .chain(self.additional_settings.iter().map(|(p, s)| (p.as_str(), s))) + } } impl Project where @@ -441,15 +453,25 @@ Ok(paths.remove(0)) } + + /// Invokes [CompilerSettings::update_output_selection] on the project's settings and all + /// additional settings profiles. + pub fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy) { + self.settings.update_output_selection(f); + self.additional_settings.iter_mut().for_each(|(_, s)| { + s.update_output_selection(f); + }); + } } pub struct ProjectBuilder<C: Compiler = MultiCompiler, T: ArtifactOutput = ConfigurableArtifacts> { /// The layout of the project paths: Option<ProjectPathsConfig<C::Language>>, - /// Compiler versions locked for specific languages. - locked_versions: HashMap<C::Language, Version>, /// How solc invocation should be configured. settings: Option<C::Settings>, + additional_settings: BTreeMap<String, C::Settings>, + restrictions: + BTreeMap<PathBuf, RestrictionsWithVersion<<C::Settings as CompilerSettings>::Restrictions>>, /// Whether caching is enabled, default is true. cached: bool, /// Whether to output build information with each solc call.
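To make the min/max semantics of `Restriction` concrete, here is a self-contained sketch with the same `merge`/`satisfies` behavior as the implementation above, exercised on optimizer-runs bounds. The generic `V: Ord + Copy` stands in for `EvmVersion`, `usize`, etc., and the values are illustrative:

```rust
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
struct Restriction<V> {
    min: Option<V>,
    max: Option<V>,
}

impl<V: Ord + Copy> Restriction<V> {
    /// True if `value` fits between the configured bounds.
    fn satisfies(&self, value: Option<V>) -> bool {
        self.min.map_or(true, |min| value.map_or(false, |v| v >= min))
            && self.max.map_or(true, |max| value.map_or(false, |v| v <= max))
    }

    /// Tightens the bounds pairwise; `None` signals incompatible restrictions.
    fn merge(self, other: Self) -> Option<Self> {
        let min = match (self.min, other.min) {
            (Some(a), Some(b)) => Some(a.max(b)),
            (a, b) => a.or(b),
        };
        let max = match (self.max, other.max) {
            (Some(a), Some(b)) => Some(a.min(b)),
            (a, b) => a.or(b),
        };
        if let (Some(min), Some(max)) = (min, max) {
            if min > max {
                return None;
            }
        }
        Some(Self { min, max })
    }
}

fn main() {
    // One source demands at least 200 optimizer runs, another at most 1000.
    let a = Restriction { min: Some(200u32), max: None };
    let b = Restriction { min: None, max: Some(1000) };
    let merged = a.merge(b).unwrap(); // effectively 200..=1000
    assert!(merged.satisfies(Some(500)));
    assert!(!merged.satisfies(Some(100)));
    // A third source demanding >= 2000 runs conflicts: merge yields None.
    let c = Restriction { min: Some(2000), max: None };
    assert!(merged.merge(c).is_none());
}
```

Merging tightens both bounds and fails with `None` exactly when the combined range is empty, so incompatible per-file restrictions surface as a resolution error instead of a silently wrong compile.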
@@ -489,8 +511,9 @@ impl ProjectBuilder { compiler_severity_filter: Severity::Error, solc_jobs: None, settings: None, - locked_versions: Default::default(), sparse_output: None, + additional_settings: BTreeMap::new(), + restrictions: BTreeMap::new(), } } @@ -607,23 +630,29 @@ impl ProjectBuilder { } #[must_use] - pub fn locked_version(mut self, lang: impl Into, version: Version) -> Self { - self.locked_versions.insert(lang.into(), version); + pub fn sparse_output(mut self, filter: F) -> Self + where + F: FileFilter + 'static, + { + self.sparse_output = Some(Box::new(filter)); self } #[must_use] - pub fn locked_versions(mut self, versions: HashMap) -> Self { - self.locked_versions = versions; + pub fn additional_settings(mut self, additional: BTreeMap) -> Self { + self.additional_settings = additional; self } #[must_use] - pub fn sparse_output(mut self, filter: F) -> Self - where - F: FileFilter + 'static, - { - self.sparse_output = Some(Box::new(filter)); + pub fn restrictions( + mut self, + restrictions: BTreeMap< + PathBuf, + RestrictionsWithVersion<::Restrictions>, + >, + ) -> Self { + self.restrictions = restrictions; self } @@ -641,14 +670,17 @@ impl ProjectBuilder { slash_paths, ignored_file_paths, settings, - locked_versions, sparse_output, + additional_settings, + restrictions, .. } = self; ProjectBuilder { paths, cached, no_artifacts, + additional_settings, + restrictions, offline, slash_paths, artifacts, @@ -658,7 +690,6 @@ impl ProjectBuilder { solc_jobs, build_info, settings, - locked_versions, sparse_output, } } @@ -677,8 +708,9 @@ impl ProjectBuilder { build_info, slash_paths, settings, - locked_versions, sparse_output, + additional_settings, + restrictions, } = self; let mut paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?; @@ -704,8 +736,9 @@ impl ProjectBuilder { offline, slash_paths, settings: settings.unwrap_or_default(), - locked_versions, sparse_output, + additional_settings, + restrictions, }) } } @@ -737,30 +770,31 @@ impl ArtifactOutput for Project { self.artifacts_handler().handle_artifacts(contracts, artifacts) } - fn output_file_name(name: &str) -> PathBuf { - T::output_file_name(name) - } - - fn output_file_name_versioned(name: &str, version: &Version) -> PathBuf { - T::output_file_name_versioned(name, version) - } - - fn output_file(contract_file: &Path, name: &str) -> PathBuf { - T::output_file(contract_file, name) + fn output_file_name( + name: &str, + version: &Version, + profile: &str, + with_version: bool, + with_profile: bool, + ) -> PathBuf { + T::output_file_name(name, version, profile, with_version, with_profile) } - fn output_file_versioned(contract_file: &Path, name: &str, version: &Version) -> PathBuf { - T::output_file_versioned(contract_file, name, version) + fn output_file( + contract_file: &Path, + name: &str, + version: &Version, + profile: &str, + with_version: bool, + with_profile: bool, + ) -> PathBuf { + T::output_file(contract_file, name, version, profile, with_version, with_profile) } fn contract_name(file: &Path) -> Option { T::contract_name(file) } - fn output_exists(contract_file: &Path, name: &str, root: &Path) -> bool { - T::output_exists(contract_file, name, root) - } - fn read_cached_artifact(path: &Path) -> Result { T::read_cached_artifact(path) } diff --git a/crates/compilers/src/project_util/mock.rs b/crates/compilers/src/project_util/mock.rs index 22c6fdbf0..4d47f9147 100644 --- a/crates/compilers/src/project_util/mock.rs +++ b/crates/compilers/src/project_util/mock.rs @@ -606,7 +606,7 @@ impl<'a> 
NodesIter<'a> { } } -impl<'a> Iterator for NodesIter<'a> { +impl Iterator for NodesIter<'_> { type Item = usize; fn next(&mut self) -> Option { let file = self.stack.pop_front()?; diff --git a/crates/compilers/src/project_util/mod.rs b/crates/compilers/src/project_util/mod.rs index c71757311..48838c900 100644 --- a/crates/compilers/src/project_util/mod.rs +++ b/crates/compilers/src/project_util/mod.rs @@ -55,16 +55,12 @@ impl TempProject { /// Explicitly sets the solc version for the project #[cfg(feature = "svm-solc")] pub fn set_solc(&mut self, solc: &str) -> &mut Self { - use crate::compilers::{multi::MultiCompilerLanguage, solc::SolcLanguage}; - use semver::Version; - - let version = Version::parse(solc).unwrap(); - self.inner - .locked_versions - .insert(MultiCompilerLanguage::Solc(SolcLanguage::Solidity), version.clone()); - self.inner - .locked_versions - .insert(MultiCompilerLanguage::Solc(SolcLanguage::Yul), version.clone()); + use crate::solc::{Solc, SolcCompiler}; + + self.inner.compiler.solc = Some(SolcCompiler::Specific( + Solc::find_svm_installed_version(&solc.parse().unwrap()).unwrap().unwrap(), + )); + self } } diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index b8a456ca2..a4085a347 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -48,7 +48,7 @@ use crate::{ compilers::{Compiler, CompilerVersion, Language, ParsedSource}, project::VersionedSources, - ProjectPathsConfig, + ArtifactOutput, CompilerSettings, Project, ProjectPathsConfig, }; use core::fmt; use foundry_compilers_artifacts::sources::{Source, Sources}; @@ -212,7 +212,7 @@ pub struct Graph { root: PathBuf, } -impl Graph { +impl> Graph { /// Print the graph to `StdOut` pub fn print(&self) { self.print_with_options(Default::default()) @@ -463,12 +463,15 @@ impl> Graph { /// /// First we determine the compatible version for each input file (from sources and test folder, /// see `Self::resolve`) and then we add all resolved library imports. 
- pub fn into_sources_by_version<C: Compiler<ParsedSource = D, Language = L>>( + pub fn into_sources_by_version<C, T, S>( self, - offline: bool, - locked_versions: &HashMap<L, Version>, - compiler: &C, - ) -> Result<(VersionedSources<L>, GraphEdges<D>)> { + project: &Project<C, T>, + ) -> Result<(VersionedSources<'_, L, S>, GraphEdges<D>)> + where + T: ArtifactOutput, + S: CompilerSettings, + C: Compiler<ParsedSource = D, Language = L, Settings = S>, + { /// insert the imports of the given node into the sources map /// There can be following graph: /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` @@ -499,38 +502,44 @@ } } - let versioned_nodes_by_lang = - self.get_input_node_versions(offline, locked_versions, compiler)?; + let versioned_nodes = self.get_input_node_versions(project)?; + let versioned_nodes = self.resolve_settings(project, versioned_nodes)?; let (nodes, edges) = self.split(); let mut all_nodes = nodes.into_iter().enumerate().collect::<HashMap<_, _>>(); let mut resulted_sources = HashMap::new(); - // determine the `Sources` set for each solc version - for (language, versioned_nodes) in versioned_nodes_by_lang { - let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len()); - - for (version, input_node_indices) in versioned_nodes { - let mut sources = Sources::new(); - - // all input nodes will be processed - let mut processed_sources = input_node_indices.iter().copied().collect(); + let profiles = project.settings_profiles().collect::<Vec<_>>(); - // we only process input nodes (from sources, tests for example) - for idx in input_node_indices { - // insert the input node in the sources set and remove it from the available set - let (path, source) = all_nodes.get(&idx).cloned().expect("node is preset. qed"); - sources.insert(path, source); - insert_imports( - idx, - &mut all_nodes, - &mut sources, - &edges.edges, - &mut processed_sources, - ); + // determine the `Sources` set for each solc version + for (language, versioned_nodes) in versioned_nodes { + let mut versioned_sources = Vec::with_capacity(versioned_nodes.len()); + + for (version, profile_to_nodes) in versioned_nodes { + for (profile_idx, input_node_indices) in profile_to_nodes { + let mut sources = Sources::new(); + + // all input nodes will be processed + let mut processed_sources = input_node_indices.iter().copied().collect(); + + // we only process input nodes (from sources, tests for example) + for idx in input_node_indices { + // insert the input node in the sources set and remove it from the available + // set + let (path, source) = + all_nodes.get(&idx).cloned().expect("node is preset. qed"); + sources.insert(path, source); + insert_imports( + idx, + &mut all_nodes, + &mut sources, + &edges.edges, + &mut processed_sources, + ); + } + versioned_sources.push((version.clone(), sources, profiles[profile_idx])); } - versioned_sources.insert(version, sources); } resulted_sources.insert(language, versioned_sources); @@ -547,10 +556,11 @@ /// path/to/c.sol () /// ...
/// ``` - fn format_imports_list( + fn format_imports_list( &self, idx: usize, incompatible: HashSet, + project: &Project, f: &mut W, ) -> std::result::Result<(), std::fmt::Error> { let format_node = |idx, f: &mut W| { @@ -558,7 +568,7 @@ impl> Graph { let color = if incompatible.contains(&idx) { Color::Red } else { Color::White }; let mut line = utils::source_name(&node.path, &self.root).display().to_string(); - if let Some(req) = node.data.version_req() { + if let Some(req) = self.version_requirement(idx, project) { line.push_str(&format!(" {req}")); } @@ -574,39 +584,87 @@ impl> Graph { Ok(()) } + /// Combines version requirement parsed from file and from project restrictions. + fn version_requirement( + &self, + idx: usize, + project: &Project, + ) -> Option { + let node = self.node(idx); + let parsed_req = node.data.version_req(); + let other_req = project.restrictions.get(&node.path).and_then(|r| r.version.as_ref()); + + match (parsed_req, other_req) { + (Some(parsed_req), Some(other_req)) => { + let mut req = parsed_req.clone(); + req.comparators.extend(other_req.comparators.clone()); + Some(req) + } + (Some(parsed_req), None) => Some(parsed_req.clone()), + (None, Some(other_req)) => Some(other_req.clone()), + _ => None, + } + } + + /// Checks that the file's version is even available. + /// + /// This returns an error if the file's version is invalid semver, or is not available such as + /// 0.8.20, if the highest available version is `0.8.19` + fn check_available_version( + &self, + idx: usize, + all_versions: &[&CompilerVersion], + project: &Project, + ) -> std::result::Result<(), SourceVersionError> { + let Some(req) = self.version_requirement(idx, project) else { return Ok(()) }; + + if !all_versions.iter().any(|v| req.matches(v.as_ref())) { + return if project.offline { + Err(SourceVersionError::NoMatchingVersionOffline(req.clone())) + } else { + Err(SourceVersionError::NoMatchingVersion(req.clone())) + }; + } + + Ok(()) + } + /// Filters incompatible versions from the `candidates`. It iterates over node imports and in /// case if there is no compatible version it returns the latest seen node id. - fn retain_compatible_versions( + fn retain_compatible_versions( &self, idx: usize, candidates: &mut Vec<&CompilerVersion>, - offline: bool, + project: &Project, ) -> Result<(), String> { let mut all_versions = candidates.clone(); let nodes: Vec<_> = self.node_ids(idx).collect(); - let mut failed_node = None; + let mut failed_node_idx = None; for node in nodes.iter() { - if let Some(req) = self.node(*node).data.version_req() { + if let Some(req) = self.version_requirement(*node, project) { candidates.retain(|v| req.matches(v.as_ref())); if candidates.is_empty() { - failed_node = Some(*node); + failed_node_idx = Some(*node); break; } } } - let Some(failed_node_idx) = failed_node else { + let Some(failed_node_idx) = failed_node_idx else { // everything is fine return Ok(()); }; // This now keeps data for the node which were the last one before we had no candidates - // left. It means that there + // left. It means that there is a node directly conflicting with it in `nodes` coming + // before. 
     /// Filters incompatible versions from the `candidates`. It iterates over node imports and,
     /// in case there is no compatible version, reports the last seen node id.
-    fn retain_compatible_versions(
+    fn retain_compatible_versions<C: Compiler<ParsedSource = D, Language = L>, T: ArtifactOutput>(
         &self,
         idx: usize,
         candidates: &mut Vec<&CompilerVersion>,
-        offline: bool,
+        project: &Project<C, T>,
     ) -> Result<(), String> {
         let mut all_versions = candidates.clone();
 
         let nodes: Vec<_> = self.node_ids(idx).collect();
-        let mut failed_node = None;
+        let mut failed_node_idx = None;
         for node in nodes.iter() {
-            if let Some(req) = self.node(*node).data.version_req() {
+            if let Some(req) = self.version_requirement(*node, project) {
                 candidates.retain(|v| req.matches(v.as_ref()));
 
                 if candidates.is_empty() {
-                    failed_node = Some(*node);
+                    failed_node_idx = Some(*node);
                     break;
                 }
             }
         }
 
-        let Some(failed_node_idx) = failed_node else {
+        let Some(failed_node_idx) = failed_node_idx else {
             // everything is fine
             return Ok(());
         };
 
         // This now keeps data for the node which was the last one before we had no candidates
-        // left. It means that there
+        // left. It means that there is a node directly conflicting with it in `nodes` coming
+        // before.
         let failed_node = self.node(failed_node_idx);
 
-        if let Err(version_err) = failed_node.check_available_version(&all_versions, offline) {
+        if let Err(version_err) =
+            self.check_available_version(failed_node_idx, &all_versions, project)
+        {
             // check if the version is even valid
             let f = utils::source_name(&failed_node.path, &self.root).display();
             return Err(
@@ -617,24 +675,95 @@ impl<L: Language, D: ParsedSource<Language = L>> Graph<D> {
             // which requirement conflicts with it
 
             // retain only versions compatible with the `failed_node`
-            if let Some(req) = failed_node.data.version_req() {
+            if let Some(req) = self.version_requirement(failed_node_idx, project) {
                 all_versions.retain(|v| req.matches(v.as_ref()));
             }
 
             // iterate over all the nodes once again and find the incompatible one
             for node in &nodes {
-                if self.node(*node).check_available_version(&all_versions, offline).is_err() {
+                if self.check_available_version(*node, &all_versions, project).is_err() {
                     let mut msg = "Found incompatible versions:\n".white().to_string();
-                    self.format_imports_list(idx, [*node, failed_node_idx].into(), &mut msg)
-                        .unwrap();
+                    self.format_imports_list(
+                        idx,
+                        [*node, failed_node_idx].into(),
+                        project,
+                        &mut msg,
+                    )
+                    .unwrap();
                     return Err(msg);
                 }
             }
         }
 
         let mut msg = "Found incompatible versions:\n".white().to_string();
-        self.format_imports_list(idx, nodes.into_iter().collect(), &mut msg).unwrap();
+        self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap();
         Err(msg)
     }
 
+    /// Filters profiles incompatible with the given node and its imports.
+    fn retain_compatible_profiles<C: Compiler<ParsedSource = D, Language = L>, T: ArtifactOutput>(
+        &self,
+        idx: usize,
+        project: &Project<C, T>,
+        candidates: &mut Vec<(usize, (&str, &C::Settings))>,
+    ) -> Result<(), String> {
+        let mut all_profiles = candidates.clone();
+
+        let nodes: Vec<_> = self.node_ids(idx).collect();
+        let mut failed_node_idx = None;
+        for node in nodes.iter() {
+            if let Some(req) = project.restrictions.get(&self.node(*node).path) {
+                candidates.retain(|(_, (_, settings))| settings.satisfies_restrictions(&**req));
+                if candidates.is_empty() {
+                    failed_node_idx = Some(*node);
+                    break;
+                }
+            }
+        }
+
+        let Some(failed_node_idx) = failed_node_idx else {
+            // everything is fine
+            return Ok(());
+        };
+
+        let failed_node = self.node(failed_node_idx);
+
+        // retain only profiles compatible with the `failed_node`
+        if let Some(req) = project.restrictions.get(&failed_node.path) {
+            all_profiles.retain(|(_, (_, settings))| settings.satisfies_restrictions(&**req));
+        }
+
+        if all_profiles.is_empty() {
+            let f = utils::source_name(&failed_node.path, &self.root).display();
+            return Err(format!("Missing profile satisfying settings restrictions for {f}"));
+        }
+
+        // iterate over all the nodes once again and find the incompatible one
+        for node in &nodes {
+            if let Some(req) = project.restrictions.get(&self.node(*node).path) {
+                if !all_profiles
+                    .iter()
+                    .any(|(_, (_, settings))| settings.satisfies_restrictions(&**req))
+                {
+                    let mut msg = "Found incompatible settings restrictions:\n".white().to_string();
+
+                    self.format_imports_list(
+                        idx,
+                        [*node, failed_node_idx].into(),
+                        project,
+                        &mut msg,
+                    )
+                    .unwrap();
+                    return Err(msg);
+                }
+            }
+        }
+
+        let mut msg = "Found incompatible settings restrictions:\n".white().to_string();
+        self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap();
        Err(msg)
    }
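`retain_compatible_profiles` defers the actual check to `CompilerSettings::satisfies_restrictions`, which this patch does not show. Below is a hedged sketch of the min/max semantics suggested by the `Restriction { min, .. }` literal in the test at the end of this patch; the types are hypothetical stand-ins, not the crate's definitions:

```rust
// Hypothetical stand-ins mirroring the `Restriction { min, max }` shape used
// in the test at the end of this patch; not the crate's actual definitions.
struct Restriction<V> {
    min: Option<V>,
    max: Option<V>,
}

impl<V> Default for Restriction<V> {
    fn default() -> Self {
        Self { min: None, max: None }
    }
}

impl<V: PartialOrd + Copy> Restriction<V> {
    // A settings value satisfies the restriction if it sits inside the
    // optional [min, max] bounds; `None` on either side means unbounded.
    fn satisfies(&self, value: Option<V>) -> bool {
        let Some(value) = value else { return self.min.is_none() && self.max.is_none() };
        self.min.map_or(true, |min| value >= min) && self.max.map_or(true, |max| value <= max)
    }
}

#[derive(Clone, Copy, PartialEq, PartialOrd)]
enum EvmVersion {
    Paris,
    Shanghai,
    Cancun,
}

fn main() {
    let needs_cancun = Restriction { min: Some(EvmVersion::Cancun), ..Default::default() };
    assert!(!needs_cancun.satisfies(Some(EvmVersion::Paris))); // default profile filtered out
    assert!(!needs_cancun.satisfies(Some(EvmVersion::Shanghai)));
    assert!(needs_cancun.satisfies(Some(EvmVersion::Cancun))); // "cancun" profile retained
}
```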
@@ -658,35 +787,30 @@ impl<L: Language, D: ParsedSource<Language = L>> Graph<D> {
     ///
     /// This also attempts to prefer local installations over remotely available ones.
     /// If `offline` is set to `true`, then only already installed versions are considered.
-    fn get_input_node_versions<C: Compiler<ParsedSource = D, Language = L>>(
+    fn get_input_node_versions<C: Compiler<ParsedSource = D, Language = L>, T: ArtifactOutput>(
         &self,
-        offline: bool,
-        locked_versions: &HashMap<L, Version>,
-        compiler: &C,
+        project: &Project<C, T>,
     ) -> Result<HashMap<L, HashMap<Version, Vec<usize>>>> {
         trace!("resolving input node versions");
 
         let mut resulted_nodes = HashMap::new();
 
         for (language, nodes) in self.input_nodes_by_language() {
-            if let Some(version) = locked_versions.get(&language) {
-                resulted_nodes.insert(language, HashMap::from([(version.clone(), nodes)]));
-                continue;
-            }
             // this is likely called by an application and will eventually be printed, so we
             // don't exit on the first error; instead we gather all the errors and return a
             // bundled error message
             let mut errors = Vec::new();
             // the sorted list of all versions
-            let all_versions = if offline {
-                compiler
+            let all_versions = if project.offline {
+                project
+                    .compiler
                     .available_versions(&language)
                     .into_iter()
                     .filter(|v| v.is_installed())
                     .collect()
             } else {
-                compiler.available_versions(&language)
+                project.compiler.available_versions(&language)
             };
 
             if all_versions.is_empty() && !nodes.is_empty() {
@@ -705,7 +829,7 @@ impl<L: Language, D: ParsedSource<Language = L>> Graph<D> {
                 let mut candidates = all_versions.iter().collect::<Vec<_>>();
                 // remove all incompatible versions from the candidates list by checking the node
                 // and all its imports
-                if let Err(err) = self.retain_compatible_versions(idx, &mut candidates, offline) {
+                if let Err(err) = self.retain_compatible_versions(idx, &mut candidates, project) {
                     errors.push(err);
                 } else {
                     // found viable candidates, pick the most recent version that's already
@@ -759,6 +883,43 @@ impl<L: Language, D: ParsedSource<Language = L>> Graph<D> {
         Ok(resulted_nodes)
     }
 
+    #[allow(clippy::complexity)]
+    fn resolve_settings<C: Compiler<ParsedSource = D, Language = L>, T: ArtifactOutput>(
+        &self,
+        project: &Project<C, T>,
+        input_nodes_versions: HashMap<L, HashMap<Version, Vec<usize>>>,
+    ) -> Result<HashMap<L, HashMap<Version, HashMap<usize, Vec<usize>>>>> {
+        let mut resulted_sources = HashMap::new();
+        let mut errors = Vec::new();
+        for (language, versions) in input_nodes_versions {
+            let mut versioned_sources = HashMap::new();
+            for (version, nodes) in versions {
+                let mut profile_to_nodes = HashMap::new();
+                for idx in nodes {
+                    let mut profile_candidates =
+                        project.settings_profiles().enumerate().collect::<Vec<_>>();
+                    if let Err(err) =
+                        self.retain_compatible_profiles(idx, project, &mut profile_candidates)
+                    {
+                        errors.push(err);
+                    } else {
+                        let (profile_idx, _) = profile_candidates.first().expect("exists");
+                        profile_to_nodes.entry(*profile_idx).or_insert_with(Vec::new).push(idx);
+                    }
+                }
+                versioned_sources.insert(version, profile_to_nodes);
+            }
+            resulted_sources.insert(language, versioned_sources);
+        }
+
+        if errors.is_empty() {
+            Ok(resulted_sources)
+        } else {
+            error!("failed to resolve settings");
+            Err(SolcError::msg(errors.join("\n")))
+        }
+    }
+
     /// Tries to find the "best" set of versions for the nodes, see [Solc version
     /// auto-detection](#solc-version-auto-detection)
     ///
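`resolve_settings` assigns each node the first profile that survives filtering, so the order in which `Project::settings_profiles` yields profiles decides ties. That iterator is not part of this diff; the sketch below assumes the default profile comes first, followed by `additional_settings` entries, and uses a `u8` placeholder for the real settings type:

```rust
use std::collections::BTreeMap;

// Hypothetical model: `Project::settings_profiles` itself is not shown in
// this diff; assume "default" comes first, then `additional_settings`.
struct Project {
    settings: u8, // placeholder for the real `C::Settings`
    additional_settings: BTreeMap<String, u8>,
}

impl Project {
    fn settings_profiles(&self) -> impl Iterator<Item = (&str, &u8)> {
        std::iter::once(("default", &self.settings))
            .chain(self.additional_settings.iter().map(|(k, v)| (k.as_str(), v)))
    }
}

fn main() {
    let mut additional_settings = BTreeMap::new();
    additional_settings.insert("cancun".to_string(), 1u8);
    let project = Project { settings: 0, additional_settings };

    // `resolve_settings` keeps the `enumerate()` index so nodes can be grouped
    // by profile index and the `(name, settings)` pair looked up again later.
    let mut candidates = project.settings_profiles().enumerate().collect::<Vec<_>>();

    // Pretend `retain_compatible_profiles` filtered out the default profile:
    candidates.retain(|(_, (name, _))| *name != "default");

    // The first surviving candidate wins, exactly like `resolve_settings`.
    let (profile_idx, (name, _)) = candidates.first().expect("exists");
    assert_eq!((*profile_idx, *name), (1, "cancun"));
}
```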
@@ -857,7 +1018,7 @@ impl<'a, D> NodesIter<'a, D> {
     }
 }
 
-impl<'a, D> Iterator for NodesIter<'a, D> {
+impl<D> Iterator for NodesIter<'_, D> {
     type Item = usize;
     fn next(&mut self) -> Option<Self::Item> {
         let node = self.stack.pop_front()?;
@@ -912,28 +1073,6 @@ impl<D: ParsedSource> Node<D> {
     pub fn unpack(&self) -> (&PathBuf, &Source) {
         (&self.path, &self.source)
     }
-
-    /// Checks that the file's version is even available.
-    ///
-    /// This returns an error if the file's version is invalid semver, or is not available such as
-    /// 0.8.20, if the highest available version is `0.8.19`
-    fn check_available_version(
-        &self,
-        all_versions: &[&CompilerVersion],
-        offline: bool,
-    ) -> std::result::Result<(), SourceVersionError> {
-        let Some(req) = self.data.version_req() else { return Ok(()) };
-
-        if !all_versions.iter().any(|v| req.matches(v.as_ref())) {
-            return if offline {
-                Err(SourceVersionError::NoMatchingVersionOffline(req.clone()))
-            } else {
-                Err(SourceVersionError::NoMatchingVersion(req.clone()))
-            };
-        }
-
-        Ok(())
-    }
 }
 
 /// Helper type for formatting a node
@@ -942,7 +1081,7 @@ pub(crate) struct DisplayNode<'a, D> {
     root: &'a PathBuf,
 }
 
-impl<'a, D: ParsedSource> fmt::Display for DisplayNode<'a, D> {
+impl<D: ParsedSource> fmt::Display for DisplayNode<'_, D> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let path = utils::source_name(&self.node.path, self.root);
         write!(f, "{}", path.display())?;
@@ -1057,14 +1196,17 @@ src/Dapp.t.sol >=0.6.6
 #[test]
 #[cfg(feature = "svm-solc")]
 fn test_print_unresolved() {
+    use crate::{solc::SolcCompiler, ProjectBuilder};
+
     let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/incompatible-pragmas");
     let paths = ProjectPathsConfig::dapptools(&root).unwrap();
     let graph = Graph::<SolData>::resolve(&paths).unwrap();
     let Err(SolcError::Message(err)) = graph.get_input_node_versions(
-        false,
-        &Default::default(),
-        &crate::solc::SolcCompiler::AutoDetect,
+        &ProjectBuilder::<SolcCompiler>::default()
+            .paths(paths)
+            .build(SolcCompiler::AutoDetect)
+            .unwrap(),
     ) else {
         panic!("expected error");
     };
diff --git a/crates/compilers/tests/project.rs b/crates/compilers/tests/project.rs
index 7a89c4632..0e0cf9c47 100644
--- a/crates/compilers/tests/project.rs
+++ b/crates/compilers/tests/project.rs
@@ -14,15 +14,17 @@ use foundry_compilers::{
     },
     flatten::Flattener,
     info::ContractInfo,
+    multi::MultiCompilerRestrictions,
     project_util::*,
-    solc::SolcSettings,
+    solc::{Restriction, SolcRestrictions, SolcSettings},
     take_solc_installer_lock, Artifact, ConfigurableArtifacts, ExtraOutputValues, Graph, Project,
-    ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, TestFileFilter,
+    ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, RestrictionsWithVersion,
+    TestFileFilter,
 };
 use foundry_compilers_artifacts::{
     output_selection::OutputSelection, remappings::Remapping, BytecodeHash, DevDoc, Error,
-    ErrorDoc, EventDoc, Libraries, MethodDoc, ModelCheckerEngine::CHC, ModelCheckerSettings,
-    Settings, Severity, SolcInput, UserDoc, UserDocNotice,
+    ErrorDoc, EventDoc, EvmVersion, Libraries, MethodDoc, ModelCheckerEngine::CHC,
+    ModelCheckerSettings, Settings, Severity, SolcInput, UserDoc, UserDocNotice,
 };
 use foundry_compilers_core::{
     error::SolcError,
@@ -3848,12 +3850,14 @@ fn test_deterministic_metadata() {
     let orig_root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
     copy_dir_all(&orig_root, tmp_dir.path()).unwrap();
 
+    let compiler = MultiCompiler {
+        solc: Some(SolcCompiler::Specific(
+            Solc::find_svm_installed_version(&Version::new(0, 8, 18)).unwrap().unwrap(),
+        )),
+        vyper: None,
+    };
     let paths = ProjectPathsConfig::builder().root(root).build().unwrap();
-    let project = Project::builder()
-        .locked_version(SolcLanguage::Solidity, Version::new(0, 8, 18))
-        .paths(paths)
-        .build(MultiCompiler::default())
-        .unwrap();
+    let project = Project::builder().paths(paths).build(compiler).unwrap();
 
     let compiled = project.compile().unwrap();
     compiled.assert_success();
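For callers migrating off the removed `locked_version` builder method, the `test_deterministic_metadata` change above shows the replacement: pin the compiler itself. A hedged helper sketch; the import paths, builder generics, and error conversions are assumptions rather than verified API usage:

```rust
use foundry_compilers::{
    multi::MultiCompiler,
    solc::{Solc, SolcCompiler},
    Project, ProjectPathsConfig,
};
use semver::Version;
use std::path::Path;

// Hypothetical helper: builds a project pinned to solc 0.8.18, as the updated
// test does, instead of calling the removed `locked_version(..)`.
fn pinned_project(root: &Path) -> Result<Project<MultiCompiler>, Box<dyn std::error::Error>> {
    let solc = Solc::find_svm_installed_version(&Version::new(0, 8, 18))?
        .ok_or("solc 0.8.18 is not installed via svm")?;
    let compiler = MultiCompiler { solc: Some(SolcCompiler::Specific(solc)), vyper: None };
    let paths = ProjectPathsConfig::builder().root(root).build()?;
    Ok(Project::builder().paths(paths).build(compiler)?)
}
```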
@@ -3993,7 +3997,6 @@ fn test_can_compile_multi() {
 }
 
 // This is a reproduction of https://github.com/foundry-rs/compilers/issues/47
-#[cfg(feature = "svm-solc")]
 #[test]
 fn remapping_trailing_slash_issue47() {
     use std::sync::Arc;
@@ -4024,3 +4027,82 @@ fn remapping_trailing_slash_issue47() {
     let output = compiler.compile_exact(&input).unwrap();
     assert!(!output.has_error());
 }
+
+#[test]
+fn test_settings_restrictions() {
+    let mut project = TempProject::<MultiCompiler>::dapptools().unwrap();
+    // the default EVM version is Paris, so the Cancun contract won't compile under the
+    // default profile
+    project.project_mut().settings.solc.evm_version = Some(EvmVersion::Paris);
+
+    let common_path = project.add_source("Common.sol", "").unwrap();
+
+    let cancun_path = project
+        .add_source(
+            "Cancun.sol",
+            r#"
+import "./Common.sol";
+
+contract TransientContract {
+    function lock() public {
+        assembly {
+            tstore(0, 1)
+        }
+    }
+}"#,
+        )
+        .unwrap();
+
+    let cancun_importer_path =
+        project.add_source("CancunImporter.sol", "import \"./Cancun.sol\";").unwrap();
+    let simple_path = project
+        .add_source(
+            "Simple.sol",
+            r#"
+import "./Common.sol";
+
+contract SimpleContract {}
+"#,
+        )
+        .unwrap();
+
+    // Add a config with Cancun enabled
+    let mut cancun_settings = project.project().settings.clone();
+    cancun_settings.solc.evm_version = Some(EvmVersion::Cancun);
+    project.project_mut().additional_settings.insert("cancun".to_string(), cancun_settings);
+
+    let cancun_restriction = RestrictionsWithVersion {
+        restrictions: MultiCompilerRestrictions {
+            solc: SolcRestrictions {
+                evm_version: Restriction { min: Some(EvmVersion::Cancun), ..Default::default() },
+                ..Default::default()
+            },
+            ..Default::default()
+        },
+        version: None,
+    };
+
+    // Restrict compilation of the Cancun contract to the Cancun EVM version
+    project.project_mut().restrictions.insert(cancun_path.clone(), cancun_restriction);
+
+    let output = project.compile().unwrap();
+
+    output.assert_success();
+
+    let artifacts = output
+        .artifact_ids()
+        .map(|(id, _)| (id.profile, id.source))
+        .collect::<BTreeSet<_>>()
+        .into_iter()
+        .collect::<Vec<_>>();
+
+    assert_eq!(
+        artifacts,
+        vec![
+            ("cancun".to_string(), cancun_path),
+            ("cancun".to_string(), cancun_importer_path),
+            ("cancun".to_string(), common_path.clone()),
+            ("default".to_string(), common_path),
+            ("default".to_string(), simple_path),
+        ]
+    );
+}