diff --git a/crates/artifacts/artifacts/Cargo.toml b/crates/artifacts/artifacts/Cargo.toml index 0bea015dd..5664a7de5 100644 --- a/crates/artifacts/artifacts/Cargo.toml +++ b/crates/artifacts/artifacts/Cargo.toml @@ -20,3 +20,6 @@ foundry-compilers-artifacts-vyper.workspace = true [features] async = ["foundry-compilers-artifacts-solc/async"] +checksum = ["foundry-compilers-artifacts-solc/checksum"] +walkdir = ["foundry-compilers-artifacts-solc/walkdir"] +rayon = ["foundry-compilers-artifacts-solc/rayon"] diff --git a/crates/artifacts/solc/Cargo.toml b/crates/artifacts/solc/Cargo.toml index 33a6361ec..785334122 100644 --- a/crates/artifacts/solc/Cargo.toml +++ b/crates/artifacts/solc/Cargo.toml @@ -19,18 +19,26 @@ foundry-compilers-core.workspace = true alloy-json-abi.workspace = true alloy-primitives.workspace = true -futures-util = { workspace = true, optional = true } -md-5.workspace = true -rayon.workspace = true semver.workspace = true serde_json.workspace = true +serde_repr = "0.1" serde.workspace = true thiserror.workspace = true -tokio = { workspace = true, optional = true } tracing.workspace = true -walkdir.workspace = true yansi.workspace = true -serde_repr = "0.1" + +# async +tokio = { workspace = true, optional = true, features = ["fs"] } +futures-util = { workspace = true, optional = true } + +# checksum +md-5 = { workspace = true, optional = true } + +# walkdir +walkdir = { workspace = true, optional = true } + +# rayon +rayon = { workspace = true, optional = true } [target.'cfg(windows)'.dependencies] path-slash.workspace = true @@ -41,4 +49,7 @@ similar-asserts.workspace = true foundry-compilers-core = { workspace = true, features = ["test-utils"] } [features] -async = ["dep:tokio", "futures-util", "tokio/fs"] +async = ["dep:tokio", "dep:futures-util"] +checksum = ["dep:md-5"] +walkdir = ["dep:walkdir", "foundry-compilers-core/walkdir"] +rayon = ["dep:rayon"] diff --git a/crates/artifacts/solc/src/remappings.rs 
b/crates/artifacts/solc/src/remappings/find.rs similarity index 73% rename from crates/artifacts/solc/src/remappings.rs rename to crates/artifacts/solc/src/remappings/find.rs index 3fd749649..ee272eb68 100644 --- a/crates/artifacts/solc/src/remappings.rs +++ b/crates/artifacts/solc/src/remappings/find.rs @@ -1,10 +1,8 @@ +use super::Remapping; use foundry_compilers_core::utils; -use serde::{Deserialize, Serialize}; use std::{ collections::{btree_map::Entry, BTreeMap, HashSet}, - fmt, path::{Path, PathBuf}, - str::FromStr, }; const DAPPTOOLS_CONTRACTS_DIR: &str = "src"; @@ -12,158 +10,6 @@ const DAPPTOOLS_LIB_DIR: &str = "lib"; const JS_CONTRACTS_DIR: &str = "contracts"; const JS_LIB_DIR: &str = "node_modules"; -/// The solidity compiler can only reference files that exist locally on your computer. -/// So importing directly from GitHub (as an example) is not possible. -/// -/// Let's imagine you want to use OpenZeppelin's amazing library of smart contracts, -/// `@openzeppelin/contracts-ethereum-package`: -/// -/// ```ignore -/// pragma solidity 0.5.11; -/// -/// import "@openzeppelin/contracts-ethereum-package/contracts/math/SafeMath.sol"; -/// -/// contract MyContract { -/// using SafeMath for uint256; -/// ... -/// } -/// ``` -/// -/// When using `solc`, you have to specify the following: -/// -/// - A `prefix`: the path that's used in your smart contract, i.e. -/// `@openzeppelin/contracts-ethereum-package` -/// - A `target`: the absolute path of the downloaded contracts on your computer -/// -/// The format looks like this: `solc prefix=target ./MyContract.sol` -/// -/// For example: -/// -/// ```text -/// solc --bin \ -/// @openzeppelin/contracts-ethereum-package=/Your/Absolute/Path/To/@openzeppelin/contracts-ethereum-package \ -/// ./MyContract.sol -/// ``` -/// -/// You can also specify a `context` which limits the scope of the remapping to a subset of your -/// project. 
This allows you to apply the remapping only to imports located in a specific library or -/// a specific file. Without a context a remapping is applied to every matching import in all files. -/// -/// The format is: `solc context:prefix=target ./MyContract.sol` -/// -/// [Source](https://ethereum.stackexchange.com/questions/74448/what-are-remappings-and-how-do-they-work-in-solidity) -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct Remapping { - pub context: Option, - pub name: String, - pub path: String, -} - -impl Remapping { - /// Convenience function for [`RelativeRemapping::new`] - pub fn into_relative(self, root: &Path) -> RelativeRemapping { - RelativeRemapping::new(self, root) - } - - /// Removes the `base` path from the remapping - pub fn strip_prefix(&mut self, base: &Path) -> &mut Self { - if let Ok(stripped) = Path::new(&self.path).strip_prefix(base) { - self.path = stripped.display().to_string(); - } - self - } -} - -#[derive(Debug, PartialEq, Eq, PartialOrd, thiserror::Error)] -pub enum RemappingError { - #[error("invalid remapping format, found `{0}`, expected `=`")] - InvalidRemapping(String), - #[error("remapping key can't be empty, found `{0}`, expected `=`")] - EmptyRemappingKey(String), - #[error("remapping value must be a path, found `{0}`, expected `=`")] - EmptyRemappingValue(String), -} - -impl FromStr for Remapping { - type Err = RemappingError; - - fn from_str(remapping: &str) -> Result { - let (name, path) = remapping - .split_once('=') - .ok_or_else(|| RemappingError::InvalidRemapping(remapping.to_string()))?; - let (context, name) = name - .split_once(':') - .map_or((None, name), |(context, name)| (Some(context.to_string()), name)); - if name.trim().is_empty() { - return Err(RemappingError::EmptyRemappingKey(remapping.to_string())); - } - if path.trim().is_empty() { - return Err(RemappingError::EmptyRemappingValue(remapping.to_string())); - } - // if the remapping just starts with : (no context name), treat it as 
global - let context = context.filter(|c| !c.trim().is_empty()); - Ok(Self { context, name: name.to_string(), path: path.to_string() }) - } -} - -impl Serialize for Remapping { - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::ser::Serializer, - { - serializer.serialize_str(&self.to_string()) - } -} - -impl<'de> Deserialize<'de> for Remapping { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::de::Deserializer<'de>, - { - let remapping = String::deserialize(deserializer)?; - Self::from_str(&remapping).map_err(serde::de::Error::custom) - } -} - -// Remappings are printed as `prefix=target` -impl fmt::Display for Remapping { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut s = String::new(); - if let Some(context) = self.context.as_ref() { - #[cfg(target_os = "windows")] - { - // ensure we have `/` slashes on windows - use path_slash::PathExt; - s.push_str(&std::path::Path::new(context).to_slash_lossy()); - } - #[cfg(not(target_os = "windows"))] - { - s.push_str(context); - } - s.push(':'); - } - let name = - if !self.name.ends_with('/') { format!("{}/", self.name) } else { self.name.clone() }; - s.push_str(&{ - #[cfg(target_os = "windows")] - { - // ensure we have `/` slashes on windows - use path_slash::PathExt; - format!("{}={}", name, std::path::Path::new(&self.path).to_slash_lossy()) - } - #[cfg(not(target_os = "windows"))] - { - format!("{}={}", name, self.path) - } - }); - - if !s.ends_with('/') { - s.push('/'); - } - f.write_str(&s) - } -} - impl Remapping { /// Attempts to autodetect all remappings given a certain root path. /// @@ -194,12 +40,14 @@ impl Remapping { /// Remapping detection is primarily designed for dapptool's rules for lib folders, however, we /// attempt to detect and optimize various folder structures commonly used in `node_modules` /// dependencies. For those the same rules apply. 
In addition, we try to unify all - /// remappings discovered according to the rules mentioned above, so that layouts like, - // @aave/ - // ├─ governance/ - // │ ├─ contracts/ - // ├─ protocol-v2/ - // │ ├─ contracts/ + /// remappings discovered according to the rules mentioned above, so that layouts like: + /// ```text + /// @aave/ + /// ├─ governance/ + /// │ ├─ contracts/ + /// ├─ protocol-v2/ + /// │ ├─ contracts/ + /// ``` /// /// which would be multiple rededications according to our rules ("governance", "protocol-v2"), /// are unified into `@aave` by looking at their common ancestor, the root of this subdirectory @@ -271,185 +119,6 @@ impl Remapping { .map(|(name, path)| Self { context: None, name, path: format!("{}/", path.display()) }) .collect() } - - /// Converts any `\\` separators in the `path` to `/` - pub fn slash_path(&mut self) { - #[cfg(windows)] - { - use path_slash::PathExt; - self.path = Path::new(&self.path).to_slash_lossy().to_string(); - if let Some(context) = self.context.as_mut() { - *context = Path::new(&context).to_slash_lossy().to_string(); - } - } - } -} - -/// A relative [`Remapping`] that's aware of the current location -/// -/// See [`RelativeRemappingPathBuf`] -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct RelativeRemapping { - pub context: Option, - pub name: String, - pub path: RelativeRemappingPathBuf, -} - -impl RelativeRemapping { - /// Creates a new `RelativeRemapping` starting prefixed with `root` - pub fn new(remapping: Remapping, root: &Path) -> Self { - Self { - context: remapping.context.map(|c| { - RelativeRemappingPathBuf::with_root(root, c).path.to_string_lossy().to_string() - }), - name: remapping.name, - path: RelativeRemappingPathBuf::with_root(root, remapping.path), - } - } - - /// Converts this relative remapping into an absolute remapping - /// - /// This sets to root of the remapping to the given `root` path - pub fn to_remapping(mut self, root: PathBuf) -> Remapping { - self.path.parent = 
Some(root); - self.into() - } - - /// Converts this relative remapping into [`Remapping`] without the root path - pub fn to_relative_remapping(mut self) -> Remapping { - self.path.parent.take(); - self.into() - } -} - -// Remappings are printed as `prefix=target` -impl fmt::Display for RelativeRemapping { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut s = String::new(); - if let Some(context) = self.context.as_ref() { - #[cfg(target_os = "windows")] - { - // ensure we have `/` slashes on windows - use path_slash::PathExt; - s.push_str(&std::path::Path::new(context).to_slash_lossy()); - } - #[cfg(not(target_os = "windows"))] - { - s.push_str(context); - } - s.push(':'); - } - s.push_str(&{ - #[cfg(target_os = "windows")] - { - // ensure we have `/` slashes on windows - use path_slash::PathExt; - format!("{}={}", self.name, self.path.original().to_slash_lossy()) - } - #[cfg(not(target_os = "windows"))] - { - format!("{}={}", self.name, self.path.original().display()) - } - }); - - if !s.ends_with('/') { - s.push('/'); - } - f.write_str(&s) - } -} - -impl From for Remapping { - fn from(r: RelativeRemapping) -> Self { - let RelativeRemapping { context, mut name, path } = r; - let mut path = path.relative().display().to_string(); - if !path.ends_with('/') { - path.push('/'); - } - if !name.ends_with('/') { - name.push('/'); - } - Self { context, name, path } - } -} - -impl From for RelativeRemapping { - fn from(r: Remapping) -> Self { - Self { context: r.context, name: r.name, path: r.path.into() } - } -} - -/// The path part of the [`Remapping`] that knows the path of the file it was configured in, if any. -/// -/// A [`Remapping`] is intended to be absolute, but paths in configuration files are often desired -/// to be relative to the configuration file itself. 
For example, a path of -/// `weird-erc20/=lib/weird-erc20/src/` configured in a file `/var/foundry.toml` might be desired to -/// resolve as a `weird-erc20/=/var/lib/weird-erc20/src/` remapping. -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct RelativeRemappingPathBuf { - pub parent: Option, - pub path: PathBuf, -} - -impl RelativeRemappingPathBuf { - /// Creates a new `RelativeRemappingPathBuf` that checks if the `path` is a child path of - /// `parent`. - pub fn with_root( - parent: impl AsRef + Into, - path: impl AsRef + Into, - ) -> Self { - if let Ok(path) = path.as_ref().strip_prefix(parent.as_ref()) { - Self { parent: Some(parent.into()), path: path.to_path_buf() } - } else if path.as_ref().has_root() { - Self { parent: None, path: path.into() } - } else { - Self { parent: Some(parent.into()), path: path.into() } - } - } - - /// Returns the path as it was declared, without modification. - pub fn original(&self) -> &Path { - &self.path - } - - /// Returns this path relative to the file it was declared in, if any. - /// Returns the original if this path was not declared in a file or if the - /// path has a root. - pub fn relative(&self) -> PathBuf { - if self.original().has_root() { - return self.original().into(); - } - self.parent - .as_ref() - .map(|p| p.join(self.original())) - .unwrap_or_else(|| self.original().into()) - } -} - -impl> From

for RelativeRemappingPathBuf { - fn from(path: P) -> Self { - Self { parent: None, path: path.into() } - } -} - -impl Serialize for RelativeRemapping { - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::ser::Serializer, - { - serializer.serialize_str(&self.to_string()) - } -} - -impl<'de> Deserialize<'de> for RelativeRemapping { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::de::Deserializer<'de>, - { - let remapping = String::deserialize(deserializer)?; - let remapping = Remapping::from_str(&remapping).map_err(serde::de::Error::custom)?; - Ok(Self { context: remapping.context, name: remapping.name, path: remapping.path.into() }) - } } #[derive(Clone, Debug)] @@ -809,51 +478,26 @@ fn last_nested_source_dir(root: &Path, dir: &Path) -> PathBuf { #[cfg(test)] mod tests { - use super::*; + use super::{super::tests::*, *}; use foundry_compilers_core::utils::{mkdir_or_touch, tempdir, touch}; use similar_asserts::assert_eq; - #[test] - fn relative_remapping() { - let remapping = "oz=a/b/c/d"; - let remapping = Remapping::from_str(remapping).unwrap(); - - let relative = RelativeRemapping::new(remapping.clone(), Path::new("a/b/c")); - assert_eq!(relative.path.relative(), Path::new(&remapping.path)); - assert_eq!(relative.path.original(), Path::new("d")); - - let relative = RelativeRemapping::new(remapping.clone(), Path::new("x/y")); - assert_eq!(relative.path.relative(), Path::new("x/y/a/b/c/d")); - assert_eq!(relative.path.original(), Path::new(&remapping.path)); - - let remapping = "oz=/a/b/c/d"; - let remapping = Remapping::from_str(remapping).unwrap(); - let relative = RelativeRemapping::new(remapping.clone(), Path::new("a/b")); - assert_eq!(relative.path.relative(), Path::new(&remapping.path)); - assert_eq!(relative.path.original(), Path::new(&remapping.path)); - assert!(relative.path.parent.is_none()); - - let relative = RelativeRemapping::new(remapping, Path::new("/a/b")); - 
assert_eq!(relative.to_relative_remapping(), Remapping::from_str("oz/=c/d/").unwrap()); + /// Helper function for converting PathBufs to remapping strings. + fn to_str(p: std::path::PathBuf) -> String { + format!("{}/", p.display()) } #[test] - fn remapping_errors() { - let remapping = "oz=../b/c/d"; - let remapping = Remapping::from_str(remapping).unwrap(); - assert_eq!(remapping.name, "oz".to_string()); - assert_eq!(remapping.path, "../b/c/d".to_string()); - - let err = Remapping::from_str("").unwrap_err(); - matches!(err, RemappingError::InvalidRemapping(_)); - - let err = Remapping::from_str("oz=").unwrap_err(); - matches!(err, RemappingError::EmptyRemappingValue(_)); - } - - // helper function for converting path bufs to remapping strings - fn to_str(p: std::path::PathBuf) -> String { - format!("{}/", p.display()) + fn can_determine_nested_window() { + let a = Path::new( + "/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/lib.Z6ODLZJQeJQa/repo1/lib", + ); + let b = Path::new( + "/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/lib.Z6ODLZJQeJQa/repo1/lib/ds-test/src" + ); + assert_eq!(next_nested_window(a, b),Path::new( + "/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/lib.Z6ODLZJQeJQa/repo1/lib/ds-test" + )); } #[test] @@ -1155,46 +799,6 @@ mod tests { assert_eq!(remappings, expected); } - #[test] - fn can_resolve_contexts() { - let remapping = "context:oz=a/b/c/d"; - let remapping = Remapping::from_str(remapping).unwrap(); - - assert_eq!( - remapping, - Remapping { - context: Some("context".to_string()), - name: "oz".to_string(), - path: "a/b/c/d".to_string(), - } - ); - assert_eq!(remapping.to_string(), "context:oz/=a/b/c/d/".to_string()); - - let remapping = "context:foo=C:/bar/src/"; - let remapping = Remapping::from_str(remapping).unwrap(); - - assert_eq!( - remapping, - Remapping { - context: Some("context".to_string()), - name: "foo".to_string(), - path: "C:/bar/src/".to_string() - } - ); - } - - #[test] - fn can_resolve_global_contexts() { - let 
remapping = ":oz=a/b/c/d/"; - let remapping = Remapping::from_str(remapping).unwrap(); - - assert_eq!( - remapping, - Remapping { context: None, name: "oz".to_string(), path: "a/b/c/d/".to_string() } - ); - assert_eq!(remapping.to_string(), "oz/=a/b/c/d/".to_string()); - } - #[test] fn remappings() { let tmp_dir = tempdir("tmp").unwrap(); @@ -1324,19 +928,6 @@ mod tests { assert_eq!(remappings, expected); } - #[test] - fn can_determine_nested_window() { - let a = Path::new( - "/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/lib.Z6ODLZJQeJQa/repo1/lib", - ); - let b = Path::new( - "/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/lib.Z6ODLZJQeJQa/repo1/lib/ds-test/src" - ); - assert_eq!(next_nested_window(a, b),Path::new( - "/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/lib.Z6ODLZJQeJQa/repo1/lib/ds-test" - )); - } - #[test] #[cfg_attr(windows, ignore = "Windows remappings #2347")] fn find_openzeppelin_remapping() { diff --git a/crates/artifacts/solc/src/remappings/mod.rs b/crates/artifacts/solc/src/remappings/mod.rs new file mode 100644 index 000000000..5899d9b4b --- /dev/null +++ b/crates/artifacts/solc/src/remappings/mod.rs @@ -0,0 +1,426 @@ +use serde::{Deserialize, Serialize}; +use std::{ + fmt, + path::{Path, PathBuf}, + str::FromStr, +}; + +#[cfg(feature = "walkdir")] +mod find; + +/// The solidity compiler can only reference files that exist locally on your computer. +/// So importing directly from GitHub (as an example) is not possible. +/// +/// Let's imagine you want to use OpenZeppelin's amazing library of smart contracts, +/// `@openzeppelin/contracts-ethereum-package`: +/// +/// ```ignore +/// pragma solidity 0.5.11; +/// +/// import "@openzeppelin/contracts-ethereum-package/contracts/math/SafeMath.sol"; +/// +/// contract MyContract { +/// using SafeMath for uint256; +/// ... +/// } +/// ``` +/// +/// When using `solc`, you have to specify the following: +/// +/// - A `prefix`: the path that's used in your smart contract, i.e. 
+/// `@openzeppelin/contracts-ethereum-package` +/// - A `target`: the absolute path of the downloaded contracts on your computer +/// +/// The format looks like this: `solc prefix=target ./MyContract.sol` +/// +/// For example: +/// +/// ```text +/// solc --bin \ +/// @openzeppelin/contracts-ethereum-package=/Your/Absolute/Path/To/@openzeppelin/contracts-ethereum-package \ +/// ./MyContract.sol +/// ``` +/// +/// You can also specify a `context` which limits the scope of the remapping to a subset of your +/// project. This allows you to apply the remapping only to imports located in a specific library or +/// a specific file. Without a context a remapping is applied to every matching import in all files. +/// +/// The format is: `solc context:prefix=target ./MyContract.sol` +/// +/// [Source](https://ethereum.stackexchange.com/questions/74448/what-are-remappings-and-how-do-they-work-in-solidity) +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct Remapping { + pub context: Option, + pub name: String, + pub path: String, +} + +impl Remapping { + /// Convenience function for [`RelativeRemapping::new`] + pub fn into_relative(self, root: &Path) -> RelativeRemapping { + RelativeRemapping::new(self, root) + } + + /// Removes the `base` path from the remapping + pub fn strip_prefix(&mut self, base: &Path) -> &mut Self { + if let Ok(stripped) = Path::new(&self.path).strip_prefix(base) { + self.path = stripped.display().to_string(); + } + self + } +} + +#[derive(Debug, PartialEq, Eq, PartialOrd, thiserror::Error)] +pub enum RemappingError { + #[error("invalid remapping format, found `{0}`, expected `=`")] + InvalidRemapping(String), + #[error("remapping key can't be empty, found `{0}`, expected `=`")] + EmptyRemappingKey(String), + #[error("remapping value must be a path, found `{0}`, expected `=`")] + EmptyRemappingValue(String), +} + +impl FromStr for Remapping { + type Err = RemappingError; + + fn from_str(remapping: &str) -> Result { + let (name, path) 
= remapping + .split_once('=') + .ok_or_else(|| RemappingError::InvalidRemapping(remapping.to_string()))?; + let (context, name) = name + .split_once(':') + .map_or((None, name), |(context, name)| (Some(context.to_string()), name)); + if name.trim().is_empty() { + return Err(RemappingError::EmptyRemappingKey(remapping.to_string())); + } + if path.trim().is_empty() { + return Err(RemappingError::EmptyRemappingValue(remapping.to_string())); + } + // if the remapping just starts with : (no context name), treat it as global + let context = context.filter(|c| !c.trim().is_empty()); + Ok(Self { context, name: name.to_string(), path: path.to_string() }) + } +} + +impl Serialize for Remapping { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl<'de> Deserialize<'de> for Remapping { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::de::Deserializer<'de>, + { + let remapping = String::deserialize(deserializer)?; + Self::from_str(&remapping).map_err(serde::de::Error::custom) + } +} + +// Remappings are printed as `prefix=target` +impl fmt::Display for Remapping { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut s = String::new(); + if let Some(context) = self.context.as_ref() { + #[cfg(target_os = "windows")] + { + // ensure we have `/` slashes on windows + use path_slash::PathExt; + s.push_str(&std::path::Path::new(context).to_slash_lossy()); + } + #[cfg(not(target_os = "windows"))] + { + s.push_str(context); + } + s.push(':'); + } + let name = + if !self.name.ends_with('/') { format!("{}/", self.name) } else { self.name.clone() }; + s.push_str(&{ + #[cfg(target_os = "windows")] + { + // ensure we have `/` slashes on windows + use path_slash::PathExt; + format!("{}={}", name, std::path::Path::new(&self.path).to_slash_lossy()) + } + #[cfg(not(target_os = "windows"))] + { + format!("{}={}", name, self.path) + } + }); + 
+ if !s.ends_with('/') { + s.push('/'); + } + f.write_str(&s) + } +} + +impl Remapping { + /// Converts any `\\` separators in the `path` to `/`. + pub fn slash_path(&mut self) { + #[cfg(windows)] + { + use path_slash::PathExt; + self.path = Path::new(&self.path).to_slash_lossy().to_string(); + if let Some(context) = self.context.as_mut() { + *context = Path::new(&context).to_slash_lossy().to_string(); + } + } + } +} + +/// A relative [`Remapping`] that's aware of the current location +/// +/// See [`RelativeRemappingPathBuf`] +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct RelativeRemapping { + pub context: Option, + pub name: String, + pub path: RelativeRemappingPathBuf, +} + +impl RelativeRemapping { + /// Creates a new `RelativeRemapping` starting prefixed with `root` + pub fn new(remapping: Remapping, root: &Path) -> Self { + Self { + context: remapping.context.map(|c| { + RelativeRemappingPathBuf::with_root(root, c).path.to_string_lossy().to_string() + }), + name: remapping.name, + path: RelativeRemappingPathBuf::with_root(root, remapping.path), + } + } + + /// Converts this relative remapping into an absolute remapping + /// + /// This sets to root of the remapping to the given `root` path + pub fn to_remapping(mut self, root: PathBuf) -> Remapping { + self.path.parent = Some(root); + self.into() + } + + /// Converts this relative remapping into [`Remapping`] without the root path + pub fn to_relative_remapping(mut self) -> Remapping { + self.path.parent.take(); + self.into() + } +} + +// Remappings are printed as `prefix=target` +impl fmt::Display for RelativeRemapping { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut s = String::new(); + if let Some(context) = self.context.as_ref() { + #[cfg(target_os = "windows")] + { + // ensure we have `/` slashes on windows + use path_slash::PathExt; + s.push_str(&std::path::Path::new(context).to_slash_lossy()); + } + #[cfg(not(target_os = "windows"))] + { + 
s.push_str(context); + } + s.push(':'); + } + s.push_str(&{ + #[cfg(target_os = "windows")] + { + // ensure we have `/` slashes on windows + use path_slash::PathExt; + format!("{}={}", self.name, self.path.original().to_slash_lossy()) + } + #[cfg(not(target_os = "windows"))] + { + format!("{}={}", self.name, self.path.original().display()) + } + }); + + if !s.ends_with('/') { + s.push('/'); + } + f.write_str(&s) + } +} + +impl From for Remapping { + fn from(r: RelativeRemapping) -> Self { + let RelativeRemapping { context, mut name, path } = r; + let mut path = path.relative().display().to_string(); + if !path.ends_with('/') { + path.push('/'); + } + if !name.ends_with('/') { + name.push('/'); + } + Self { context, name, path } + } +} + +impl From for RelativeRemapping { + fn from(r: Remapping) -> Self { + Self { context: r.context, name: r.name, path: r.path.into() } + } +} + +/// The path part of the [`Remapping`] that knows the path of the file it was configured in, if any. +/// +/// A [`Remapping`] is intended to be absolute, but paths in configuration files are often desired +/// to be relative to the configuration file itself. For example, a path of +/// `weird-erc20/=lib/weird-erc20/src/` configured in a file `/var/foundry.toml` might be desired to +/// resolve as a `weird-erc20/=/var/lib/weird-erc20/src/` remapping. +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct RelativeRemappingPathBuf { + pub parent: Option, + pub path: PathBuf, +} + +impl RelativeRemappingPathBuf { + /// Creates a new `RelativeRemappingPathBuf` that checks if the `path` is a child path of + /// `parent`. 
+ pub fn with_root( + parent: impl AsRef + Into, + path: impl AsRef + Into, + ) -> Self { + if let Ok(path) = path.as_ref().strip_prefix(parent.as_ref()) { + Self { parent: Some(parent.into()), path: path.to_path_buf() } + } else if path.as_ref().has_root() { + Self { parent: None, path: path.into() } + } else { + Self { parent: Some(parent.into()), path: path.into() } + } + } + + /// Returns the path as it was declared, without modification. + pub fn original(&self) -> &Path { + &self.path + } + + /// Returns this path relative to the file it was declared in, if any. + /// Returns the original if this path was not declared in a file or if the + /// path has a root. + pub fn relative(&self) -> PathBuf { + if self.original().has_root() { + return self.original().into(); + } + self.parent + .as_ref() + .map(|p| p.join(self.original())) + .unwrap_or_else(|| self.original().into()) + } +} + +impl> From

for RelativeRemappingPathBuf { + fn from(path: P) -> Self { + Self { parent: None, path: path.into() } + } +} + +impl Serialize for RelativeRemapping { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl<'de> Deserialize<'de> for RelativeRemapping { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::de::Deserializer<'de>, + { + let remapping = String::deserialize(deserializer)?; + let remapping = Remapping::from_str(&remapping).map_err(serde::de::Error::custom)?; + Ok(Self { context: remapping.context, name: remapping.name, path: remapping.path.into() }) + } +} + +#[cfg(test)] +mod tests { + pub use super::*; + pub use similar_asserts::assert_eq; + + #[test] + fn relative_remapping() { + let remapping = "oz=a/b/c/d"; + let remapping = Remapping::from_str(remapping).unwrap(); + + let relative = RelativeRemapping::new(remapping.clone(), Path::new("a/b/c")); + assert_eq!(relative.path.relative(), Path::new(&remapping.path)); + assert_eq!(relative.path.original(), Path::new("d")); + + let relative = RelativeRemapping::new(remapping.clone(), Path::new("x/y")); + assert_eq!(relative.path.relative(), Path::new("x/y/a/b/c/d")); + assert_eq!(relative.path.original(), Path::new(&remapping.path)); + + let remapping = "oz=/a/b/c/d"; + let remapping = Remapping::from_str(remapping).unwrap(); + let relative = RelativeRemapping::new(remapping.clone(), Path::new("a/b")); + assert_eq!(relative.path.relative(), Path::new(&remapping.path)); + assert_eq!(relative.path.original(), Path::new(&remapping.path)); + assert!(relative.path.parent.is_none()); + + let relative = RelativeRemapping::new(remapping, Path::new("/a/b")); + assert_eq!(relative.to_relative_remapping(), Remapping::from_str("oz/=c/d/").unwrap()); + } + + #[test] + fn remapping_errors() { + let remapping = "oz=../b/c/d"; + let remapping = Remapping::from_str(remapping).unwrap(); + 
assert_eq!(remapping.name, "oz".to_string()); + assert_eq!(remapping.path, "../b/c/d".to_string()); + + let err = Remapping::from_str("").unwrap_err(); + matches!(err, RemappingError::InvalidRemapping(_)); + + let err = Remapping::from_str("oz=").unwrap_err(); + matches!(err, RemappingError::EmptyRemappingValue(_)); + } + + #[test] + fn can_resolve_contexts() { + let remapping = "context:oz=a/b/c/d"; + let remapping = Remapping::from_str(remapping).unwrap(); + + assert_eq!( + remapping, + Remapping { + context: Some("context".to_string()), + name: "oz".to_string(), + path: "a/b/c/d".to_string(), + } + ); + assert_eq!(remapping.to_string(), "context:oz/=a/b/c/d/".to_string()); + + let remapping = "context:foo=C:/bar/src/"; + let remapping = Remapping::from_str(remapping).unwrap(); + + assert_eq!( + remapping, + Remapping { + context: Some("context".to_string()), + name: "foo".to_string(), + path: "C:/bar/src/".to_string() + } + ); + } + + #[test] + fn can_resolve_global_contexts() { + let remapping = ":oz=a/b/c/d/"; + let remapping = Remapping::from_str(remapping).unwrap(); + + assert_eq!( + remapping, + Remapping { context: None, name: "oz".to_string(), path: "a/b/c/d/".to_string() } + ); + assert_eq!(remapping.to_string(), "oz/=a/b/c/d/".to_string()); + } +} diff --git a/crates/artifacts/solc/src/sources.rs b/crates/artifacts/solc/src/sources.rs index 01a9b0e22..01c520864 100644 --- a/crates/artifacts/solc/src/sources.rs +++ b/crates/artifacts/solc/src/sources.rs @@ -1,6 +1,4 @@ -use alloy_primitives::hex; -use foundry_compilers_core::{error::SolcIoError, utils}; -use md5::Digest; +use foundry_compilers_core::error::SolcIoError; use serde::{Deserialize, Serialize}; use std::{ collections::BTreeMap, @@ -9,6 +7,9 @@ use std::{ sync::Arc, }; +#[cfg(feature = "walkdir")] +use foundry_compilers_core::utils; + type SourcesInner = BTreeMap; /// An ordered list of files and their source. 
@@ -142,11 +143,13 @@ impl Source { } /// Recursively finds all source files under the given dir path and reads them all + #[cfg(feature = "walkdir")] pub fn read_all_from(dir: &Path, extensions: &[&str]) -> Result { Self::read_all_files(utils::source_files(dir, extensions)) } /// Recursively finds all solidity and yul files under the given dir path and reads them all + #[cfg(feature = "walkdir")] pub fn read_sol_yul_from(dir: &Path) -> Result { Self::read_all_from(dir, utils::SOLC_EXTENSIONS) } @@ -175,6 +178,7 @@ impl Source { /// /// NOTE: this is only expected to be faster than `Self::read_all` if the given iterator /// contains at least several paths or the files are rather large. + #[cfg(feature = "rayon")] pub fn par_read_all(files: I) -> Result where I: IntoIterator, @@ -191,12 +195,10 @@ impl Source { .map(Sources) } - /// Generate a non-cryptographically secure checksum of the file's content + /// Generate a non-cryptographically secure checksum of the file's content. + #[cfg(feature = "checksum")] pub fn content_hash(&self) -> String { - let mut hasher = md5::Md5::new(); - hasher.update(self); - let result = hasher.finalize(); - hex::encode(result) + alloy_primitives::hex::encode(::digest(self.content.as_bytes())) } } @@ -217,6 +219,7 @@ impl Source { } /// Finds all source files under the given dir path and reads them all + #[cfg(feature = "walkdir")] pub async fn async_read_all_from( dir: &Path, extensions: &[&str], diff --git a/crates/compilers/Cargo.toml b/crates/compilers/Cargo.toml index d5a2d9b7e..97a98bfae 100644 --- a/crates/compilers/Cargo.toml +++ b/crates/compilers/Cargo.toml @@ -15,8 +15,12 @@ exclude.workspace = true workspace = true [dependencies] -foundry-compilers-artifacts.workspace = true -foundry-compilers-core.workspace = true +foundry-compilers-artifacts = { workspace = true, features = [ + "checksum", + "walkdir", + "rayon", +] } +foundry-compilers-core = { workspace = true, features = ["regex"] } serde.workspace = true 
semver.workspace = true alloy-primitives.workspace = true @@ -38,7 +42,7 @@ dyn-clone = "1" derive_more = { version = "1", features = ["debug"] } home = "0.5" dirs = "5.0" -itertools = "0.13" +itertools = ">=0.13, <=0.14" # project-util tempfile = { version = "3.9", optional = true } diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 09512aaa9..8cdc379b6 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -18,25 +18,37 @@ workspace = true alloy-primitives.workspace = true cfg-if.workspace = true dunce.workspace = true -path-slash.workspace = true -regex.workspace = true semver.workspace = true serde_json.workspace = true serde.workspace = true thiserror.workspace = true -walkdir.workspace = true +# regex +regex = { workspace = true, optional = true } + +# walkdir +walkdir = { workspace = true, optional = true } + +# svm-solc svm = { workspace = true, optional = true } +# async tokio = { workspace = true, optional = true } +# project-util tempfile = { workspace = true, optional = true } fs_extra = { version = "1.3", optional = true } +[target.'cfg(windows)'.dependencies] +path-slash.workspace = true + [dev-dependencies] tempfile.workspace = true [features] +async = ["dep:tokio"] project-util = ["dep:tempfile", "dep:fs_extra"] +regex = ["dep:regex"] svm-solc = ["dep:svm", "dep:tokio"] -async = ["dep:tokio"] +walkdir = ["dep:walkdir"] + test-utils = ["dep:tempfile"] diff --git a/crates/core/src/utils.rs b/crates/core/src/utils/mod.rs similarity index 66% rename from crates/core/src/utils.rs rename to crates/core/src/utils/mod.rs index 4a591527a..a49afec16 100644 --- a/crates/core/src/utils.rs +++ b/crates/core/src/utils/mod.rs @@ -3,53 +3,25 @@ use crate::error::{SolcError, SolcIoError}; use alloy_primitives::{hex, keccak256}; use cfg_if::cfg_if; -use regex::{Match, Regex}; use semver::{Version, VersionReq}; use serde::{de::DeserializeOwned, Serialize}; use std::{ - collections::HashSet, fs, io::Write, ops::Range, path::{Component, 
Path, PathBuf}, sync::LazyLock as Lazy, }; -use walkdir::WalkDir; - -/// A regex that matches the import path and identifier of a solidity import -/// statement with the named groups "path", "id". -// Adapted from -pub static RE_SOL_IMPORT: Lazy = Lazy::new(|| { - Regex::new(r#"import\s+(?:(?:"(?P.*)"|'(?P.*)')(?:\s+as\s+\w+)?|(?:(?:\w+(?:\s+as\s+\w+)?|\*\s+as\s+\w+|\{\s*(?:\w+(?:\s+as\s+\w+)?(?:\s*,\s*)?)+\s*\})\s+from\s+(?:"(?P.*)"|'(?P.*)')))\s*;"#).unwrap() -}); - -/// A regex that matches an alias within an import statement -pub static RE_SOL_IMPORT_ALIAS: Lazy = - Lazy::new(|| Regex::new(r#"(?:(?P\w+)|\*|'|")\s+as\s+(?P\w+)"#).unwrap()); - -/// A regex that matches the version part of a solidity pragma -/// as follows: `pragma solidity ^0.5.2;` => `^0.5.2` -/// statement with the named group "version". -// Adapted from -pub static RE_SOL_PRAGMA_VERSION: Lazy = - Lazy::new(|| Regex::new(r"pragma\s+solidity\s+(?P.+?);").unwrap()); - -/// A regex that matches the SDPX license identifier -/// statement with the named group "license". -pub static RE_SOL_SDPX_LICENSE_IDENTIFIER: Lazy = - Lazy::new(|| Regex::new(r"///?\s*SPDX-License-Identifier:\s*(?P.+)").unwrap()); - -/// A regex used to remove extra lines in flatenned files -pub static RE_THREE_OR_MORE_NEWLINES: Lazy = Lazy::new(|| Regex::new("\n{3,}").unwrap()); - -/// A regex that matches version pragma in a Vyper -pub static RE_VYPER_VERSION: Lazy = - Lazy::new(|| Regex::new(r"#(?:pragma version|@version)\s+(?P.+)").unwrap()); - -/// A regex that matches the contract names in a Solidity file. -pub static RE_CONTRACT_NAMES: Lazy = Lazy::new(|| { - Regex::new(r"\b(?:contract|library|abstract\s+contract|interface)\s+([\w$]+)").unwrap() -}); + +#[cfg(feature = "regex")] +mod re; +#[cfg(feature = "regex")] +pub use re::*; + +#[cfg(feature = "walkdir")] +mod wd; +#[cfg(feature = "walkdir")] +pub use wd::*; /// Extensions acceptable by solc compiler. 
pub const SOLC_EXTENSIONS: &[&str] = &["sol", "yul"]; @@ -102,11 +74,6 @@ pub static SUPPORTS_BASE_PATH: Lazy = pub static SUPPORTS_INCLUDE_PATH: Lazy = Lazy::new(|| VersionReq::parse(">=0.8.8").unwrap()); -/// Create a regex that matches any library or contract name inside a file -pub fn create_contract_or_lib_name_regex(name: &str) -> Regex { - Regex::new(&format!(r#"(?:using\s+(?P{name})\s+|is\s+(?:\w+\s*,\s*)*(?P{name})(?:\s*,\s*\w+)*|(?:(?P(?:function|error|as)\s+|\n[^\n]*(?:"([^"\n]|\\")*|'([^'\n]|\\')*))|\W+)(?P{name})(?:\.|\(| ))"#)).unwrap() -} - /// Move a range by a specified offset pub fn range_by_offset(range: &Range, offset: isize) -> Range { Range { @@ -115,106 +82,6 @@ pub fn range_by_offset(range: &Range, offset: isize) -> Range { } } -/// Returns all path parts from any solidity import statement in a string, -/// `import "./contracts/Contract.sol";` -> `"./contracts/Contract.sol"`. -/// -/// See also -pub fn find_import_paths(contract: &str) -> impl Iterator> { - RE_SOL_IMPORT.captures_iter(contract).filter_map(|cap| { - cap.name("p1") - .or_else(|| cap.name("p2")) - .or_else(|| cap.name("p3")) - .or_else(|| cap.name("p4")) - }) -} - -/// Returns the solidity version pragma from the given input: -/// `pragma solidity ^0.5.2;` => `^0.5.2` -pub fn find_version_pragma(contract: &str) -> Option> { - RE_SOL_PRAGMA_VERSION.captures(contract)?.name("version") -} - -/// Returns an iterator that yields all solidity/yul files funder under the given root path or the -/// `root` itself, if it is a sol/yul file -/// -/// This also follows symlinks. 
-pub fn source_files_iter<'a>( - root: &Path, - extensions: &'a [&'a str], -) -> impl Iterator + 'a { - WalkDir::new(root) - .follow_links(true) - .into_iter() - .filter_map(Result::ok) - .filter(|e| e.file_type().is_file()) - .filter(|e| { - e.path().extension().map(|ext| extensions.iter().any(|e| ext == *e)).unwrap_or_default() - }) - .map(|e| e.path().into()) -} - -/// Returns a list of absolute paths to all the solidity files under the root, or the file itself, -/// if the path is a solidity file. -/// -/// This also follows symlinks. -/// -/// NOTE: this does not resolve imports from other locations -/// -/// # Examples -/// -/// ```no_run -/// use foundry_compilers_core::utils; -/// let sources = utils::source_files("./contracts".as_ref(), &utils::SOLC_EXTENSIONS); -/// ``` -pub fn source_files(root: &Path, extensions: &[&str]) -> Vec { - source_files_iter(root, extensions).collect() -} - -/// Same as [source_files] but only returns files acceptable by Solc compiler. -pub fn sol_source_files(root: &Path) -> Vec { - source_files(root, SOLC_EXTENSIONS) -} - -/// Returns a list of _unique_ paths to all folders under `root` that contain at least one solidity -/// file (`*.sol`). 
-/// -/// # Examples -/// -/// ```no_run -/// use foundry_compilers_core::utils; -/// let dirs = utils::solidity_dirs("./lib".as_ref()); -/// ``` -/// -/// for following layout will return -/// `["lib/ds-token/src", "lib/ds-token/src/test", "lib/ds-token/lib/ds-math/src", ...]` -/// -/// ```text -/// lib -/// └── ds-token -/// ├── lib -/// │ ├── ds-math -/// │ │ └── src/Contract.sol -/// │ ├── ds-stop -/// │ │ └── src/Contract.sol -/// │ ├── ds-test -/// │ └── src//Contract.sol -/// └── src -/// ├── base.sol -/// ├── test -/// │ ├── base.t.sol -/// └── token.sol -/// ``` -pub fn solidity_dirs(root: &Path) -> Vec { - let sources = sol_source_files(root); - sources - .iter() - .filter_map(|p| p.parent()) - .collect::>() - .into_iter() - .map(|p| p.to_path_buf()) - .collect() -} - /// Returns the source name for the given source path, the ancestors of the root path. /// /// `/Users/project/sources/contract.sol` -> `sources/contracts.sol` @@ -268,11 +135,18 @@ pub fn normalize_solidity_import_path( let cleaned = clean_solidity_path(&original); // this is to align the behavior with `canonicalize` - use path_slash::PathExt; - let normalized = PathBuf::from(dunce::simplified(&cleaned).to_slash_lossy().as_ref()); + let normalized = dunce::simplified(&cleaned); + #[cfg(windows)] + let normalized = { + use path_slash::PathExt; + PathBuf::from(normalized.to_slash_lossy().as_ref()) + }; + #[cfg(not(windows))] + let normalized = PathBuf::from(normalized); // checks if the path exists without reading its content and obtains an io error if it doesn't. - normalized.metadata().map(|_| normalized).map_err(|err| SolcIoError::new(err, original)) + let _ = normalized.metadata().map_err(|err| SolcIoError::new(err, original))?; + Ok(normalized) } // This function lexically cleans the given path. @@ -404,26 +278,6 @@ pub fn resolve_absolute_library( None } -/// Reads the list of Solc versions that have been installed in the machine. -/// -/// The version list is sorted in ascending order. 
-/// -/// Checks for installed solc versions under the given path as `/`, -/// (e.g.: `~/.svm/0.8.10`) and returns them sorted in ascending order. -pub fn installed_versions(root: &Path) -> Result, SolcError> { - let mut versions: Vec<_> = walkdir::WalkDir::new(root) - .max_depth(1) - .into_iter() - .filter_map(std::result::Result::ok) - .filter(|e| e.file_type().is_dir()) - .filter_map(|e: walkdir::DirEntry| { - e.path().file_name().and_then(|v| Version::parse(v.to_string_lossy().as_ref()).ok()) - }) - .collect(); - versions.sort(); - Ok(versions) -} - /// Returns the 36 char (deprecated) fully qualified name placeholder /// /// If the name is longer than 36 char, then the name gets truncated, @@ -530,26 +384,6 @@ pub fn find_fave_or_alt_path(root: &Path, fave: &str, alt: &str) -> PathBuf { p } -/// Attempts to find a file with different case that exists next to the `non_existing` file -pub fn find_case_sensitive_existing_file(non_existing: &Path) -> Option { - let non_existing_file_name = non_existing.file_name()?; - let parent = non_existing.parent()?; - WalkDir::new(parent) - .max_depth(1) - .into_iter() - .filter_map(Result::ok) - .filter(|e| e.file_type().is_file()) - .find_map(|e| { - let existing_file_name = e.path().file_name()?; - if existing_file_name.eq_ignore_ascii_case(non_existing_file_name) - && existing_file_name != non_existing_file_name - { - return Some(e.path().to_path_buf()); - } - None - }) -} - cfg_if! { if #[cfg(any(feature = "async", feature = "svm-solc"))] { use tokio::runtime::{Handle, Runtime}; @@ -625,27 +459,6 @@ pub fn create_parent_dir_all(file: &Path) -> Result<(), SolcError> { Ok(()) } -/// Given the regex and the target string, find all occurrences of named groups within the string. -/// -/// This method returns the tuple of matches `(a, b)` where `a` is the match for the entire regex -/// and `b` is the match for the first named group. 
-/// -/// NOTE: This method will return the match for the first named group, so the order of passed named -/// groups matters. -pub fn capture_outer_and_inner<'a>( - content: &'a str, - regex: ®ex::Regex, - names: &[&str], -) -> Vec<(regex::Match<'a>, regex::Match<'a>)> { - regex - .captures_iter(content) - .filter_map(|cap| { - let cap_match = names.iter().find_map(|name| cap.name(name)); - cap_match.and_then(|m| cap.get(0).map(|outer| (outer.to_owned(), m))) - }) - .collect() -} - #[cfg(any(test, feature = "test-utils"))] // pub fn touch(path: &std::path::Path) -> std::io::Result<()> { @@ -673,24 +486,8 @@ pub fn mkdir_or_touch(tmp: &std::path::Path, paths: &[&str]) { #[cfg(test)] mod tests { - use super::*; - use std::fs::{create_dir_all, File}; - - #[test] - fn can_find_different_case() { - let tmp_dir = tempdir("out").unwrap(); - let path = tmp_dir.path().join("forge-std"); - create_dir_all(&path).unwrap(); - let existing = path.join("Test.sol"); - let non_existing = path.join("test.sol"); - fs::write(&existing, b"").unwrap(); - - #[cfg(target_os = "linux")] - assert!(!non_existing.exists()); - - let found = find_case_sensitive_existing_file(&non_existing).unwrap(); - assert_eq!(found, existing); - } + pub use super::*; + pub use std::fs::{create_dir_all, File}; #[test] fn can_create_parent_dirs_with_ext() { @@ -725,82 +522,6 @@ mod tests { assert!(!is_local_source_name(&[tmp_dir.path()], "ds-test/test.sol")); } - #[test] - fn can_find_solidity_sources() { - let tmp_dir = tempdir("contracts").unwrap(); - - let file_a = tmp_dir.path().join("a.sol"); - let file_b = tmp_dir.path().join("a.sol"); - let nested = tmp_dir.path().join("nested"); - let file_c = nested.join("c.sol"); - let nested_deep = nested.join("deep"); - let file_d = nested_deep.join("d.sol"); - File::create(&file_a).unwrap(); - File::create(&file_b).unwrap(); - create_dir_all(nested_deep).unwrap(); - File::create(&file_c).unwrap(); - File::create(&file_d).unwrap(); - - let files: HashSet<_> = 
sol_source_files(tmp_dir.path()).into_iter().collect(); - let expected: HashSet<_> = [file_a, file_b, file_c, file_d].into(); - assert_eq!(files, expected); - } - - #[test] - fn can_parse_curly_bracket_imports() { - let s = - r#"import {ReentrancyGuard} from "@openzeppelin/contracts/utils/ReentrancyGuard.sol";"#; - let imports: Vec<_> = find_import_paths(s).map(|m| m.as_str()).collect(); - assert_eq!(imports, vec!["@openzeppelin/contracts/utils/ReentrancyGuard.sol"]) - } - - #[test] - fn can_find_single_quote_imports() { - let content = r" -// SPDX-License-Identifier: MIT -pragma solidity 0.8.6; - -import '@openzeppelin/contracts/access/Ownable.sol'; -import '@openzeppelin/contracts/utils/Address.sol'; - -import './../interfaces/IJBDirectory.sol'; -import './../libraries/JBTokens.sol'; - "; - let imports: Vec<_> = find_import_paths(content).map(|m| m.as_str()).collect(); - - assert_eq!( - imports, - vec![ - "@openzeppelin/contracts/access/Ownable.sol", - "@openzeppelin/contracts/utils/Address.sol", - "./../interfaces/IJBDirectory.sol", - "./../libraries/JBTokens.sol", - ] - ); - } - - #[test] - fn can_find_import_paths() { - let s = r#"//SPDX-License-Identifier: Unlicense -pragma solidity ^0.8.0; -import "hardhat/console.sol"; -import "../contract/Contract.sol"; -import { T } from "../Test.sol"; -import { T } from '../Test2.sol'; -"#; - assert_eq!( - vec!["hardhat/console.sol", "../contract/Contract.sol", "../Test.sol", "../Test2.sol"], - find_import_paths(s).map(|m| m.as_str()).collect::>() - ); - } - #[test] - fn can_find_version() { - let s = r"//SPDX-License-Identifier: Unlicense -pragma solidity ^0.8.0; -"; - assert_eq!(Some("^0.8.0"), find_version_pragma(s).map(|s| s.as_str())); - } - #[test] fn can_normalize_solidity_import_path() { let dir = tempfile::tempdir().unwrap(); diff --git a/crates/core/src/utils/re.rs b/crates/core/src/utils/re.rs new file mode 100644 index 000000000..6136e5d29 --- /dev/null +++ b/crates/core/src/utils/re.rs @@ -0,0 +1,143 @@ +use 
regex::{Match, Regex}; +use std::sync::LazyLock as Lazy; + +/// A regex that matches the import path and identifier of a solidity import +/// statement with the named groups "path", "id". +// Adapted from +pub static RE_SOL_IMPORT: Lazy<Regex> = Lazy::new(|| { + Regex::new(r#"import\s+(?:(?:"(?P<p1>.*)"|'(?P<p2>.*)')(?:\s+as\s+\w+)?|(?:(?:\w+(?:\s+as\s+\w+)?|\*\s+as\s+\w+|\{\s*(?:\w+(?:\s+as\s+\w+)?(?:\s*,\s*)?)+\s*\})\s+from\s+(?:"(?P<p3>.*)"|'(?P<p4>.*)')))\s*;"#).unwrap() +}); + +/// A regex that matches an alias within an import statement +pub static RE_SOL_IMPORT_ALIAS: Lazy<Regex> = + Lazy::new(|| Regex::new(r#"(?:(?P<target>\w+)|\*|'|")\s+as\s+(?P<alias>\w+)"#).unwrap()); + +/// A regex that matches the version part of a solidity pragma +/// as follows: `pragma solidity ^0.5.2;` => `^0.5.2` +/// statement with the named group "version". +// Adapted from +pub static RE_SOL_PRAGMA_VERSION: Lazy<Regex> = + Lazy::new(|| Regex::new(r"pragma\s+solidity\s+(?P<version>.+?);").unwrap()); + +/// A regex that matches the SPDX license identifier +/// statement with the named group "license". +pub static RE_SOL_SDPX_LICENSE_IDENTIFIER: Lazy<Regex> = + Lazy::new(|| Regex::new(r"///?\s*SPDX-License-Identifier:\s*(?P<license>.+)").unwrap()); + +/// A regex used to remove extra lines in flattened files +pub static RE_THREE_OR_MORE_NEWLINES: Lazy<Regex> = Lazy::new(|| Regex::new("\n{3,}").unwrap()); + +/// A regex that matches version pragma in a Vyper +pub static RE_VYPER_VERSION: Lazy<Regex> = + Lazy::new(|| Regex::new(r"#(?:pragma version|@version)\s+(?P<version>.+)").unwrap()); + +/// A regex that matches the contract names in a Solidity file. 
+pub static RE_CONTRACT_NAMES: Lazy = Lazy::new(|| { + Regex::new(r"\b(?:contract|library|abstract\s+contract|interface)\s+([\w$]+)").unwrap() +}); + +/// Create a regex that matches any library or contract name inside a file +pub fn create_contract_or_lib_name_regex(name: &str) -> Regex { + Regex::new(&format!(r#"(?:using\s+(?P{name})\s+|is\s+(?:\w+\s*,\s*)*(?P{name})(?:\s*,\s*\w+)*|(?:(?P(?:function|error|as)\s+|\n[^\n]*(?:"([^"\n]|\\")*|'([^'\n]|\\')*))|\W+)(?P{name})(?:\.|\(| ))"#)).unwrap() +} + +/// Returns all path parts from any solidity import statement in a string, +/// `import "./contracts/Contract.sol";` -> `"./contracts/Contract.sol"`. +/// +/// See also +pub fn find_import_paths(contract: &str) -> impl Iterator> { + RE_SOL_IMPORT.captures_iter(contract).filter_map(|cap| { + cap.name("p1") + .or_else(|| cap.name("p2")) + .or_else(|| cap.name("p3")) + .or_else(|| cap.name("p4")) + }) +} + +/// Returns the solidity version pragma from the given input: +/// `pragma solidity ^0.5.2;` => `^0.5.2` +pub fn find_version_pragma(contract: &str) -> Option> { + RE_SOL_PRAGMA_VERSION.captures(contract)?.name("version") +} + +/// Given the regex and the target string, find all occurrences of named groups within the string. +/// +/// This method returns the tuple of matches `(a, b)` where `a` is the match for the entire regex +/// and `b` is the match for the first named group. +/// +/// NOTE: This method will return the match for the first named group, so the order of passed named +/// groups matters. 
+pub fn capture_outer_and_inner<'a>( + content: &'a str, + regex: ®ex::Regex, + names: &[&str], +) -> Vec<(regex::Match<'a>, regex::Match<'a>)> { + regex + .captures_iter(content) + .filter_map(|cap| { + let cap_match = names.iter().find_map(|name| cap.name(name)); + cap_match.and_then(|m| cap.get(0).map(|outer| (outer.to_owned(), m))) + }) + .collect() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn can_find_import_paths() { + let s = r#"//SPDX-License-Identifier: Unlicense +pragma solidity ^0.8.0; +import "hardhat/console.sol"; +import "../contract/Contract.sol"; +import { T } from "../Test.sol"; +import { T } from '../Test2.sol'; +"#; + assert_eq!( + vec!["hardhat/console.sol", "../contract/Contract.sol", "../Test.sol", "../Test2.sol"], + find_import_paths(s).map(|m| m.as_str()).collect::>() + ); + } + + #[test] + fn can_find_version() { + let s = r"//SPDX-License-Identifier: Unlicense +pragma solidity ^0.8.0; +"; + assert_eq!(Some("^0.8.0"), find_version_pragma(s).map(|s| s.as_str())); + } + + #[test] + fn can_parse_curly_bracket_imports() { + let s = + r#"import {ReentrancyGuard} from "@openzeppelin/contracts/utils/ReentrancyGuard.sol";"#; + let imports: Vec<_> = find_import_paths(s).map(|m| m.as_str()).collect(); + assert_eq!(imports, vec!["@openzeppelin/contracts/utils/ReentrancyGuard.sol"]) + } + + #[test] + fn can_find_single_quote_imports() { + let content = r" +// SPDX-License-Identifier: MIT +pragma solidity 0.8.6; + +import '@openzeppelin/contracts/access/Ownable.sol'; +import '@openzeppelin/contracts/utils/Address.sol'; + +import './../interfaces/IJBDirectory.sol'; +import './../libraries/JBTokens.sol'; + "; + let imports: Vec<_> = find_import_paths(content).map(|m| m.as_str()).collect(); + + assert_eq!( + imports, + vec![ + "@openzeppelin/contracts/access/Ownable.sol", + "@openzeppelin/contracts/utils/Address.sol", + "./../interfaces/IJBDirectory.sol", + "./../libraries/JBTokens.sol", + ] + ); + } +} diff --git 
a/crates/core/src/utils/wd.rs b/crates/core/src/utils/wd.rs new file mode 100644 index 000000000..4f67ae06a --- /dev/null +++ b/crates/core/src/utils/wd.rs @@ -0,0 +1,171 @@ +use super::SOLC_EXTENSIONS; +use crate::error::SolcError; +use semver::Version; +use std::{ + collections::HashSet, + path::{Path, PathBuf}, +}; +use walkdir::WalkDir; + +/// Returns an iterator that yields all solidity/yul files found under the given root path or the +/// `root` itself, if it is a sol/yul file +/// +/// This also follows symlinks. +pub fn source_files_iter<'a>( + root: &Path, + extensions: &'a [&'a str], +) -> impl Iterator<Item = PathBuf> + 'a { + WalkDir::new(root) + .follow_links(true) + .into_iter() + .filter_map(Result::ok) + .filter(|e| e.file_type().is_file()) + .filter(|e| { + e.path().extension().map(|ext| extensions.iter().any(|e| ext == *e)).unwrap_or_default() + }) + .map(|e| e.path().into()) +} + +/// Returns a list of absolute paths to all the solidity files under the root, or the file itself, +/// if the path is a solidity file. +/// +/// This also follows symlinks. +/// +/// NOTE: this does not resolve imports from other locations +/// +/// # Examples +/// +/// ```no_run +/// use foundry_compilers_core::utils; +/// let sources = utils::source_files("./contracts".as_ref(), &utils::SOLC_EXTENSIONS); +/// ``` +pub fn source_files(root: &Path, extensions: &[&str]) -> Vec<PathBuf> { + source_files_iter(root, extensions).collect() +} + +/// Same as [source_files] but only returns files acceptable by Solc compiler. +pub fn sol_source_files(root: &Path) -> Vec<PathBuf> { + source_files(root, SOLC_EXTENSIONS) +} + +/// Returns a list of _unique_ paths to all folders under `root` that contain at least one solidity +/// file (`*.sol`). 
+/// +/// # Examples +/// +/// ```no_run +/// use foundry_compilers_core::utils; +/// let dirs = utils::solidity_dirs("./lib".as_ref()); +/// ``` +/// +/// for following layout will return +/// `["lib/ds-token/src", "lib/ds-token/src/test", "lib/ds-token/lib/ds-math/src", ...]` +/// +/// ```text +/// lib +/// └── ds-token +/// ├── lib +/// │ ├── ds-math +/// │ │ └── src/Contract.sol +/// │ ├── ds-stop +/// │ │ └── src/Contract.sol +/// │ ├── ds-test +/// │ └── src//Contract.sol +/// └── src +/// ├── base.sol +/// ├── test +/// │ ├── base.t.sol +/// └── token.sol +/// ``` +pub fn solidity_dirs(root: &Path) -> Vec { + let sources = sol_source_files(root); + sources + .iter() + .filter_map(|p| p.parent()) + .collect::>() + .into_iter() + .map(|p| p.to_path_buf()) + .collect() +} + +/// Reads the list of Solc versions that have been installed in the machine. +/// +/// The version list is sorted in ascending order. +/// +/// Checks for installed solc versions under the given path as `/`, +/// (e.g.: `~/.svm/0.8.10`) and returns them sorted in ascending order. 
+pub fn installed_versions(root: &Path) -> Result, SolcError> { + let mut versions: Vec<_> = walkdir::WalkDir::new(root) + .max_depth(1) + .into_iter() + .filter_map(std::result::Result::ok) + .filter(|e| e.file_type().is_dir()) + .filter_map(|e: walkdir::DirEntry| { + e.path().file_name().and_then(|v| Version::parse(v.to_string_lossy().as_ref()).ok()) + }) + .collect(); + versions.sort(); + Ok(versions) +} + +/// Attempts to find a file with different case that exists next to the `non_existing` file +pub fn find_case_sensitive_existing_file(non_existing: &Path) -> Option { + let non_existing_file_name = non_existing.file_name()?; + let parent = non_existing.parent()?; + WalkDir::new(parent) + .max_depth(1) + .into_iter() + .filter_map(Result::ok) + .filter(|e| e.file_type().is_file()) + .find_map(|e| { + let existing_file_name = e.path().file_name()?; + if existing_file_name.eq_ignore_ascii_case(non_existing_file_name) + && existing_file_name != non_existing_file_name + { + return Some(e.path().to_path_buf()); + } + None + }) +} + +#[cfg(test)] +mod tests { + use super::{super::tests::*, *}; + + #[test] + fn can_find_solidity_sources() { + let tmp_dir = tempdir("contracts").unwrap(); + + let file_a = tmp_dir.path().join("a.sol"); + let file_b = tmp_dir.path().join("a.sol"); + let nested = tmp_dir.path().join("nested"); + let file_c = nested.join("c.sol"); + let nested_deep = nested.join("deep"); + let file_d = nested_deep.join("d.sol"); + File::create(&file_a).unwrap(); + File::create(&file_b).unwrap(); + create_dir_all(nested_deep).unwrap(); + File::create(&file_c).unwrap(); + File::create(&file_d).unwrap(); + + let files: HashSet<_> = sol_source_files(tmp_dir.path()).into_iter().collect(); + let expected: HashSet<_> = [file_a, file_b, file_c, file_d].into(); + assert_eq!(files, expected); + } + + #[test] + fn can_find_different_case() { + let tmp_dir = tempdir("out").unwrap(); + let path = tmp_dir.path().join("forge-std"); + create_dir_all(&path).unwrap(); + let 
existing = path.join("Test.sol"); + let non_existing = path.join("test.sol"); + fs::write(&existing, b"").unwrap(); + + #[cfg(target_os = "linux")] + assert!(!non_existing.exists()); + + let found = find_case_sensitive_existing_file(&non_existing).unwrap(); + assert_eq!(found, existing); + } +}