diff --git a/.github/workflows/sourcify_all_chains.yml b/.github/workflows/sourcify_all_chains.yml
new file mode 100644
index 0000000000..a0274e93a0
--- /dev/null
+++ b/.github/workflows/sourcify_all_chains.yml
@@ -0,0 +1,30 @@
+name: "sourcify_all_chains"
+
+on:
+ workflow_dispatch:
+ inputs:
+ check_bindings:
+ description: "Check name bindings on contracts, failing if there's any unresolved symbol."
+ type: "boolean"
+ required: false
+ default: false
+ check_infer_version:
+ description: "Check that we can infer the correct language version based on the available version pragmas."
+ type: "boolean"
+ required: false
+ default: false
+jobs:
+ run_tests:
+ uses: "./.github/workflows/sourcify_single_chain.yml"
+ with:
+ chain_id: "${{ matrix.chain_id }}"
+ check_bindings: "${{ inputs.check_bindings }}"
+ check_infer_version: "${{ inputs.check_infer_version }}"
+ strategy:
+ fail-fast: false
+ matrix:
+ chain_id:
+ - 1 # Ethereum Mainnet
+ - 42161 # Arbitrum One
+ - 8453 # Base
+ - 137 # Polygon Mainnet
diff --git a/.github/workflows/sourcify_single_chain.yml b/.github/workflows/sourcify_single_chain.yml
new file mode 100644
index 0000000000..e0f6029974
--- /dev/null
+++ b/.github/workflows/sourcify_single_chain.yml
@@ -0,0 +1,153 @@
+name: "sourcify_single_chain"
+
+on:
+ # Run using manual triggers from GitHub UI:
+ # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
+ workflow_dispatch:
+ inputs:
+ chain_id:
+ description: "ID of the chain to run against."
+ type: "number"
+ required: true
+ default: 1
+ check_bindings:
+ description: "Check name bindings on contracts, failing if there's any unresolved symbol."
+ type: "boolean"
+ required: false
+ default: false
+ check_infer_version:
+ description: "Check that we can infer the correct language version based on the available version pragmas."
+ type: "boolean"
+ required: false
+ default: false
+ workflow_call:
+ inputs:
+ chain_id:
+ description: "ID of the chain to run against: https://docs.sourcify.dev/docs/chains/"
+ type: "number"
+ required: true
+ default: 1
+ check_bindings:
+ description: "Check name bindings on contracts, failing if there's any unresolved symbol."
+ type: "boolean"
+ required: false
+ default: false
+ check_infer_version:
+ description: "Check that we can infer the correct language version based on the available version pragmas."
+ type: "boolean"
+ required: false
+ default: false
+jobs:
+ singleShard:
+ runs-on: "ubuntu-22.04" # _SLANG_DEV_CONTAINER_BASE_IMAGE_ (keep in sync)
+ outputs:
+ __SLANG_SOURCIFY_SHARD_RESULTS__0: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__0 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__1: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__1 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__2: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__2 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__3: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__3 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__4: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__4 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__5: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__5 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__6: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__6 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__7: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__7 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__8: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__8 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__9: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__9 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__10: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__10 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__11: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__11 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__12: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__12 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__13: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__13 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__14: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__14 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__15: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__15 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__16: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__16 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__17: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__17 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__18: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__18 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__19: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__19 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__20: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__20 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__21: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__21 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__22: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__22 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__23: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__23 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__24: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__24 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__25: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__25 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__26: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__26 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__27: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__27 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__28: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__28 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__29: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__29 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__30: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__30 }}"
+ __SLANG_SOURCIFY_SHARD_RESULTS__31: "${{ steps.output-shard-results.outputs.__SLANG_SOURCIFY_SHARD_RESULTS__31 }}"
+
+ strategy:
+ fail-fast: false # Continue running all shards even if some fail.
+ matrix:
+ shard_index:
+ - 0
+ - 1
+ - 2
+ - 3
+ - 4
+ - 5
+ - 6
+ - 7
+ - 8
+ - 9
+ - 10
+ - 11
+ - 12
+ - 13
+ - 14
+ - 15
+ - 16
+ - 17
+ - 18
+ - 19
+ - 20
+ - 21
+ - 22
+ - 23
+ - 24
+ - 25
+ - 26
+ - 27
+ - 28
+ - 29
+ - 30
+ - 31
+
+ env:
+ SHARD_COUNT: 32 # Length of the 'shard_index' array above.
+
+ steps:
+ - name: "Checkout Repository"
+ uses: "actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683"
+
+ - name: "Restore Cache"
+ uses: "./.github/actions/cache/restore"
+
+ - name: "infra setup"
+ run: "./scripts/bin/infra setup"
+
+ - name: "infra run solidity_testing_sourcify"
+ run: "./scripts/bin/infra run --release --bin solidity_testing_sourcify -- test --shard-count ${{ env.SHARD_COUNT }} --shard-index ${{ matrix.shard_index }} ${{ inputs.check_bindings == true && '--check-bindings' || '' }} ${{ inputs.check_infer_version == true && '--check-infer-version' || '' }} --chain-id ${{ inputs.chain_id }} ${{ inputs.network }}"
+
+ - name: "Write shard results to output"
+ if: "!cancelled()"
+ id: "output-shard-results"
+ run: 'echo "__SLANG_SOURCIFY_SHARD_RESULTS__${{ matrix.shard_index }}=$(cat target/__SLANG_SOURCIFY_SHARD_RESULTS__.json)" >> "$GITHUB_OUTPUT"'
+
+ combinedResults:
+ runs-on: "ubuntu-22.04" # _SLANG_DEV_CONTAINER_BASE_IMAGE_ (keep in sync)
+ needs: "singleShard"
+ if: "!cancelled()"
+ steps:
+ - name: "Checkout Repository"
+ uses: "actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683"
+
+ - name: "Restore Cache"
+ uses: "./.github/actions/cache/restore"
+
+ - name: "infra setup"
+ run: "./scripts/bin/infra setup"
+
+ - name: "Output shards results"
+ run: "echo '${{ toJSON(needs.singleShard.outputs) }}' > __SLANG_SOURCIFY_MATRIX_RESULTS__.json"
+
+ - name: "Show combined results"
+ run: "./scripts/bin/infra run --bin solidity_testing_sourcify -- show-combined-results __SLANG_SOURCIFY_MATRIX_RESULTS__.json"
diff --git a/Cargo.lock b/Cargo.lock
index d9b31145ee..5f0cd63f1b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -799,6 +799,18 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"
+[[package]]
+name = "filetime"
+version = "0.2.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "libredox",
+ "windows-sys 0.59.0",
+]
+
[[package]]
name = "fnv"
version = "1.0.7"
@@ -1500,6 +1512,17 @@ version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4"
+[[package]]
+name = "libredox"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
+dependencies = [
+ "bitflags 2.5.0",
+ "libc",
+ "redox_syscall",
+]
+
[[package]]
name = "linux-raw-sys"
version = "0.4.14"
@@ -2002,6 +2025,15 @@ dependencies = [
"crossbeam-utils",
]
+[[package]]
+name = "redox_syscall"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2f103c6d277498fbceb16e84d317e2a400f160f46904d5f5410848c829511a3"
+dependencies = [
+ "bitflags 2.5.0",
+]
+
[[package]]
name = "regex"
version = "1.11.1"
@@ -2561,6 +2593,25 @@ dependencies = [
"url",
]
+[[package]]
+name = "solidity_testing_sourcify"
+version = "1.1.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "console",
+ "indicatif",
+ "infra_utils",
+ "rayon",
+ "reqwest",
+ "semver",
+ "serde",
+ "serde_json",
+ "slang_solidity",
+ "tar",
+ "url",
+]
+
[[package]]
name = "spdx"
version = "0.10.8"
@@ -2704,6 +2755,17 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
+[[package]]
+name = "tar"
+version = "0.4.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a"
+dependencies = [
+ "filetime",
+ "libc",
+ "xattr",
+]
+
[[package]]
name = "target-triple"
version = "0.1.3"
@@ -3781,6 +3843,16 @@ dependencies = [
"tap",
]
+[[package]]
+name = "xattr"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e"
+dependencies = [
+ "libc",
+ "rustix 1.0.5",
+]
+
[[package]]
name = "yansi"
version = "0.5.1"
diff --git a/Cargo.toml b/Cargo.toml
index e977aa9a38..3372a4cdef 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -37,6 +37,7 @@ members = [
"crates/solidity/testing/sanctuary",
"crates/solidity/testing/snapshots",
"crates/solidity/testing/solc",
+ "crates/solidity/testing/sourcify",
"crates/testlang/inputs/language",
"crates/testlang/outputs/cargo/crate",
@@ -79,6 +80,7 @@ solidity_testing_perf = { path = "crates/solidity/testing/perf", version = "1.1.
solidity_testing_sanctuary = { path = "crates/solidity/testing/sanctuary", version = "1.1.0" }
solidity_testing_snapshots = { path = "crates/solidity/testing/snapshots", version = "1.1.0" }
solidity_testing_solc = { path = "crates/solidity/testing/solc", version = "1.1.0" }
+solidity_testing_sourcify = { path = "crates/solidity/testing/sourcify", version = "1.1.0" }
slang_testlang = { path = "crates/testlang/outputs/cargo/crate", version = "1.1.0" }
testlang_cargo_tests = { path = "crates/testlang/outputs/cargo/tests", version = "1.1.0" }
@@ -145,6 +147,7 @@ syn = { version = "2.0.100", features = [
"parsing",
"printing",
] }
+tar = { version = "0.4.44" }
tempfile = { version = "3.19.1" }
tera = { version = "1.20.0" }
thiserror = { version = "2.0.12" }
diff --git a/crates/infra/cli/src/commands/run/mod.rs b/crates/infra/cli/src/commands/run/mod.rs
index a5ef65f9a2..ccad5d804f 100644
--- a/crates/infra/cli/src/commands/run/mod.rs
+++ b/crates/infra/cli/src/commands/run/mod.rs
@@ -30,6 +30,9 @@ enum BinaryName {
/// Runs compatibility tests between our language definition and 'solc' actual output.
#[clap(name = "solidity_testing_solc")]
SolidityTestingSolc,
+ /// Tests our parser/binding graph against contracts fetched from the Sourcify dataset.
+ #[clap(name = "solidity_testing_sourcify")]
+ SolidityTestingSourcify,
}
impl RunController {
diff --git a/crates/infra/utils/src/terminal/mod.rs b/crates/infra/utils/src/terminal/mod.rs
index a3dd8cbfce..5a9bce22f5 100644
--- a/crates/infra/utils/src/terminal/mod.rs
+++ b/crates/infra/utils/src/terminal/mod.rs
@@ -35,7 +35,7 @@ impl Terminal {
let message_width = strip_ansi_codes(&message).chars().count();
let terminal_width = Term::stdout().size().1 as usize;
- let spacer_width = terminal_width - message_width - BANNER_GLYPHS;
+ let spacer_width = terminal_width.saturating_sub(message_width + BANNER_GLYPHS);
let left_spacer_width = spacer_width / 2;
let right_spacer_width = spacer_width - left_spacer_width;
diff --git a/crates/solidity/testing/sourcify/Cargo.toml b/crates/solidity/testing/sourcify/Cargo.toml
new file mode 100644
index 0000000000..4389390de3
--- /dev/null
+++ b/crates/solidity/testing/sourcify/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "solidity_testing_sourcify"
+version.workspace = true
+rust-version.workspace = true
+edition.workspace = true
+publish = false
+
+[dependencies]
+anyhow = { workspace = true }
+clap = { workspace = true }
+console = { workspace = true }
+indicatif = { workspace = true }
+infra_utils = { workspace = true }
+rayon = { workspace = true }
+reqwest = { workspace = true, features = ["json"] }
+semver = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+slang_solidity = { workspace = true, features = ["__private_ariadne_errors", "__experimental_bindings_api", "__private_compilation_api"] }
+tar = { workspace = true }
+url = { workspace = true }
+
+[lints]
+workspace = true
diff --git a/crates/solidity/testing/sourcify/src/command.rs b/crates/solidity/testing/sourcify/src/command.rs
new file mode 100644
index 0000000000..e23cc89066
--- /dev/null
+++ b/crates/solidity/testing/sourcify/src/command.rs
@@ -0,0 +1,117 @@
+use std::fmt::Display;
+use std::ops::RangeInclusive;
+use std::path::PathBuf;
+use std::str::FromStr;
+
+use clap::{Parser, Subcommand};
+
+#[derive(Debug, Parser)]
+pub struct Cli {
+ #[command(subcommand)]
+ pub command: Commands,
+}
+
+#[derive(Subcommand, Debug)]
+pub enum Commands {
+ Test(TestCommand),
+ ShowCombinedResults(ShowCombinedResultsCommand),
+}
+
+#[derive(Debug, Parser)]
+pub struct TestCommand {
+ /// Chain to pull contracts from. See https://docs.sourcify.dev/docs/chains/ for a list of valid chain IDs.
+ /// Defaults to Ethereum Mainnet.
+ #[arg(long, default_value_t = ChainId(1))]
+ pub chain_id: ChainId,
+
+ #[command(flatten)]
+ pub test_options: TestOptions,
+
+ #[command(flatten)]
+ pub sharding_options: ShardingOptions,
+
+ /// Specify a single contract to test using the contract address.
+ #[arg(long, conflicts_with = "shard_count")]
+ pub contract: Option<String>,
+
+ /// Save the fetch archive under `target/` and don't delete it after the test
+ /// is complete. Only used for debugging purposes. Requires you to select a
+ /// specific contract to test using the `--contract` option.
+ #[arg(long, requires = "contract", default_value_t = false)]
+ pub save: bool,
+
+ /// Run tests sequentially, and output extra logging. Tests will run significantly slower
+ /// with this option enabled.
+ #[arg(long, default_value_t = false)]
+ pub trace: bool,
+}
+
+#[derive(Debug, Parser)]
+pub struct ShowCombinedResultsCommand {
+ pub results_file: PathBuf,
+}
+
+#[derive(Debug, Parser)]
+pub struct TestOptions {
+ /// Run bindings tests.
+ #[arg(long, default_value_t = false)]
+ pub check_bindings: bool,
+
+ /// Run version inference tests.
+ #[arg(long, default_value_t = false)]
+ pub check_infer_version: bool,
+}
+
+#[derive(Debug, Parser)]
+pub struct ShardingOptions {
+ /// Divide the dataset into this many shards. Must be a factor of 256. '`--shard-index`'
+ /// must be included along with this option.
+ #[arg(long, requires = "shard_index")]
+ pub shard_count: Option<u8>,
+
+ /// Select a single shard to test. Must be within the range [`0..shard-count`). Required if
+ /// '`--shard-count`' is specified.
+ #[arg(long, requires = "shard_count")]
+ pub shard_index: Option<u8>,
+
+ /// If set, will only test contracts under the '`full_match`' category.
+ #[arg(long, default_value_t = false)]
+ pub exclude_partial_matches: bool,
+}
+
+impl ShardingOptions {
+ // Clippy complains about the inclusive range below, but its suggestion
+ // (switching to an exclusive range) doesn't work because we're returning
+ // a `RangeInclusive`.
+ #[allow(clippy::range_minus_one)]
+ pub fn get_id_range(&self) -> RangeInclusive<u8> {
+ if let Some(shard_count) = self.shard_count {
+ let shard_size = u8::try_from(256 / u16::from(shard_count)).unwrap();
+
+ let shard_index = self.shard_index.unwrap();
+ let shard_start = shard_size * shard_index;
+
+ shard_start..=(shard_start + shard_size - 1)
+ } else {
+ 0..=255
+ }
+ }
+}
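+
+// A minimal sketch (not part of the test suite proper) of the sharding math
+// above: with `--shard-count 32`, each shard covers 256 / 32 = 8 prefixes,
+// so `--shard-index 3` selects the inclusive prefix range 24..=31.
+#[cfg(test)]
+mod sharding_test {
+ use super::ShardingOptions;
+
+ #[test]
+ fn shard_range_example() {
+ let options = ShardingOptions {
+ shard_count: Some(32),
+ shard_index: Some(3),
+ exclude_partial_matches: false,
+ };
+ assert_eq!(options.get_id_range(), 24..=31);
+ }
+}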
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Default)]
+#[repr(transparent)]
+pub struct ChainId(pub u64);
+
+impl Display for ChainId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.0)
+ }
+}
+
+impl FromStr for ChainId {
+ type Err = anyhow::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let val: u64 = s.parse()?;
+ Ok(ChainId(val))
+ }
+}
diff --git a/crates/solidity/testing/sourcify/src/compilation_builder.rs b/crates/solidity/testing/sourcify/src/compilation_builder.rs
new file mode 100644
index 0000000000..67543f7c5c
--- /dev/null
+++ b/crates/solidity/testing/sourcify/src/compilation_builder.rs
@@ -0,0 +1,64 @@
+use std::collections::HashSet;
+
+use anyhow::{Error, Result};
+use slang_solidity::compilation::{AddFileResponse, CompilationUnit, InternalCompilationBuilder};
+
+use crate::sourcify::Contract;
+
+pub struct CompilationBuilder<'c> {
+ internal: InternalCompilationBuilder,
+ contract: &'c Contract,
+ seen_files: HashSet<String>,
+}
+
+impl<'c> CompilationBuilder<'c> {
+ pub fn new(contract: &'c Contract) -> Result<CompilationBuilder<'c>> {
+ Ok(CompilationBuilder {
+ contract,
+ internal: InternalCompilationBuilder::create(contract.version.clone())?,
+ seen_files: HashSet::new(),
+ })
+ }
+
+ pub fn build(mut self) -> Result<CompilationUnit> {
+ let entrypoint = self.contract.entrypoint().ok_or(Error::msg(format!(
+ "Entrypoint not found in contract {name}",
+ name = self.contract.name
+ )))?;
+
+ self.add_file(&entrypoint)?;
+
+ Ok(self.internal.build())
+ }
+
+ fn add_file(&mut self, filename: &str) -> Result<()> {
+ if !self.seen_files.insert(filename.into()) {
+ return Ok(());
+ }
+
+ let source = self.contract.read_file(filename)?;
+
+ let AddFileResponse { import_paths } = self.internal.add_file(filename.into(), &source);
+
+ for import_path in import_paths {
+ let import_path = import_path.node().unparse();
+ let import_path = import_path
+ .strip_prefix(|c| matches!(c, '"' | '\''))
+ .unwrap()
+ .strip_suffix(|c| matches!(c, '"' | '\''))
+ .unwrap()
+ .trim();
+
+ let import_real_name = self
+ .contract
+ .import_resolver
+ .resolve_import(filename, import_path)
+ .ok_or(Error::msg(format!(
+ "Could not resolve import path {import_path} in source file {filename}"
+ )))?;
+ self.add_file(&import_real_name)?;
+ }
+
+ Ok(())
+ }
+}
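+
+// A minimal usage sketch, assuming a `Contract` already fetched from an
+// archive: the builder starts at the contract's entrypoint and follows its
+// imports transitively through the archive's `ImportResolver`:
+//
+// let unit = CompilationBuilder::new(&contract)?.build()?;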
diff --git a/crates/solidity/testing/sourcify/src/events.rs b/crates/solidity/testing/sourcify/src/events.rs
new file mode 100644
index 0000000000..92a957468e
--- /dev/null
+++ b/crates/solidity/testing/sourcify/src/events.rs
@@ -0,0 +1,160 @@
+use std::cmp;
+
+use console::Color;
+use indicatif::ProgressBar;
+use infra_utils::github::GitHub;
+
+use crate::reporting::Reporter;
+use crate::results::ShardResults;
+
+const MAX_PRINTED_FAILURES: u64 = 1000;
+
+#[derive(Clone, Copy, Eq, PartialEq)]
+pub enum TestOutcome {
+ Passed,
+ Failed,
+ Unresolved,
+ Incompatible,
+}
+
+pub struct Events {
+ reporter: Reporter,
+
+ all_archives: ProgressBar,
+ current_archive: ProgressBar,
+
+ source_files: ProgressBar,
+
+ passed: ProgressBar,
+ failed: ProgressBar,
+ unresolved: ProgressBar,
+ incompatible: ProgressBar,
+}
+
+impl Events {
+ pub fn new(archives_count: usize, files_count: usize) -> Self {
+ let mut reporter = Reporter::new();
+
+ reporter.add_blank();
+
+ let all_archives = reporter.add_progress("All Archives", archives_count);
+ let current_archive = reporter.add_progress("Current Archive", 0);
+
+ reporter.add_blank();
+
+ let source_files = reporter.add_counter("📄 Source Files", Color::White, files_count);
+
+ reporter.add_blank();
+ reporter.add_label("Contract Stats:");
+
+ let passed = reporter.add_counter("✅ Passed", Color::Green, 0);
+ let failed = reporter.add_counter("❌ Failed", Color::Red, 0);
+ let unresolved = reporter.add_counter("❔ Unresolved", Color::White, 0);
+ let incompatible = reporter.add_counter("❕ Incompatible", Color::White, 0);
+
+ reporter.add_blank();
+
+ Self {
+ reporter,
+
+ all_archives,
+ current_archive,
+
+ source_files,
+
+ passed,
+ failed,
+ unresolved,
+ incompatible,
+ }
+ }
+
+ #[allow(clippy::cast_possible_truncation)]
+ pub fn failure_count(&self) -> usize {
+ self.failed.position() as usize
+ }
+
+ pub fn start_archive(&mut self, contract_count: usize) {
+ self.current_archive.reset();
+ self.current_archive.set_length(contract_count as u64);
+
+ self.reporter.show();
+ }
+
+ pub fn inc_files_count(&self, additional_files: usize) {
+ self.source_files.inc_length(additional_files as u64);
+ }
+
+ pub fn inc_files_processed(&self, files_processed: usize) {
+ self.source_files.inc(files_processed as u64);
+ }
+
+ pub fn finish_archive(&mut self) {
+ self.all_archives.inc(1);
+
+ self.reporter.hide();
+
+ GitHub::collapse_group("Statistics:", || {
+ self.reporter.print_full_report();
+ });
+ }
+
+ pub fn test(&self, outcome: TestOutcome) {
+ self.current_archive.inc(1);
+
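+ // Every counter's length tracks the total number of tests run, so each
+ // bar's percent column shows that outcome's share of all tests.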
+ self.passed.inc_length(1);
+ self.failed.inc_length(1);
+ self.unresolved.inc_length(1);
+ self.incompatible.inc_length(1);
+
+ match outcome {
+ TestOutcome::Passed => self.passed.inc(1),
+ TestOutcome::Failed => self.failed.inc(1),
+ TestOutcome::Unresolved => self.unresolved.inc(1),
+ TestOutcome::Incompatible => self.incompatible.inc(1),
+ };
+ }
+
+ fn test_error(&self, message: impl AsRef<str>) {
+ match self.failed.position().cmp(&MAX_PRINTED_FAILURES) {
+ cmp::Ordering::Less => {
+ self.reporter.println(message);
+ }
+ cmp::Ordering::Equal => {
+ self.reporter.println(format!(
+ "More than {MAX_PRINTED_FAILURES} failures shown. Additional failures will be silent."
+ ));
+ }
+ cmp::Ordering::Greater => {
+ // Don't print any more messages...
+ }
+ };
+ }
+
+ pub fn parse_error(&self, message: impl AsRef<str>) {
+ self.test_error(message);
+ }
+
+ pub fn version_error(&self, message: impl AsRef<str>) {
+ self.test_error(message);
+ }
+
+ pub fn bindings_error(&self, message: impl AsRef<str>) {
+ self.test_error(message);
+ }
+
+ pub fn trace(&self, message: impl AsRef<str>) {
+ self.reporter.println(message);
+ }
+
+ pub fn to_results(&self) -> ShardResults {
+ ShardResults {
+ source_files: self.source_files.position(),
+ passed: self.passed.position(),
+ failed: self.failed.position(),
+ unresolved: self.unresolved.position(),
+ incompatible: self.incompatible.position(),
+ elapsed: self.all_archives.elapsed(),
+ }
+ }
+}
diff --git a/crates/solidity/testing/sourcify/src/import_resolver.rs b/crates/solidity/testing/sourcify/src/import_resolver.rs
new file mode 100644
index 0000000000..66f345ec0a
--- /dev/null
+++ b/crates/solidity/testing/sourcify/src/import_resolver.rs
@@ -0,0 +1,434 @@
+use std::path::{Component, PathBuf};
+use std::str::FromStr;
+
+use anyhow::{bail, Error, Result};
+use url::Url;
+
+pub struct ImportResolver {
+ import_remaps: Vec<ImportRemap>,
+ source_maps: Vec<SourceMap>,
+}
+
+impl ImportResolver {
+ /// Given an import path from a source file, traverse the remappings to find the real filename of
+ /// the target source file.
+ pub fn resolve_import(&self, source_id: &str, import_path: &str) -> Option<String> {
+ let source_virtual_path = self.get_virtual_path(source_id)?;
+
+ if let Some(remapped_import) = self.remap_import(&source_virtual_path, import_path) {
+ // Paths that have been remapped don't need to go through path resolution.
+ return self.get_source_id(&remapped_import);
+ }
+
+ if import_path.starts_with('@') {
+ return self.get_source_id(import_path);
+ }
+
+ if path_is_url(import_path) {
+ // URL imports don't need path resolution
+ return self.get_source_id(import_path);
+ }
+
+ let source_is_url = path_is_url(&source_virtual_path);
+
+ let resolved_path = if source_is_url {
+ resolve_relative_url_import(&source_virtual_path, import_path).ok()?
+ } else {
+ resolve_relative_import(&source_virtual_path, import_path).ok()?
+ };
+
+ self.get_source_id(&resolved_path).or_else(|| {
+ if source_is_url {
+ // Sometimes imports from URL-imports don't share the URL prefix
+ self.get_source_id(import_path)
+ } else if let Some(remapped_import) =
+ self.remap_import(&source_virtual_path, &resolved_path)
+ {
+ // Sometimes relative paths still need to be remapped after being resolved
+ self.get_source_id(&remapped_import)
+ } else {
+ // All other cases just say we couldn't resolve anything
+ None
+ }
+ })
+ }
+
+ pub fn get_source_id(&self, virtual_path: &str) -> Option<String> {
+ self.source_maps
+ .iter()
+ .find(|source| source.matches_virtual_path(virtual_path))
+ .map(|source| source.source_id.clone())
+ }
+
+ pub fn get_virtual_path(&self, source_id: &str) -> Option<String> {
+ self.source_maps
+ .iter()
+ .find(|source| source.matches_source_id(source_id))
+ .map(|source| source.virtual_path.clone())
+ }
+
+ pub fn sources_count(&self) -> usize {
+ self.source_maps.len()
+ }
+
+ fn remap_import(&self, source_virtual_path: &str, import_path: &str) -> Option<String> {
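+ // When several remappings apply, pick the most specific one; see
+ // `ImportRemap::match_len` below.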
+ self.import_remaps
+ .iter()
+ .filter(|remap| remap.matches(source_virtual_path, import_path))
+ .reduce(|longest, current| {
+ if current.match_len() > longest.match_len() {
+ current
+ } else {
+ longest
+ }
+ })
+ .map(|remap| import_path.replacen(&remap.prefix, &remap.target, 1))
+ }
+}
+
+struct SourceMap {
+ /// The actual filename for the source file, as found in the archive. This name can
+ /// be used to read the content of a source file.
+ source_id: String,
+ /// The path to the source file in the contract's "virtual filesystem". This is the
+ /// path to the source file as the contract was originally constructed. This value
+ /// should be used when resolving imports to the real source files.
+ virtual_path: String,
+}
+
+impl SourceMap {
+ fn matches_virtual_path(&self, virtual_path: &str) -> bool {
+ self.virtual_path == virtual_path || self.virtual_path.replace("//", "/") == virtual_path
+ }
+
+ fn matches_source_id(&self, source_id: &str) -> bool {
+ self.source_id == source_id
+ }
+}
+
+struct ImportRemap {
+ /// If provided, then this remap only applies to imports inside source files
+ /// whose paths begin with this string.
+ context: Option,
+ /// The prefix value which will be found in the import path and replaced by
+ /// `target`.
+ prefix: String,
+ /// The target virtual path. Replacing `prefix` with `target` in the import
+ /// path from a source file should give you a path that can be looked up
+ /// in `Metadata::sources`.
+ target: String,
+}
+
+impl ImportRemap {
+ fn new(remap_str: &str) -> Result<Self> {
+ let Some((context, rest)) = remap_str.split_once(':') else {
+ bail!("{remap_str}: Could not separate context from mapping");
+ };
+
+ let Some((prefix, target)) = rest.split_once('=') else {
+ bail!("{remap_str}: Could not separate prefix and target");
+ };
+
+ Ok(ImportRemap {
+ context: if context.is_empty() {
+ None
+ } else {
+ Some(context.into())
+ },
+ prefix: prefix.into(),
+ target: target.into(),
+ })
+ }
+
+ /// Determine if `self` applies to the import `import_path` found in the file at `source_path`.
+ fn matches(&self, source_path: &str, import_path: &str) -> bool {
+ let context_matches = if let Some(context) = &self.context {
+ source_path.starts_with(context)
+ } else {
+ true
+ };
+
+ context_matches && import_path.starts_with(&self.prefix)
+ }
+
+ /// The `match_size` is the length of the remap context + the length of the remap
+ /// prefix. This is used to compare `ImportRemap`s: if a source file + import path combo matches
+ /// two or more `ImportRemap`s, then it should use the one with the biggest match, since
+ /// that one will be the most specific.
+ fn match_len(&self) -> usize {
+ self.context.as_ref().map_or(0, |c| c.len()) + self.prefix.len()
+ }
+
+ /// Sometimes contracts contain a remapping entry that, for whatever reason,
+ /// is buggy. We can collect buggy remaps here so they're skipped during
+ /// import path resolution.
+ pub fn has_known_bug(&self) -> bool {
+ // Ex Contract: 0x56D47372A66b3f640Bff83E745dE7D10f4B29075
+ // Remapping list includes ":./=remappings.txt/"
+ if self.target.contains("remappings.txt") {
+ return true;
+ }
+
+ false
+ }
+}
+
+impl TryFrom<serde_json::Value> for ImportResolver {
+ type Error = anyhow::Error;
+
+ fn try_from(value: serde_json::Value) -> Result<Self, Self::Error> {
+ let import_remaps: Vec<ImportRemap> = value
+ .get("settings")
+ .and_then(|settings| settings.get("remappings"))
+ .and_then(|remappings| remappings.as_array())
+ .ok_or(Error::msg(
+ "Could not find settings.remappings array entry.",
+ ))
+ .map(|mappings| {
+ mappings
+ .iter()
+ .filter_map(|mapping| mapping.as_str())
+ .filter_map(|m| ImportRemap::new(m).ok())
+ .filter(|remap| !remap.has_known_bug())
+ .collect()
+ })?;
+
+ let source_maps: Vec<SourceMap> = value
+ .get("sources")
+ .and_then(|sources| sources.as_object())
+ .ok_or(Error::msg(
+ "Could not get sources entry in contract metadata.",
+ ))
+ .map(|sources| {
+ sources
+ .iter()
+ .filter_map(|(key, value)| {
+ value
+ .get("keccak256")
+ .and_then(|k| k.as_str())
+ .map(|source_id| SourceMap {
+ source_id: source_id.into(),
+ virtual_path: key.clone(),
+ })
+ })
+ .collect()
+ })?;
+
+ Ok(ImportResolver {
+ import_remaps,
+ source_maps,
+ })
+ }
+}
+
+/// Resolve an import path that is relative to `source_path`.
+fn resolve_relative_import(source_path: &str, import_path: &str) -> Result<String> {
+ let source_file_path = PathBuf::from_str(source_path)?;
+ let source_dir = source_file_path.parent().ok_or(Error::msg(format!(
+ "Could not get directory of source file {source_path}"
+ )))?;
+
+ let import_path = PathBuf::from_str(import_path)?;
+
+ let mut resolved_parts = vec![];
+
+ // We're basically doing what `Path::canonicalize()` would do, but we can't use that because
+ // the path must be a real path in the filesystem, and we're operating on "virtual" paths.
+ // We check the first import path component to initialize `resolved_parts`
+ let mut import_path_components = import_path.components();
+ match import_path_components.next().unwrap() {
+ // a/b.sol - an absolute path, so we can ignore `source_path`
+ norm @ Component::Normal(_) => resolved_parts.push(norm),
+ // /a/b.sol
+ root @ Component::RootDir => resolved_parts.push(root),
+ // ./a/b.sol - relative to `source_path`
+ Component::CurDir => resolved_parts.extend(source_dir.components()),
+ // ../a/b.sol - relative, but one dir above `source_dir`
+ Component::ParentDir => {
+ resolved_parts.extend(source_dir.components());
+ resolved_parts.pop();
+ }
+ Component::Prefix(_) => {
+ bail!("Found prefix component in import path, which is not supported")
+ }
+ }
+
+ for component in import_path_components {
+ match component {
+ norm @ Component::Normal(_) => resolved_parts.push(norm),
+ Component::ParentDir => {
+ resolved_parts.pop();
+ }
+ Component::CurDir => {}
+ invalid => bail!("Invalid path component found inside import path: {invalid:?}"),
+ }
+ }
+
+ let resolved_import_path: PathBuf = resolved_parts.iter().collect();
+ Ok(resolved_import_path.to_str().unwrap().into())
+}
+
+/// Resolve an import from a source file which was imported using a URL. These need a bit of special handling
+/// because the resolved path needs to also be a URL.
+fn resolve_relative_url_import(source_path: &str, import_path: &str) -> Result<String> {
+ let url = Url::parse(source_path)?;
+
+ let path = url.path();
+ let path = path.strip_prefix('/').unwrap_or(path);
+
+ let resolved_path = resolve_relative_import(path, import_path)?;
+
+ Ok(format!(
+ "{scheme}://{host}/{resolved_path}",
+ scheme = url.scheme(),
+ host = url.host_str().unwrap(),
+ ))
+}
+
+fn path_is_url(path: &str) -> bool {
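+ // `Url::parse` accepts many scheme-like strings (e.g. "a:b"), so only
+ // treat a path as a URL when it has a "special" scheme in the WHATWG
+ // sense (http, https, ws, wss, ftp, or file).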
+ Url::parse(path)
+ .map(|url| url.is_special())
+ .unwrap_or(false)
+}
+
+#[cfg(test)]
+mod test {
+ use super::{ImportRemap, ImportResolver, SourceMap};
+
+ #[test]
+ fn absolute_import() {
+ let resolver = new_resolver(
+ &[],
+ &[("src/main.sol", "entry"), ("src/a/other.sol", "target")],
+ );
+ test_import(&resolver, "entry", "src/a/other.sol", "target");
+ }
+
+ #[test]
+ fn relative_import() {
+ let resolver = new_resolver(
+ &[],
+ &[("src/main.sol", "target"), ("src/a/other.sol", "entry")],
+ );
+ test_import(&resolver, "entry", "../main.sol", "target");
+ }
+
+ #[test]
+ fn remapped_import() {
+ let resolver = new_resolver(
+ &[":xd=src/a"],
+ &[("src/main.sol", "entry"), ("src/a/other.sol", "target")],
+ );
+ test_import(&resolver, "entry", "xd/other.sol", "target");
+ }
+
+ #[test]
+ fn remapped_import_with_context() {
+ let resolver = new_resolver(
+ &["src/b:xd=src/a"],
+ &[("src/a/other.sol", "target"), ("src/b/extra.sol", "entry")],
+ );
+ test_import(&resolver, "entry", "xd/other.sol", "target");
+ }
+
+ #[test]
+ fn url_import() {
+ let resolver = new_resolver(
+ &[],
+ &[
+ ("src/main.sol", "entry"),
+ ("https://github.com/org/project/main.sol", "target"),
+ ],
+ );
+ test_import(
+ &resolver,
+ "entry",
+ "https://github.com/org/project/main.sol",
+ "target",
+ );
+ }
+
+ #[test]
+ fn relative_import_from_url_source() {
+ let resolver = new_resolver(
+ &[],
+ &[
+ ("https://github.com/org/project/main.sol", "target"),
+ ("https://github.com/org/project/folder/other.sol", "entry"),
+ ],
+ );
+ test_import(&resolver, "entry", "../main.sol", "target");
+ }
+
+ #[test]
+ fn remap_with_relative_path() {
+ // If a remap target contains a relative path, then that path is not expanded/resolved
+ // when sourcify creates the sources list. Instead, it's used verbatim.
+ let resolver = new_resolver(
+ &[":xd=../folder"],
+ &[("main.sol", "entry"), ("../folder/file.sol", "target")],
+ );
+ test_import(&resolver, "entry", "xd/file.sol", "target");
+ }
+
+ #[test]
+ fn dueling_remaps() {
+ let resolver = new_resolver(
+ &["a:@org=node_modules/@org", "b:@org=node_modules/@org-v2"],
+ &[
+ ("a/file.sol", "entry_a"),
+ ("b/file.sol", "entry_b"),
+ ("node_modules/@org/main.sol", "target_a"),
+ ("node_modules/@org-v2/main.sol", "target_b"),
+ ],
+ );
+ test_import(&resolver, "entry_a", "@org/main.sol", "target_a");
+ test_import(&resolver, "entry_b", "@org/main.sol", "target_b");
+ }
+
+ #[test]
+ fn url_source_fallback() {
+ // Sometimes, imports found in a URL source file don't share the URL prefix.
+ // We can fallback on the import path to resolve the source file.
+ let resolver = new_resolver(
+ &[],
+ &[
+ ("https://github.com/org/project/main.sol", "entry"),
+ ("file.sol", "target"),
+ ],
+ );
+ test_import(&resolver, "entry", "file.sol", "target");
+ }
+
+ fn test_import(
+ resolver: &ImportResolver,
+ source_id: &str,
+ import_path: &str,
+ expected_id: &str,
+ ) {
+ let resolved_id = resolver
+ .resolve_import(source_id, import_path)
+ .expect("Could not resolve import");
+ assert_eq!(resolved_id, expected_id);
+ }
+
+ fn new_resolver(remap_strs: &[&str], sources: &[(&str, &str)]) -> ImportResolver {
+ let import_remaps: Vec<_> = remap_strs
+ .iter()
+ .flat_map(|s| ImportRemap::new(s))
+ .collect();
+
+ let source_maps: Vec<_> = sources
+ .iter()
+ .map(|(path, id)| SourceMap {
+ source_id: id.to_owned().into(),
+ virtual_path: path.to_owned().into(),
+ })
+ .collect();
+
+ ImportResolver {
+ import_remaps,
+ source_maps,
+ }
+ }
+}
diff --git a/crates/solidity/testing/sourcify/src/main.rs b/crates/solidity/testing/sourcify/src/main.rs
new file mode 100644
index 0000000000..48483915e0
--- /dev/null
+++ b/crates/solidity/testing/sourcify/src/main.rs
@@ -0,0 +1,119 @@
+mod command;
+mod compilation_builder;
+mod events;
+mod import_resolver;
+mod reporting;
+mod results;
+mod run;
+mod sourcify;
+
+use std::path::PathBuf;
+
+use anyhow::Result;
+use clap::Parser;
+use command::{Commands, ShowCombinedResultsCommand};
+use events::Events;
+use infra_utils::github::GitHub;
+use infra_utils::paths::PathExtensions;
+use infra_utils::terminal::Terminal;
+use results::{display_all_results, AllResults};
+use run::{run_in_parallel, run_with_trace, test_single_contract};
+use sourcify::{ContractArchive, Manifest};
+
+fn main() -> Result<()> {
+ let command::Cli { command } = command::Cli::parse();
+ match command {
+ Commands::Test(test_command) => run_test_command(test_command),
+ Commands::ShowCombinedResults(results_command) => {
+ run_show_combined_results_command(results_command)
+ }
+ }
+}
+
+// `cmd` is passed by value because it's consumed in the spawned thread,
+// but for whatever reason clippy can't figure that out
+#[allow(clippy::needless_pass_by_value)]
+fn run_test_command(cmd: command::TestCommand) -> Result<()> {
+ Terminal::step(format!("Initialize chain {chain}", chain = cmd.chain_id,));
+
+ let manifest = Manifest::new(cmd.chain_id, &cmd.sharding_options)
+ .inspect_err(|e| eprintln!("Error fetching chain manifest: {e}"))?;
+
+ if let Some(contract) = &cmd.contract {
+ return test_single_contract(&manifest, contract, &cmd.test_options);
+ }
+
+ let archive_count = manifest.archive_count();
+
+ let (tx, rx) = std::sync::mpsc::channel::<ContractArchive>();
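+ // The channel forms a simple pipeline: the main thread fetches and
+ // unpacks archives (network-bound) while the spawned thread runs the
+ // tests (CPU-bound), so the two phases overlap.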
+
+ // Test archives which have been fetched and unpacked
+ let testing_thread = std::thread::spawn(move || -> Events {
+ let mut events = Events::new(archive_count, 0);
+ for archive in rx {
+ Terminal::step(archive.display_path());
+
+ events.start_archive(archive.contract_count());
+ if cmd.trace {
+ run_with_trace(&archive, &events, &cmd.test_options);
+ } else {
+ run_in_parallel(&archive, &events, &cmd.test_options);
+ }
+ events.finish_archive();
+
+ if !cmd.save {
+ archive.clean();
+ }
+ }
+
+ events
+ });
+
+ // Fetching the shards in this closure so that it takes ownership of the sender
+ // The sender needs to be dropped so that process_thread can finish
+ let fetcher = |t: std::sync::mpsc::Sender<ContractArchive>| {
+ for archive_desc in manifest.archives() {
+ match ContractArchive::fetch(archive_desc) {
+ Ok(archive) => t.send(archive).unwrap(),
+ Err(e) => eprintln!("Failed to fetch archive {}:\n{e}", archive_desc.url),
+ }
+ }
+ };
+
+ fetcher(tx);
+
+ let events = testing_thread.join().unwrap();
+
+ if GitHub::is_running_in_ci() {
+ let output_path = PathBuf::from("target").join("__SLANG_SOURCIFY_SHARD_RESULTS__.json");
+ let results = events.to_results();
+ let value = serde_json::to_string(&results)?;
+
+ std::fs::create_dir_all(output_path.parent().unwrap())?;
+ output_path.write_string(value)?;
+ println!("Wrote results to {output_path:?}");
+ }
+
+ let failure_count = events.failure_count();
+ if failure_count > 0 {
+ println!(
+ "\nFound {failure_count} failure(s). Please check the logs above for more information.\n",
+ );
+ }
+
+ Ok(())
+}
+
+fn run_show_combined_results_command(command: ShowCombinedResultsCommand) -> Result<()> {
+ let ShowCombinedResultsCommand { results_file } = command;
+
+ let contents = results_file.read_to_string()?;
+ let all_results: AllResults = serde_json::from_str(&contents)?;
+ display_all_results(&all_results);
+ Ok(())
+}
diff --git a/crates/solidity/testing/sourcify/src/reporting.rs b/crates/solidity/testing/sourcify/src/reporting.rs
new file mode 100644
index 0000000000..5e5e2338ae
--- /dev/null
+++ b/crates/solidity/testing/sourcify/src/reporting.rs
@@ -0,0 +1,140 @@
+use std::io::Write;
+use std::time::Duration;
+
+use console::{style, Color, Term};
+use indicatif::{InMemoryTerm, MultiProgress, ProgressBar, ProgressDrawTarget, ProgressStyle};
+
+const TICK_FREQUENCY: Duration = Duration::from_millis(250);
+
+pub struct Reporter {
+ parent: MultiProgress,
+ children: Vec,
+ is_visible: bool,
+}
+
+impl Reporter {
+ pub fn new() -> Self {
+ let mut reporter = Self {
+ parent: MultiProgress::new(),
+ children: vec![],
+ is_visible: true,
+ };
+
+ // [`MultiProgress`] is created visible by default.
+ // Hide until there is data to show:
+ reporter.hide();
+
+ reporter
+ }
+
+ pub fn println(&self, line: impl AsRef<str>) {
+ assert!(self.is_visible);
+
+ self.parent.suspend(|| {
+ println!("{0}", line.as_ref());
+ });
+ }
+
+ pub fn show(&mut self) {
+ assert!(!self.is_visible);
+
+ self.parent.set_draw_target(ProgressDrawTarget::stderr());
+ self.is_visible = true;
+ }
+
+ pub fn hide(&mut self) {
+ assert!(self.is_visible);
+
+ self.parent.clear().unwrap();
+ self.parent.set_draw_target(ProgressDrawTarget::hidden());
+ self.is_visible = false;
+ }
+
+ pub fn print_full_report(&mut self) {
+ assert!(!self.is_visible);
+
+ let (rows, cols) = Term::stdout().size();
+ let buffer = InMemoryTerm::new(rows, cols);
+
+ self.parent
+ .set_draw_target(ProgressDrawTarget::term_like(Box::new(buffer.clone())));
+
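+ // Force one synchronous redraw of each bar into the in-memory terminal;
+ // its captured contents are then written to stdout below.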
+ for child in &self.children {
+ child.disable_steady_tick();
+ child.tick();
+ child.enable_steady_tick(TICK_FREQUENCY);
+ }
+
+ self.parent.set_draw_target(ProgressDrawTarget::hidden());
+
+ std::io::stdout()
+ .write_all(buffer.contents_formatted().as_slice())
+ .unwrap();
+
+ println!();
+ }
+
+ pub fn add_blank(&mut self) {
+ let message = " ".repeat(1000);
+ let template = "{wide_msg}";
+
+ self.add_bar(message, template, 0);
+ }
+
+ pub fn add_label(&mut self, message: &str) {
+ let template = "{wide_msg}";
+ self.add_bar(message, template, 0);
+ }
+
+ pub fn add_progress(&mut self, message: impl Into<String>, total: usize) -> ProgressBar {
+ let template = "[{elapsed_precise}] {msg:^17} [{wide_bar:.cyan/blue}] {human_pos:>5}/{human_len:<5} [ETA: {eta_precise:>3}]";
+
+ self.add_bar(message, template, total)
+ }
+
+ pub fn add_counter(
+ &mut self,
+ message: impl Into<String>,
+ color: Color,
+ total: usize,
+ ) -> ProgressBar {
+ let template = format!(
+ " {{msg:<15}} : {position} : {percent}",
+ position = style("{human_pos:>7}").fg(color).bright(),
+ percent = style("{percent_precise:>7} %").fg(color).bright(),
+ );
+
+ self.add_bar(message, template, total)
+ }
+
+ fn add_bar(
+ &mut self,
+ message: impl Into<String>,
+ template: impl AsRef<str>,
+ total: usize,
+ ) -> ProgressBar {
+ let style = ProgressStyle::with_template(template.as_ref())
+ .unwrap()
+ .progress_chars("#>-");
+
+ let bar = ProgressBar::hidden();
+
+ bar.set_message(message.into());
+ bar.set_style(style);
+ bar.set_length(total as u64);
+ bar.enable_steady_tick(TICK_FREQUENCY);
+
+ self.children.push(bar.clone());
+ self.parent.add(bar)
+ }
+}
+
+impl Drop for Reporter {
+ fn drop(&mut self) {
+ for child in &self.children {
+ child.finish_and_clear();
+
+ self.parent.remove(child);
+ }
+ }
+}
diff --git a/crates/solidity/testing/sourcify/src/results.rs b/crates/solidity/testing/sourcify/src/results.rs
new file mode 100644
index 0000000000..617634c665
--- /dev/null
+++ b/crates/solidity/testing/sourcify/src/results.rs
@@ -0,0 +1,115 @@
+use std::collections::BTreeMap;
+use std::time::Duration;
+
+use indicatif::{FormattedDuration, HumanCount};
+use serde::de::{Error, Visitor};
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct ShardResults {
+ pub source_files: u64,
+ pub passed: u64,
+ pub failed: u64,
+ pub unresolved: u64,
+ pub incompatible: u64,
+ pub elapsed: Duration,
+}
+
+#[derive(Debug)]
+pub struct AllResults {
+ pub shards: BTreeMap<usize, ShardResults>,
+}
+
+impl<'de> Deserialize<'de> for AllResults {
+ fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ deserializer.deserialize_map(AllResultsVisitor {})
+ }
+}
+
+struct AllResultsVisitor {}
+
+impl<'de> Visitor<'de> for AllResultsVisitor {
+ type Value = AllResults;
+
+ fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ formatter.write_str("a results map")
+ }
+
+ fn visit_map<M>(self, mut access: M) -> std::result::Result<Self::Value, M::Error>
+ where
+ M: serde::de::MapAccess<'de>,
+ {
+ use serde::de::Unexpected;
+
+ let mut shards: BTreeMap<usize, ShardResults> = BTreeMap::new();
+ while let Some((key, value)) = access.next_entry::<String, String>()? {
+ let shard_index = key
+ .strip_prefix("__SLANG_SOURCIFY_SHARD_RESULTS__")
+ .ok_or(Error::invalid_value(
+ Unexpected::Str(&key),
+ &"a string prefixed with __SLANG_SOURCIFY_SHARD_RESULTS__",
+ ))?
+ .parse()
+ .map_err(|_| {
+ Error::invalid_value(Unexpected::Str(&key), &"a positive shard index")
+ })?;
+ let shard_results = serde_json::from_str(&value).map_err(|_| {
+ Error::invalid_value(
+ Unexpected::Str(&value),
+ &"a JSON string with the shard results",
+ )
+ })?;
+ shards.insert(shard_index, shard_results);
+ }
+
+ Ok(AllResults { shards })
+ }
+}
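+
+// A minimal sketch of the input this visitor accepts: the combined-results
+// file maps `__SLANG_SOURCIFY_SHARD_RESULTS__<index>` keys to JSON-encoded
+// `ShardResults` strings, mirroring `toJSON(needs.singleShard.outputs)`.
+#[cfg(test)]
+mod all_results_test {
+ use super::AllResults;
+
+ #[test]
+ fn parses_shard_outputs() {
+ let raw = r#"{"__SLANG_SOURCIFY_SHARD_RESULTS__0":
+ "{\"source_files\":1,\"passed\":1,\"failed\":0,\"unresolved\":0,\"incompatible\":0,\"elapsed\":{\"secs\":1,\"nanos\":0}}"}"#;
+ let all: AllResults = serde_json::from_str(raw).unwrap();
+ assert_eq!(all.shards.len(), 1);
+ assert_eq!(all.shards[&0].passed, 1);
+ }
+}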
+
+pub fn display_all_results(all_results: &AllResults) {
+ let mut totals = ShardResults::default();
+ println!("Shard ID | Source files | Passed | Failed | Incompatible | Not found | Elapsed");
+ println!("------------------------------------------------------------------------------------------------");
+ for (shard_index, shard_results) in &all_results.shards {
+ println!(
+ "{shard_index:<8} | \
+ {source_files:>12} | \
+ {passed:>12} | \
+ {failed:>12} | \
+ {unresolved:>12} | \
+ {incompatible:>12} | \
+ {elapsed}",
+ source_files = format!("{}", HumanCount(shard_results.source_files)),
+ passed = format!("{}", HumanCount(shard_results.passed)),
+ failed = format!("{}", HumanCount(shard_results.failed)),
+ unresolved = format!("{}", HumanCount(shard_results.unresolved)),
+ incompatible = format!("{}", HumanCount(shard_results.incompatible)),
+ elapsed = FormattedDuration(shard_results.elapsed),
+ );
+ totals.source_files += shard_results.source_files;
+ totals.passed += shard_results.passed;
+ totals.failed += shard_results.failed;
+ totals.unresolved += shard_results.unresolved;
+ totals.incompatible += shard_results.incompatible;
+ totals.elapsed += shard_results.elapsed;
+ }
+ println!("------------------------------------------------------------------------------------------------");
+ println!(
+ "TOTALS | \
+ {source_files:>12} | \
+ {passed:>12} | \
+ {failed:>12} | \
+ {unresolved:>12} | \
+ {incompatible:>12} | \
+ {elapsed}",
+ source_files = format!("{}", HumanCount(totals.source_files)),
+ passed = format!("{}", HumanCount(totals.passed)),
+ failed = format!("{}", HumanCount(totals.failed)),
+ unresolved = format!("{}", HumanCount(totals.unresolved)),
+ incompatible = format!("{}", HumanCount(totals.incompatible)),
+ elapsed = FormattedDuration(totals.elapsed),
+ );
+}
diff --git a/crates/solidity/testing/sourcify/src/run.rs b/crates/solidity/testing/sourcify/src/run.rs
new file mode 100644
index 0000000000..274cda84e2
--- /dev/null
+++ b/crates/solidity/testing/sourcify/src/run.rs
@@ -0,0 +1,288 @@
+use anyhow::{bail, Result};
+use rayon::iter::{ParallelBridge, ParallelIterator};
+use slang_solidity::compilation::CompilationUnit;
+use slang_solidity::cst::{Cursor, NodeKind, NonterminalKind, TerminalKindExtensions, TextRange};
+use slang_solidity::diagnostic::{Diagnostic, Severity};
+use slang_solidity::utils::LanguageFacts;
+
+use crate::command::TestOptions;
+use crate::events::{Events, TestOutcome};
+use crate::sourcify::{Contract, ContractArchive, Manifest};
+
+pub fn test_single_contract(
+ manifest: &Manifest,
+ contract_id: &str,
+ opts: &TestOptions,
+) -> Result<()> {
+ let Some(contract) = manifest.fetch_contract(contract_id) else {
+ bail!("Contract {contract_id} not found");
+ };
+
+ let mut events = Events::new(1, 0);
+
+ events.start_archive(1);
+ run_test(&contract, &events, opts);
+ events.finish_archive();
+
+ Ok(())
+}
+
+pub fn run_with_trace(archive: &ContractArchive, events: &Events, opts: &TestOptions) {
+ for contract in archive.contracts() {
+ events.trace(format!(
+ "[{version}] Starting contract {name}",
+ version = contract.version,
+ name = contract.name
+ ));
+ run_test(&contract, events, opts);
+ events.trace(format!(
+ "[{version}] Finished contract {name}",
+ version = contract.version,
+ name = contract.name
+ ));
+ }
+}
+
+pub fn run_in_parallel(archive: &ContractArchive, events: &Events, opts: &TestOptions) {
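+ // `panic_fuse` makes the parallel run halt as soon as possible if any
+ // single contract test panics, instead of finishing the remaining items.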
+ archive
+ .contracts()
+ .par_bridge()
+ .panic_fuse()
+ .for_each(|contract| run_test(&contract, events, opts));
+}
+
+fn run_test(contract: &Contract, events: &Events, opts: &TestOptions) {
+ if uses_exotic_parser_bug(contract) {
+ events.test(TestOutcome::Incompatible);
+ return;
+ }
+
+ let sources_count = contract.sources_count();
+ events.inc_files_count(sources_count);
+
+ let test_outcome = match contract.create_compilation_unit() {
+ Ok(unit) => {
+ let mut test_outcome = run_parser_check(contract, &unit, events);
+
+ if opts.check_infer_version && test_outcome == TestOutcome::Passed {
+ test_outcome = run_version_inference_check(contract, &unit, events);
+ }
+
+ if opts.check_bindings && test_outcome == TestOutcome::Passed {
+ test_outcome = run_bindings_check(contract, &unit, events);
+ }
+
+ test_outcome
+ }
+ Err(e) => {
+ events.trace(format!(
+ "Failed to compile contract {}: {e}\n{}",
+ contract.name,
+ e.backtrace()
+ ));
+ TestOutcome::Unresolved
+ }
+ };
+
+ events.inc_files_processed(sources_count);
+ events.test(test_outcome);
+}
+
+fn run_parser_check(contract: &Contract, unit: &CompilationUnit, events: &Events) -> TestOutcome {
+ let mut test_outcome = TestOutcome::Passed;
+ for file in unit.files() {
+ if !file.errors().is_empty() {
+ if let Ok(source) = contract.read_file(file.id()) {
+ let source_name = contract
+ .import_resolver
+ .get_virtual_path(file.id())
+ .unwrap_or(file.id().into());
+
+ for error in file.errors() {
+ let msg =
+ slang_solidity::diagnostic::render(error, &source_name, &source, true);
+ events.parse_error(format!(
+ "[{version}] Parse error in contract {contract_name}\n{msg}",
+ contract_name = contract.name,
+ version = contract.version
+ ));
+ }
+ }
+
+ test_outcome = TestOutcome::Failed;
+ }
+ }
+
+ test_outcome
+}
+
+fn run_version_inference_check(
+ contract: &Contract,
+ unit: &CompilationUnit,
+ events: &Events,
+) -> TestOutcome {
+ let mut did_fail = false;
+ for file in unit.files() {
+ if let Ok(source) = contract.read_file(file.id()) {
+ if !LanguageFacts::infer_language_versions(&source).any(|v| *v == contract.version) {
+ let source_name = contract
+ .import_resolver
+ .get_source_id(file.id())
+ .unwrap_or(file.id().into());
+ events.version_error(format!(
+ "[{version}] Could not infer correct version for {contract_name}:{source_name}",
+ version = contract.version,
+ contract_name = contract.name,
+ ));
+ did_fail = true;
+ }
+ } else {
+ did_fail = true;
+ }
+ }
+
+ if did_fail {
+ TestOutcome::Failed
+ } else {
+ TestOutcome::Passed
+ }
+}
+
+fn run_bindings_check(
+ contract: &Contract,
+ compilation_unit: &CompilationUnit,
+ events: &Events,
+) -> TestOutcome {
+ let binding_graph = compilation_unit.binding_graph();
+
+ let mut test_outcome = TestOutcome::Passed;
+ for reference in binding_graph.all_references() {
+ let ref_file = reference.get_file();
+
+ if ref_file.is_built_ins() {
+ // skip built-ins
+ continue;
+ }
+ // We're not interested in the exact definition a reference resolves
+ // to, so we lookup all of them and fail if we find none.
+ if reference.definitions().is_empty() {
+ let cursor = reference.get_cursor().to_owned();
+
+ let source = contract.read_file(ref_file.get_path()).unwrap_or_default();
+
+ let binding_error = BindingError::UnresolvedReference(cursor);
+ let msg = slang_solidity::diagnostic::render(
+ &binding_error,
+ ref_file.get_path(),
+ &source,
+ true,
+ );
+ events.bindings_error(format!(
+ "[{version}] Binding Error: Reference has no definitions\n{msg}",
+ version = contract.version,
+ ));
+
+ test_outcome = TestOutcome::Failed;
+ }
+ }
+
+ // Check that all identifier nodes are bound to either a definition or a reference:
+ for file in compilation_unit.files() {
+ let mut cursor = file.create_tree_cursor();
+ while cursor.go_to_next_terminal() {
+ if !matches!(cursor.node().kind(), NodeKind::Terminal(kind) if kind.is_identifier()) {
+ continue;
+ }
+
+ if matches!(
+ cursor.ancestors().next(),
+ Some(ancestor)
+ // Ignore identifiers in certain pragma contexts
+ // `pragma experimental`: they are unbound feature names
+ // `pragma abicoder`: they are unbound abi version names
+ if ancestor.kind == NonterminalKind::ExperimentalFeature || ancestor.kind == NonterminalKind::AbicoderPragma
+ ) {
+ continue;
+ }
+
+ if binding_graph.definition_at(&cursor).is_none()
+ && binding_graph.reference_at(&cursor).is_none()
+ {
+ let binding_error = BindingError::UnboundIdentifier(cursor.clone());
+
+ if let Ok(source) = contract.read_file(file.id()) {
+ let msg = slang_solidity::diagnostic::render(
+ &binding_error,
+ file.id(),
+ &source,
+ true,
+ );
+ events.bindings_error(format!(
+ "[{version}] Binding Error: No definition or reference\n{msg}",
+ version = contract.version,
+ ));
+ }
+
+ test_outcome = TestOutcome::Failed;
+ }
+ }
+ }
+
+ test_outcome
+}
+
+fn uses_exotic_parser_bug(contract: &Contract) -> bool {
+ static CONTRACTS_WITH_EXOTIC_PARSER_BUGS: &[&str] = &[
+ // 0.4.24: Accepts malformed `* /` in multi-line comments.
+ // Fixed in 0.4.25: https://github.com/ethereum/solidity/pull/4937
+ "0x79Bb6f4492D5CB13Fad8cA0ecfBccD9e2c26ac42",
+ // 0.5.11: Double `indexed` keyword
+ // Fixed in 0.8.18: https://github.com/ethereum/solidity/blob/develop/Changelog.md#0818-2023-02-01
+ "0x9F4F8Cb4863D3467F03773cC4c172837106C21D8",
+ // 0.5.16: Double `indexed` keyword
+ // Fixed in 0.8.18: https://github.com/ethereum/solidity/blob/develop/Changelog.md#0818-2023-02-01
+ "0xDe201dAec04ba73166d9917Fdf08e1728E270F06",
+ // 0.4.19: Unclosed multi-line comment at EOF
+ "0xf330AA697a1128B7A2D2204F6794afe0cAAF58FC",
+ ];
+
+ CONTRACTS_WITH_EXOTIC_PARSER_BUGS
+ .iter()
+ .any(|c| c == &contract.name)
+}
+
+enum BindingError {
+ UnresolvedReference(Cursor),
+ UnboundIdentifier(Cursor),
+}
+
+impl Diagnostic for BindingError {
+ fn text_range(&self) -> TextRange {
+ let cursor = match self {
+ Self::UnboundIdentifier(cursor) => cursor,
+ Self::UnresolvedReference(cursor) => cursor,
+ };
+ cursor.text_range()
+ }
+
+ fn severity(&self) -> Severity {
+ Severity::Error
+ }
+
+ fn message(&self) -> String {
+ match self {
+ Self::UnresolvedReference(cursor) => {
+ format!(
+ "Unresolved reference to `{symbol}`",
+ symbol = cursor.node().unparse()
+ )
+ }
+ Self::UnboundIdentifier(cursor) => {
+ format!(
+ "Missing identifier or definition for `{symbol}`",
+ symbol = cursor.node().unparse()
+ )
+ }
+ }
+ }
+}
diff --git a/crates/solidity/testing/sourcify/src/sourcify.rs b/crates/solidity/testing/sourcify/src/sourcify.rs
new file mode 100644
index 0000000000..db90bc8a79
--- /dev/null
+++ b/crates/solidity/testing/sourcify/src/sourcify.rs
@@ -0,0 +1,323 @@
+use std::fs;
+use std::io::BufReader;
+use std::path::{Path, PathBuf};
+
+use anyhow::{bail, Error, Result};
+use infra_utils::cargo::CargoWorkspace;
+use infra_utils::paths::PathExtensions;
+use reqwest::blocking::Client;
+use semver::{BuildMetadata, Prerelease, Version};
+use slang_solidity::compilation::CompilationUnit;
+use tar::Archive;
+
+use crate::command::{ChainId, ShardingOptions};
+use crate::compilation_builder::CompilationBuilder;
+use crate::import_resolver::ImportResolver;
+
+pub struct Manifest {
+ /// Description of the archives that are available to fetch.
+ archive_descriptors: Vec<ArchiveDescriptor>,
+}
+
+impl Manifest {
+ pub fn new(chain_id: ChainId, options: &ShardingOptions) -> Result<Self> {
+ let client = Client::new();
+ let res = client
+ .get("https://repo-backup.sourcify.dev/manifest.json")
+ .send()?;
+
+ let status = res.status();
+ if !status.is_success() {
+ bail!("Error fetching manifest.json");
+ }
+
+ let obj: serde_json::Value = res.json()?;
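+
+ // Sketch of the manifest shape the parsing below relies on (only the
+ // fields read here matter; everything else is ignored):
+ //
+ // { "files": [ { "path": "/sourcify-repository-2025-03-24T03-00-26/full_match.1.00.tar.gz" }, ... ] }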
+ let mut archive_descriptors: Vec<_> = obj
+ .get("files")
+ .and_then(|files| files.as_array())
+ .map(|files| {
+ files
+ .iter()
+ .filter_map(|file| file.get("path").and_then(|val| val.as_str()))
+ .filter_map(|path| {
+ ArchiveDescriptor::new("https://repo-backup.sourcify.dev", path).ok()
+ })
+ .filter(|desc| desc.matches_chain_and_shard(chain_id, options))
+ .collect()
+ })
+ .unwrap_or_default();
+
+ archive_descriptors.sort_by(|a, b| a.prefix.cmp(&b.prefix));
+
+ if archive_descriptors.is_empty() {
+ bail!("No valid archive found for chain {chain_id}");
+ }
+
+ Ok(Manifest {
+ archive_descriptors,
+ })
+ }
+
+ /// Search for a specific contract and return it if found. Returns `None` if the contract cannot
+ /// be fetched for any reason (including if the `contract_id` is not parseable).
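+ ///
+ /// Illustrative usage (the shard prefix is read from bytes 2..4 of the
+ /// `0x`-prefixed contract address):
+ /// `manifest.fetch_contract("0x79Bb6f4492D5CB13Fad8cA0ecfBccD9e2c26ac42")`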
+ pub fn fetch_contract(&self, contract_id: &str) -> Option<Contract> {
+ contract_id
+ .get(2..4)
+ .and_then(|prefix| u8::from_str_radix(prefix, 16).ok())
+ .and_then(|contract_prefix| {
+ self.archives()
+ .filter(|desc| desc.prefix == contract_prefix)
+ .flat_map(ContractArchive::fetch)
+ .find_map(|archive| archive.get_contract(contract_id).ok())
+ })
+ }
+
+ pub fn archives(&self) -> impl Iterator<Item = &ArchiveDescriptor> {
+ self.archive_descriptors.iter()
+ }
+
+ pub fn archive_count(&self) -> usize {
+ self.archive_descriptors.len()
+ }
+}
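+
+ // Minimal usage sketch (illustrative; `chain_id` and `options` would come from
+ // the CLI types in `crate::command`):
+ //
+ //     let manifest = Manifest::new(chain_id, &options)?;
+ //     println!("{} archive(s) to process", manifest.archive_count());
+ //     for desc in manifest.archives() {
+ //         let archive = ContractArchive::fetch(desc)?;
+ //         // ... test every contract in `archive` ...
+ //     }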
+
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+pub enum MatchType {
+ Full,
+ Partial,
+}
+
+impl MatchType {
+ pub fn dir_name(self) -> &'static str {
+ match self {
+ MatchType::Full => "full_match",
+ MatchType::Partial => "partial_match",
+ }
+ }
+}
+
+/// Describes an archive that's available in the Sourcify repository.
+/// Can be used by `ContractArchive::fetch()` to download this archive.
+pub struct ArchiveDescriptor {
+ pub prefix: u8,
+ pub chain_id: ChainId,
+ pub match_type: MatchType,
+ /// GET this url to fetch the `ContractArchive` for this `ArchiveDescriptor`.
+ pub url: String,
+}
+
+impl ArchiveDescriptor {
+ fn new(base_url: &str, path_str: &str) -> Result<ArchiveDescriptor> {
+ // File path should come in this format:
+ // /sourcify-repository-2025-03-24T03-00-26/full_match.1.00.tar.gz
+ //                                                     - --
+ //                                                     | | shard prefix
+ //                                                     | chain ID
+ let mut parts = path_str.split('.');
+ let name_prefix = parts.next().unwrap();
+ let match_type = if name_prefix.ends_with("full_match") {
+ MatchType::Full
+ } else if name_prefix.ends_with("partial_match") {
+ MatchType::Partial
+ } else {
+ bail!("Invalid match type in archive path: {}", path_str);
+ };
+
+ let chain_id_part = parts.next().ok_or(Error::msg("Failed to get chain ID"))?;
+ let chain_id: ChainId = chain_id_part.parse()?;
+
+ let prefix_part = parts
+ .next()
+ .ok_or(Error::msg("Failed to get shard prefix"))?;
+ let prefix = u8::from_str_radix(prefix_part, 16)?;
+
+ Ok(ArchiveDescriptor {
+ url: format!("{base_url}{path_str}"),
+ prefix,
+ chain_id,
+ match_type,
+ })
+ }
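+
+ // For the sample path documented above, `new` would produce (illustrative):
+ // url:        "https://repo-backup.sourcify.dev/sourcify-repository-2025-03-24T03-00-26/full_match.1.00.tar.gz"
+ // prefix:     0x00
+ // chain_id:   1
+ // match_type: MatchType::Full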
+
+ fn matches_chain_and_shard(&self, chain_id: ChainId, options: &ShardingOptions) -> bool {
+ if self.match_type == MatchType::Partial && options.exclude_partial_matches {
+ return false;
+ }
+
+ if self.chain_id != chain_id {
+ return false;
+ }
+
+ if !options.get_id_range().contains(&self.prefix) {
+ return false;
+ }
+
+ true
+ }
+
+ /// Get a path that should be used as the target when unpacking the archive
+ /// represented by this `ArchiveDescriptor`.
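+ /// For chain 1 and shard prefix `0x00`, this would resolve to
+ /// `<crate root>/target/sourcify_1/00` (illustrative).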
+ fn archive_dir(&self) -> PathBuf {
+ CargoWorkspace::locate_source_crate("solidity_testing_sourcify")
+ .unwrap_or_default()
+ .join(format!(
+ "target/sourcify_{chain_id}/{prefix:02x}",
+ chain_id = self.chain_id,
+ prefix = self.prefix,
+ ))
+ }
+
+ /// Get the path inside `self.archive_dir()` that contains all of the contracts.
+ /// This path is defined by the archives fetched from Sourcify, and should be updated
+ /// in case Sourcify ever changes its repository format.
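+ /// e.g. `<archive dir>/repository/full_match/1` for a full match on chain 1 (illustrative).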
+ fn contracts_dir(&self) -> PathBuf {
+ self.archive_dir().join(format!(
+ "repository/{match_type}/{chain_id}",
+ match_type = self.match_type.dir_name(),
+ chain_id = self.chain_id,
+ ))
+ }
+}
+
+#[derive(Clone)]
+pub struct ContractArchive {
+ /// Path to the directory inside this archive which contains all of the contracts.
+ /// Iterate over the entries at this path to read the contracts.
+ contracts_path: PathBuf,
+}
+
+impl ContractArchive {
+ pub fn fetch(desc: &ArchiveDescriptor) -> Result<ContractArchive> {
+ let client = Client::new();
+ let res = client.get(&desc.url).send()?;
+
+ let status = res.status();
+ if !status.is_success() {
+ bail!("Could not fetch source tarball");
+ }
+
+ let archive_dir = desc.archive_dir();
+
+ let mut archive = Archive::new(res);
+ archive.unpack(&archive_dir)?;
+
+ Ok(ContractArchive {
+ contracts_path: desc.contracts_dir(),
+ })
+ }
+
+ pub fn contracts(&self) -> impl Iterator<Item = Contract> {
+ let dir = fs::read_dir(&self.contracts_path).expect("Could not open contract directory.");
+ dir.flatten().flat_map(|dir_entry| -> Result<Contract> {
+ let contract_path = dir_entry.path();
+ let contract = Contract::new(&contract_path)
+ .inspect_err(|e| println!("Failed to create contract: {e}"))?;
+
+ Ok(contract)
+ })
+ }
+
+ pub fn get_contract(&self, contract_id: &str) -> Result<Contract> {
+ let contract_path = self.contracts_path.join(contract_id);
+ Contract::new(&contract_path)
+ }
+
+ pub fn contract_count(&self) -> usize {
+ fs::read_dir(&self.contracts_path)
+ .map(|i| i.count())
+ .unwrap_or(0)
+ }
+
+ pub fn clean(self) {
+ fs::remove_dir_all(&self.contracts_path).expect("Could not remove contract directory.");
+ }
+
+ pub fn display_path(&self) -> String {
+ self.contracts_path
+ .strip_repo_root()
+ .unwrap_or(&self.contracts_path)
+ .to_str()
+ .unwrap()
+ .into()
+ }
+}
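+
+ // Typical consumption pattern (illustrative sketch; `desc` is an
+ // `ArchiveDescriptor` from the manifest, and error handling is elided):
+ //
+ //     let archive = ContractArchive::fetch(&desc)?;
+ //     for contract in archive.contracts() {
+ //         let unit = contract.create_compilation_unit()?;
+ //         // ... check `unit` for parse/binding errors ...
+ //     }
+ //     archive.clean();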
+
+/// A single contract, found inside a `ContractArchive`. Source files for this contract have not been read or
+/// processed, but can be found in the directory at `sources_path`.
+pub struct Contract {
+ pub name: String,
+ pub version: Version,
+ pub target: String,
+ pub import_resolver: ImportResolver,
+
+ sources_path: PathBuf,
+}
+
+impl Contract {
+ fn new(contract_path: &Path) -> Result<Contract> {
+ let name = contract_path
+ .file_name()
+ .unwrap()
+ .to_str()
+ .ok_or(Error::msg("Could not get contract directory name"))?;
+
+ let metadata_file = fs::File::open(contract_path.join("metadata.json"))?;
+ let reader = BufReader::new(metadata_file);
+
+ let metadata_val: serde_json::Value = serde_json::from_reader(reader)?;
+
+ let version = metadata_val
+ .get("compiler")
+ .and_then(|compiler| compiler.get("version"))
+ .and_then(|version_val| version_val.as_str())
+ .ok_or(Error::msg(
+ "Could not get compiler.version from contract metadata",
+ ))
+ .and_then(|version_str| Version::parse(version_str).map_err(Error::new))
+ .map(|mut version| {
+ version.pre = Prerelease::EMPTY;
+ version.build = BuildMetadata::EMPTY;
+ version
+ })?;
+
+ let target = metadata_val
+ .get("settings")
+ .and_then(|settings| settings.get("compilationTarget"))
+ .and_then(|target| target.as_object())
+ .and_then(|target_obj| target_obj.keys().next())
+ .ok_or(Error::msg(
+ "Could not get settings.compilationTarget from contract metadata",
+ ))?
+ .clone();
+
+ let import_resolver: ImportResolver = metadata_val.try_into()?;
+
+ Ok(Contract {
+ target,
+ version,
+ import_resolver,
+ sources_path: contract_path.join("sources"),
+ name: name.into(),
+ })
+ }
+
+ /// Create a `CompilationUnit` for this contract. This includes all available source files and resolves
+ /// imports, accounting for file remapping/renaming. The resulting `CompilationUnit` is ready to check for
+ /// errors.
+ pub fn create_compilation_unit(&self) -> Result<CompilationUnit> {
+ CompilationBuilder::new(self)?.build()
+ }
+
+ pub fn entrypoint(&self) -> Option<String> {
+ self.import_resolver.get_source_id(&self.target)
+ }
+
+ pub fn read_file(&self, name: &str) -> Result<String> {
+ self.sources_path.join(name).read_to_string()
+ }
+
+ pub fn sources_count(&self) -> usize {
+ self.import_resolver.sources_count()
+ }
+}
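+
+ // Reading a contract's compilation target source (illustrative; assumes the
+ // target file is present under `sources/`):
+ //
+ //     if let Some(entrypoint) = contract.entrypoint() {
+ //         let source = contract.read_file(&entrypoint)?;
+ //         println!("[{}] {}: {} bytes", contract.version, contract.target, source.len());
+ //     }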