diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..7cb294e --- /dev/null +++ b/.gitattributes @@ -0,0 +1,3 @@ +# Preserve line endings in test data files (hashes depend on exact content) +**/test_data/** binary +**/fixtures/** binary diff --git a/.github/workflows/interop.yml b/.github/workflows/interop.yml new file mode 100644 index 0000000..f539a7b --- /dev/null +++ b/.github/workflows/interop.yml @@ -0,0 +1,314 @@ +name: Interoperability Tests + +on: + push: + branches: + - main + pull_request: + workflow_dispatch: + +env: + CARGO_TERM_COLOR: always + +jobs: + interop: + name: Interoperability Tests + runs-on: ubuntu-latest + permissions: + id-token: write # Required for OIDC token + contents: read + steps: + - uses: actions/checkout@v4 + + - uses: dtolnay/rust-toolchain@1.89.0 + + - name: Install Cosign + uses: sigstore/cosign-installer@v4.0.0 + with: + cosign-release: 'v3.0.2' + + - name: Build release binaries + run: cargo build --release -p sigstore-sign -p sigstore-verify --examples + + # ========================================= + # Setup: Create test artifacts + # ========================================= + - name: Create test artifacts + run: | + echo "Hello from sigstore-rust interop test at $(date)" > test-artifact.txt + echo "Hello from sigstore-rust interop test V2 at $(date)" > test-artifact-v2.txt + echo "Digest-based verification test at $(date)" > digest-test.txt + echo "Test package content for attestation at $(date)" > test-package.txt + + echo "Test artifacts created:" + sha256sum test-artifact.txt test-artifact-v2.txt digest-test.txt test-package.txt + + # Store digest for later use + DIGEST=$(sha256sum digest-test.txt | cut -d' ' -f1) + echo "ARTIFACT_DIGEST=sha256:${DIGEST}" >> $GITHUB_ENV + + # ========================================= + # REKOR V1: Blob Signing Tests + # ========================================= + - name: "[V1] Sign with sigstore-rust" + run: | + ./target/release/examples/sign_blob test-artifact.txt -o rust-signed.sigstore.json + echo "Bundle created:" + cat rust-signed.sigstore.json | jq -r '.mediaType' + + - name: "[V1] Verify sigstore-rust signature with cosign" + run: | + cosign verify-blob \ + --bundle rust-signed.sigstore.json \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-artifact.txt + echo "✅ Cosign successfully verified sigstore-rust signature (V1)" + + - name: "[V1] Verify sigstore-rust signature with sigstore-rust" + run: | + ./target/release/examples/verify_bundle \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-artifact.txt rust-signed.sigstore.json + echo "✅ sigstore-rust successfully verified its own signature (V1)" + + - name: "[V1] Sign with cosign" + run: | + cosign version + cosign sign-blob \ + --yes \ + --bundle cosign-signed.sigstore.json \ + test-artifact.txt + echo "Bundle created:" + cat cosign-signed.sigstore.json | jq -r '.mediaType' + + - name: "[V1] Verify cosign signature with sigstore-rust" + run: | + ./target/release/examples/verify_bundle \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-artifact.txt cosign-signed.sigstore.json + echo "✅ sigstore-rust successfully verified cosign signature (V1)" + + - name: "[V1] Verify cosign signature with cosign (sanity check)" + run: | + cosign verify-blob \ + --bundle cosign-signed.sigstore.json \ + 
--certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-artifact.txt + echo "✅ Cosign successfully verified its own signature (V1)" + + # ========================================= + # REKOR V2: Blob Signing Tests + # ========================================= + - name: "[V2] Sign with sigstore-rust --v2" + run: | + ./target/release/examples/sign_blob --v2 test-artifact-v2.txt -o rust-signed-v2.sigstore.json + echo "Bundle created:" + cat rust-signed-v2.sigstore.json | jq -r '.mediaType' + echo "Entry kind/version:" + cat rust-signed-v2.sigstore.json | jq -r '.verificationMaterial.tlogEntries[0].kindVersion' + + - name: "[V2] Verify sigstore-rust V2 signature with cosign" + run: | + # V2 bundles use RFC3161 timestamps instead of integratedTime + cosign verify-blob \ + --bundle rust-signed-v2.sigstore.json \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + --use-signed-timestamps \ + test-artifact-v2.txt + echo "✅ Cosign successfully verified sigstore-rust V2 signature" + + - name: "[V2] Verify sigstore-rust V2 signature with sigstore-rust" + run: | + ./target/release/examples/verify_bundle \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-artifact-v2.txt rust-signed-v2.sigstore.json + echo "✅ sigstore-rust successfully verified its own V2 signature" + + - name: "[V2] Sign with cosign" + run: | + cosign sign-blob \ + --yes \ + --bundle cosign-signed-v2.sigstore.json \ + test-artifact-v2.txt + echo "Bundle created:" + cat cosign-signed-v2.sigstore.json | jq -r '.mediaType' + + - name: "[V2] Verify cosign V2 signature with sigstore-rust" + run: | + ./target/release/examples/verify_bundle \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-artifact-v2.txt cosign-signed-v2.sigstore.json + echo "✅ sigstore-rust successfully verified cosign V2 signature" + + - name: "[V2] Verify cosign V2 signature with cosign (sanity check)" + run: | + cosign verify-blob \ + --bundle cosign-signed-v2.sigstore.json \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-artifact-v2.txt + echo "✅ Cosign successfully verified its own V2 signature" + + # ========================================= + # DIGEST: Digest-based Verification Tests + # ========================================= + - name: "[Digest] Sign with sigstore-rust" + run: | + ./target/release/examples/sign_blob digest-test.txt -o digest-bundle.sigstore.json + + - name: "[Digest] Verify with cosign using digest" + run: | + cosign verify-blob \ + --bundle digest-bundle.sigstore.json \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + ${{ env.ARTIFACT_DIGEST }} + echo "✅ Cosign verified using digest" + + - name: "[Digest] Verify with sigstore-rust using digest" + run: | + ./target/release/examples/verify_bundle \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + ${{ env.ARTIFACT_DIGEST }} digest-bundle.sigstore.json + echo "✅ sigstore-rust verified using digest" + + - name: "[Digest] Sign with cosign" + run: | + cosign sign-blob \ + --yes \ + --bundle cosign-digest-bundle.sigstore.json \ + digest-test.txt + + - name: "[Digest] Verify cosign bundle with sigstore-rust using digest" + run: 
| + ./target/release/examples/verify_bundle \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + ${{ env.ARTIFACT_DIGEST }} cosign-digest-bundle.sigstore.json + echo "✅ sigstore-rust verified cosign bundle using digest" + + # ========================================= + # DSSE: Attestation Tests + # ========================================= + - name: "[DSSE] Attest with sigstore-rust" + run: | + ./target/release/examples/sign_attestation \ + --channel "https://example.com/test-channel" \ + test-package.txt -o rust-attestation.sigstore.json + echo "Attestation bundle created:" + cat rust-attestation.sigstore.json | jq -r '.mediaType' + echo "Entry kind:" + cat rust-attestation.sigstore.json | jq -r '.verificationMaterial.tlogEntries[0].kindVersion.kind' + + - name: "[DSSE] Verify sigstore-rust attestation with cosign" + run: | + cosign verify-blob-attestation \ + --bundle rust-attestation.sigstore.json \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + --type "https://schemas.conda.org/attestations-publish-1.schema.json" \ + test-package.txt + echo "✅ Cosign successfully verified sigstore-rust attestation" + + - name: "[DSSE] Verify sigstore-rust attestation with sigstore-rust" + run: | + ./target/release/examples/verify_conda_attestation \ + test-package.txt rust-attestation.sigstore.json + echo "✅ sigstore-rust successfully verified its own attestation" + + - name: "[DSSE] Attest with cosign" + run: | + # Create a predicate for cosign + cat > predicate.json << EOF + { + "targetChannel": "https://example.com/cosign-test-channel" + } + EOF + + cosign attest-blob \ + --yes \ + --bundle cosign-attestation.sigstore.json \ + --predicate predicate.json \ + --type "https://schemas.conda.org/attestations-publish-1.schema.json" \ + test-package.txt + echo "Attestation bundle created:" + cat cosign-attestation.sigstore.json | jq -r '.mediaType' + + - name: "[DSSE] Verify cosign attestation with sigstore-rust" + run: | + ./target/release/examples/verify_conda_attestation \ + test-package.txt cosign-attestation.sigstore.json + echo "✅ sigstore-rust successfully verified cosign attestation" + + - name: "[DSSE] Verify cosign attestation with cosign (sanity check)" + run: | + cosign verify-blob-attestation \ + --bundle cosign-attestation.sigstore.json \ + --certificate-identity-regexp ".*" \ + --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ + --type "https://schemas.conda.org/attestations-publish-1.schema.json" \ + test-package.txt + echo "✅ Cosign successfully verified its own attestation" + + # ========================================= + # SIGSTORE-PYTHON: Cross-implementation verification + # ========================================= + - name: Install sigstore-python + run: | + pip install sigstore + sigstore --version + + - name: "[Python] Verify sigstore-rust V1 bundle" + run: | + # GitHub Actions identity is the workflow file URL + IDENTITY="https://github.com/${{ github.repository }}/.github/workflows/interop.yml@${{ github.ref }}" + echo "Expected identity: $IDENTITY" + sigstore verify identity \ + --bundle rust-signed.sigstore.json \ + --cert-identity "$IDENTITY" \ + --cert-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-artifact.txt + echo "✅ sigstore-python verified sigstore-rust V1 bundle" + + - name: "[Python] Verify sigstore-rust V2 bundle" + run: | + IDENTITY="https://github.com/${{ github.repository 
}}/.github/workflows/interop.yml@${{ github.ref }}" + sigstore verify identity \ + --bundle rust-signed-v2.sigstore.json \ + --cert-identity "$IDENTITY" \ + --cert-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-artifact-v2.txt + echo "✅ sigstore-python verified sigstore-rust V2 bundle" + + - name: "[Python] Verify sigstore-rust attestation" + run: | + IDENTITY="https://github.com/${{ github.repository }}/.github/workflows/interop.yml@${{ github.ref }}" + sigstore verify identity \ + --bundle rust-attestation.sigstore.json \ + --cert-identity "$IDENTITY" \ + --cert-oidc-issuer "https://token.actions.githubusercontent.com" \ + test-package.txt + echo "✅ sigstore-python verified sigstore-rust attestation" + + # ========================================= + # BUNDLE FORMAT: Structural compatibility checks + # ========================================= + - name: "[Format] Check sigstore-rust V1 bundle format" + run: python scripts/compare_bundles.py --check rust-signed.sigstore.json + + - name: "[Format] Check sigstore-rust V2 bundle format" + run: python scripts/compare_bundles.py --check rust-signed-v2.sigstore.json + + - name: "[Format] Compare V1 bundles (sigstore-rust vs cosign)" + run: python scripts/compare_bundles.py rust-signed.sigstore.json cosign-signed.sigstore.json + + - name: "[Format] Compare attestation bundles (sigstore-rust vs cosign)" + run: python scripts/compare_bundles.py rust-attestation.sigstore.json cosign-attestation.sigstore.json diff --git a/Cargo.lock b/Cargo.lock index 46e18da..366eeea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1688,6 +1688,7 @@ dependencies = [ "const-oid", "hex", "pem", + "regex", "rustls-pki-types", "rustls-webpki", "serde", diff --git a/Cargo.toml b/Cargo.toml index 430b1cc..27fae93 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -69,6 +69,7 @@ chrono = { version = "0.4", features = ["serde"] } # Testing hex = "0.4" rstest = { version = "0.26", default-features = false } +regex = "1.11" # Logging tracing = { version = "0.1" } diff --git a/crates/sigstore-bundle/src/builder.rs b/crates/sigstore-bundle/src/builder.rs index 04e54cb..5be9920 100644 --- a/crates/sigstore-bundle/src/builder.rs +++ b/crates/sigstore-bundle/src/builder.rs @@ -7,8 +7,8 @@ use sigstore_types::{ MessageSignature, Rfc3161Timestamp, SignatureContent, TimestampVerificationData, TransparencyLogEntry, VerificationMaterial, VerificationMaterialContent, }, - Bundle, CanonicalizedBody, DerCertificate, DsseEnvelope, LogKeyId, MediaType, Sha256Hash, - SignatureBytes, SignedTimestamp, TimestampToken, + Bundle, CanonicalizedBody, DerCertificate, DsseEnvelope, LogIndex, LogKeyId, MediaType, + Sha256Hash, SignatureBytes, SignedTimestamp, TimestampToken, }; /// Verification material for v0.3 bundles. @@ -133,11 +133,11 @@ impl BundleV03 { /// Helper to create a transparency log entry. 
pub struct TlogEntryBuilder { - log_index: u64, + log_index: i64, log_id: String, kind: String, kind_version: String, - integrated_time: u64, + integrated_time: i64, canonicalized_body: Vec, inclusion_promise: Option, inclusion_proof: Option, @@ -175,11 +175,11 @@ impl TlogEntryBuilder { .unwrap_or_else(|_| entry.log_id.to_string()); let mut builder = Self { - log_index: entry.log_index as u64, + log_index: entry.log_index, log_id: log_id_base64, kind: kind.to_string(), kind_version: version.to_string(), - integrated_time: entry.integrated_time as u64, + integrated_time: entry.integrated_time, canonicalized_body: entry.body.as_bytes().to_vec(), inclusion_promise: None, inclusion_proof: None, @@ -207,9 +207,9 @@ impl TlogEntryBuilder { .collect(); builder.inclusion_proof = Some(InclusionProof { - log_index: proof.log_index.to_string().into(), + log_index: LogIndex::new(proof.log_index), root_hash, - tree_size: proof.tree_size.to_string(), + tree_size: proof.tree_size, hashes, checkpoint: CheckpointData { envelope: proof.checkpoint.clone(), @@ -222,13 +222,13 @@ impl TlogEntryBuilder { } /// Set the log index. - pub fn log_index(mut self, index: u64) -> Self { + pub fn log_index(mut self, index: i64) -> Self { self.log_index = index; self } /// Set the integrated time (Unix timestamp). - pub fn integrated_time(mut self, time: u64) -> Self { + pub fn integrated_time(mut self, time: i64) -> Self { self.integrated_time = time; self } @@ -251,16 +251,16 @@ impl TlogEntryBuilder { /// * `checkpoint` - The checkpoint envelope pub fn inclusion_proof( mut self, - log_index: u64, + log_index: i64, root_hash: Sha256Hash, - tree_size: u64, + tree_size: i64, hashes: Vec, checkpoint: String, ) -> Self { self.inclusion_proof = Some(InclusionProof { - log_index: log_index.to_string().into(), + log_index: LogIndex::from(log_index), root_hash, - tree_size: tree_size.to_string(), + tree_size, hashes, checkpoint: CheckpointData { envelope: checkpoint, @@ -272,7 +272,7 @@ impl TlogEntryBuilder { /// Build the transparency log entry. 
pub fn build(self) -> TransparencyLogEntry { TransparencyLogEntry { - log_index: self.log_index.to_string().into(), + log_index: LogIndex::from(self.log_index), log_id: LogId { key_id: LogKeyId::new(self.log_id), }, @@ -280,13 +280,7 @@ impl TlogEntryBuilder { kind: self.kind, version: self.kind_version, }, - // For V2 entries, integrated_time is 0 and should be omitted from JSON - // (skip_serializing_if = "String::is_empty" handles this) - integrated_time: if self.integrated_time == 0 { - String::new() - } else { - self.integrated_time.to_string() - }, + integrated_time: self.integrated_time, inclusion_promise: self.inclusion_promise, inclusion_proof: self.inclusion_proof, canonicalized_body: CanonicalizedBody::new(self.canonicalized_body), diff --git a/crates/sigstore-bundle/src/validation.rs b/crates/sigstore-bundle/src/validation.rs index b96d7e9..1917a28 100644 --- a/crates/sigstore-bundle/src/validation.rs +++ b/crates/sigstore-bundle/src/validation.rs @@ -141,14 +141,14 @@ fn validate_inclusion_proofs(bundle: &Bundle) -> Result<()> { // Get proof hashes (already decoded as Vec) let proof_hashes: &[Sha256Hash] = &proof.hashes; - // Parse indices + // Get indices (now i64 internally) let leaf_index: u64 = proof .log_index .as_u64() - .map_err(|_| Error::Validation("invalid log_index in proof".to_string()))?; + .ok_or_else(|| Error::Validation("invalid log_index in proof".to_string()))?; let tree_size: u64 = proof .tree_size - .parse() + .try_into() .map_err(|_| Error::Validation("invalid tree_size in proof".to_string()))?; // Get expected root from checkpoint (already a Sha256Hash) diff --git a/crates/sigstore-bundle/tests/bundle_v3_tests.rs b/crates/sigstore-bundle/tests/bundle_v3_tests.rs index c87535a..23cacf1 100644 --- a/crates/sigstore-bundle/tests/bundle_v3_tests.rs +++ b/crates/sigstore-bundle/tests/bundle_v3_tests.rs @@ -73,8 +73,8 @@ fn test_parse_v3_bundle() { // Check tlog entries assert_eq!(bundle.verification_material.tlog_entries.len(), 1); let entry = &bundle.verification_material.tlog_entries[0]; - assert_eq!(entry.log_index, LogIndex::new("25915956".to_string())); - assert_eq!(entry.integrated_time, "1712085549"); + assert_eq!(entry.log_index, LogIndex::new(25915956)); + assert_eq!(entry.integrated_time, 1712085549); assert_eq!(entry.kind_version.kind, "hashedrekord"); assert_eq!(entry.kind_version.version, "0.0.1"); @@ -84,8 +84,8 @@ fn test_parse_v3_bundle() { // Check inclusion proof details let proof = entry.inclusion_proof.as_ref().unwrap(); - assert_eq!(proof.log_index, LogIndex::new("25901137".to_string())); - assert_eq!(proof.tree_size, "25901138"); + assert_eq!(proof.log_index, LogIndex::new(25901137)); + assert_eq!(proof.tree_size, 25901138); assert_eq!(proof.hashes.len(), 11); } @@ -225,7 +225,7 @@ fn test_inclusion_proof_verification() { // Verify the inclusion proof let leaf_index: u64 = proof.log_index.as_u64().unwrap(); - let tree_size: u64 = proof.tree_size.parse().unwrap(); + let tree_size: u64 = proof.tree_size.try_into().unwrap(); let result = verify_inclusion_proof(&leaf_hash, leaf_index, tree_size, proof_hashes, root_hash); diff --git a/crates/sigstore-sign/examples/sign_attestation.rs b/crates/sigstore-sign/examples/sign_attestation.rs new file mode 100644 index 0000000..5a0ce00 --- /dev/null +++ b/crates/sigstore-sign/examples/sign_attestation.rs @@ -0,0 +1,299 @@ +//! Example: Sign a conda package attestation with Sigstore +//! +//! This example demonstrates how to create an in-toto attestation for a conda package +//! 
using Sigstore's keyless signing, similar to what GitHub Actions produces. +//! +//! # Usage +//! +//! Sign a conda package (interactive OAuth flow): +//! ```sh +//! cargo run -p sigstore-sign --example sign_attestation -- \ +//! package.conda -o package.sigstore.json +//! ``` +//! +//! Sign with an identity token (e.g., from GitHub Actions): +//! ```sh +//! cargo run -p sigstore-sign --example sign_attestation -- \ +//! --token "$OIDC_TOKEN" \ +//! package.conda -o package.sigstore.json +//! ``` +//! +//! # In GitHub Actions +//! +//! ```yaml +//! jobs: +//! sign: +//! runs-on: ubuntu-latest +//! permissions: +//! id-token: write # Required for OIDC token +//! steps: +//! - uses: actions/checkout@v4 +//! - name: Sign package +//! run: cargo run -p sigstore-sign --example sign_attestation -- package.conda +//! ``` +//! +//! # Example with test data +//! +//! ```sh +//! cargo run -p sigstore-sign --example sign_attestation -- \ +//! crates/sigstore-verify/test_data/bundles/signed-package-2.1.0-hb0f4dca_0.conda +//! ``` + +use sigstore_oidc::{get_ambient_token, get_identity_token, is_ci_environment, IdentityToken}; +use sigstore_sign::{Attestation, SigningConfig, SigningContext}; + +use std::env; +use std::fs; +use std::path::Path; +use std::process; + +#[tokio::main] +async fn main() { + let args: Vec = env::args().collect(); + + // Parse arguments + let mut token: Option = None; + let mut output: Option = None; + let mut staging = false; + let mut channel: Option = None; + let mut positional: Vec = Vec::new(); + + let mut i = 1; + while i < args.len() { + match args[i].as_str() { + "--token" | "-t" => { + i += 1; + if i >= args.len() { + eprintln!("Error: --token requires a value"); + process::exit(1); + } + token = Some(args[i].clone()); + } + "--output" | "-o" => { + i += 1; + if i >= args.len() { + eprintln!("Error: --output requires a value"); + process::exit(1); + } + output = Some(args[i].clone()); + } + "--channel" | "-c" => { + i += 1; + if i >= args.len() { + eprintln!("Error: --channel requires a value"); + process::exit(1); + } + channel = Some(args[i].clone()); + } + "--staging" => { + staging = true; + } + "--help" | "-h" => { + print_usage(&args[0]); + process::exit(0); + } + arg if !arg.starts_with('-') => { + positional.push(arg.to_string()); + } + unknown => { + eprintln!("Error: Unknown option: {}", unknown); + print_usage(&args[0]); + process::exit(1); + } + } + i += 1; + } + + if positional.len() != 1 { + eprintln!("Error: Expected exactly 1 positional argument (package path)"); + print_usage(&args[0]); + process::exit(1); + } + + let package_path = &positional[0]; + let output_path = output.unwrap_or_else(|| format!("{}.sigstore.json", package_path)); + let target_channel = channel.unwrap_or_else(|| "https://example.com/my-channel".to_string()); + + // Read package + let package_bytes = match fs::read(package_path) { + Ok(data) => data, + Err(e) => { + eprintln!("Error reading package '{}': {}", package_path, e); + process::exit(1); + } + }; + + // Get package filename + let package_name = Path::new(package_path) + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or(package_path); + + println!("Creating attestation for: {}", package_path); + println!(" Package: {}", package_name); + println!(" Size: {} bytes", package_bytes.len()); + + // Compute package hash + let package_hash = sigstore_crypto::sha256(&package_bytes); + println!(" SHA256: {}", hex::encode(package_hash.as_bytes())); + + // Get identity token + let identity_token = match get_token(token).await { + 
Ok(t) => t, + Err(e) => { + eprintln!("Error obtaining identity token: {}", e); + process::exit(1); + } + }; + + println!(" Identity: {}", identity_token.subject()); + println!(" Issuer: {}", identity_token.issuer()); + + // Get signing config + let config = if staging { + println!(" Using: staging infrastructure"); + SigningConfig::staging() + } else { + println!(" Using: production infrastructure"); + SigningConfig::production() + }; + + println!(" Fulcio URL: {}", config.fulcio_url); + println!(" Rekor URL: {}", config.rekor_url); + if let Some(ref tsa_url) = config.tsa_url { + println!(" TSA URL: {}", tsa_url); + } + + // Create attestation using the high-level API + let predicate = serde_json::json!({ + "targetChannel": target_channel + }); + + let attestation = Attestation::new( + "https://schemas.conda.org/attestations-publish-1.schema.json", + predicate, + ) + .add_subject(package_name, package_hash); + + println!("\nIn-Toto Statement:"); + println!(" Type: https://in-toto.io/Statement/v1"); + println!(" Predicate Type: https://schemas.conda.org/attestations-publish-1.schema.json"); + println!( + " Subject: {} (sha256:{}...)", + package_name, + &hex::encode(package_hash.as_bytes())[..16] + ); + + // Create signing context and sign + let context = SigningContext::with_config(config); + let signer = context.signer(identity_token); + + println!("\nSigning attestation..."); + let bundle = match signer.sign_attestation(attestation).await { + Ok(b) => b, + Err(e) => { + eprintln!("Error signing attestation: {}", e); + process::exit(1); + } + }; + + // Write bundle + let bundle_json = bundle.to_json_pretty().expect("Failed to serialize bundle"); + fs::write(&output_path, &bundle_json).expect("Failed to write bundle"); + + println!("\nAttestation created successfully!"); + println!(" Bundle: {}", output_path); + println!(" Media Type: {}", bundle.media_type); + + // Print tlog entry info + if let Some(entry) = bundle.verification_material.tlog_entries.first() { + println!( + " Entry Kind: {} v{}", + entry.kind_version.kind, entry.kind_version.version + ); + println!(" Log Index: {}", entry.log_index); + let ts = entry.integrated_time; + if ts == 0 { + println!(" Integrated Time: (uses RFC3161 timestamps)"); + } else { + use chrono::{DateTime, Utc}; + if let Some(dt) = DateTime::::from_timestamp(ts, 0) { + println!(" Integrated Time: {}", dt); + } + } + } + + // Print RFC3161 timestamp info + let ts_count = bundle + .verification_material + .timestamp_verification_data + .rfc3161_timestamps + .len(); + if ts_count > 0 { + println!(" RFC3161 Timestamps: {}", ts_count); + } + + println!("\nVerify with:"); + println!( + " cargo run -p sigstore-verify --example verify_conda_attestation -- {} {}", + package_path, output_path + ); +} + +async fn get_token(explicit_token: Option) -> Result { + if let Some(token_str) = explicit_token { + return IdentityToken::from_jwt(&token_str).map_err(|e| format!("Invalid token: {}", e)); + } + + if is_ci_environment() { + println!(" Detected CI environment, using ambient credentials"); + return get_ambient_token() + .await + .map_err(|e| format!("Failed to get ambient token: {}", e)); + } + + println!(" Starting interactive authentication..."); + println!(); + + get_identity_token(|response| { + println!("Please visit: {}", response.verification_uri); + if let Some(complete_uri) = &response.verification_uri_complete { + println!("Or open: {}", complete_uri); + } + println!(); + println!("Enter code: {}", response.user_code); + println!(); + println!("Waiting 
for authentication..."); + }) + .await + .map_err(|e| format!("OAuth failed: {}", e)) +} + +fn print_usage(program: &str) { + eprintln!("Usage: {} [OPTIONS] ", program); + eprintln!(); + eprintln!("Create a Sigstore attestation for a conda package."); + eprintln!(); + eprintln!("Arguments:"); + eprintln!(" Path to the .conda package file"); + eprintln!(); + eprintln!("Options:"); + eprintln!(" -o, --output Output bundle path (default: .sigstore.json)"); + eprintln!(" -t, --token OIDC identity token (skips interactive auth)"); + eprintln!(" -c, --channel Target channel URL for the attestation"); + eprintln!(" --staging Use Sigstore staging infrastructure"); + eprintln!(" -h, --help Print this help message"); + eprintln!(); + eprintln!("Examples:"); + eprintln!(" # Sign interactively (opens browser for OAuth)"); + eprintln!(" {} package.conda", program); + eprintln!(); + eprintln!(" # Sign with explicit output path"); + eprintln!( + " {} package.conda -o my-attestation.sigstore.json", + program + ); + eprintln!(); + eprintln!(" # Sign with a pre-obtained token"); + eprintln!(" {} --token \"$OIDC_TOKEN\" package.conda", program); +} diff --git a/crates/sigstore-sign/examples/sign_blob.rs b/crates/sigstore-sign/examples/sign_blob.rs index 192b1d0..7dfd0c8 100644 --- a/crates/sigstore-sign/examples/sign_blob.rs +++ b/crates/sigstore-sign/examples/sign_blob.rs @@ -196,14 +196,13 @@ async fn main() { ); println!(" Log Index: {}", entry.log_index); // For V2, integrated_time is always 0 - RFC3161 timestamps are used instead - if let Ok(ts) = entry.integrated_time.parse::() { - if ts == 0 && entry.kind_version.version == "0.0.2" { - println!(" Integrated Time: (V2 uses RFC3161 timestamps)"); - } else { - use chrono::{DateTime, Utc}; - if let Some(dt) = DateTime::::from_timestamp(ts, 0) { - println!(" Integrated Time: {}", dt); - } + let ts = entry.integrated_time; + if ts == 0 && entry.kind_version.version == "0.0.2" { + println!(" Integrated Time: (V2 uses RFC3161 timestamps)"); + } else { + use chrono::{DateTime, Utc}; + if let Some(dt) = DateTime::::from_timestamp(ts, 0) { + println!(" Integrated Time: {}", dt); } } // Show if we have inclusion proof (V2) vs just promise (V1) diff --git a/crates/sigstore-types/src/bundle.rs b/crates/sigstore-types/src/bundle.rs index f9694b2..95f1a74 100644 --- a/crates/sigstore-types/src/bundle.rs +++ b/crates/sigstore-types/src/bundle.rs @@ -7,7 +7,7 @@ use crate::checkpoint::Checkpoint; use crate::dsse::DsseEnvelope; use crate::encoding::{ - CanonicalizedBody, DerCertificate, LogIndex, LogKeyId, Sha256Hash, SignatureBytes, + string_i64, CanonicalizedBody, DerCertificate, LogIndex, LogKeyId, Sha256Hash, SignatureBytes, SignedTimestamp, TimestampToken, }; use crate::error::{Error, Result}; @@ -25,6 +25,11 @@ where Ok(opt.unwrap_or_default()) } +/// Helper for skip_serializing_if to check if i64 is zero +fn is_zero(value: &i64) -> bool { + *value == 0 +} + /// Sigstore bundle media types #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum MediaType { @@ -234,8 +239,9 @@ pub struct TransparencyLogEntry { /// Kind and version of the entry pub kind_version: KindVersion, /// Integrated time (Unix timestamp) - #[serde(default, skip_serializing_if = "String::is_empty")] - pub integrated_time: String, + /// For Rekor V2 entries, this field may be omitted (defaults to 0) + #[serde(default, with = "string_i64", skip_serializing_if = "is_zero")] + pub integrated_time: i64, /// Inclusion promise (Signed Entry Timestamp) #[serde(skip_serializing_if = "Option::is_none")] 
pub inclusion_promise: Option, @@ -281,7 +287,8 @@ pub struct InclusionProof { /// Root hash of the tree pub root_hash: Sha256Hash, /// Tree size at time of proof - pub tree_size: String, + #[serde(with = "string_i64")] + pub tree_size: i64, /// Hashes in the inclusion proof path #[serde(with = "sha256_hash_vec")] pub hashes: Vec, @@ -349,6 +356,11 @@ pub struct Rfc3161Timestamp { pub signed_timestamp: TimestampToken, } +/// Default media type for bundles that don't specify one (pre-v0.1 format) +fn default_media_type() -> String { + "application/vnd.dev.sigstore.bundle+json;version=0.1".to_string() +} + // Custom Deserialize implementation for Bundle impl<'de> Deserialize<'de> for Bundle { fn deserialize(deserializer: D) -> std::result::Result @@ -358,6 +370,8 @@ impl<'de> Deserialize<'de> for Bundle { #[derive(Deserialize)] #[serde(rename_all = "camelCase")] struct BundleHelper { + // Cosign V1 bundles may not have mediaType - default to v0.1 + #[serde(default = "default_media_type")] media_type: String, verification_material: VerificationMaterial, #[serde(flatten)] diff --git a/crates/sigstore-types/src/encoding.rs b/crates/sigstore-types/src/encoding.rs index 630015f..cd69180 100644 --- a/crates/sigstore-types/src/encoding.rs +++ b/crates/sigstore-types/src/encoding.rs @@ -90,6 +90,30 @@ pub mod hex_bytes { } } +/// Serde helper for i64 fields serialized as strings +/// +/// JSON bundles use strings for large integers. This helper serializes +/// i64 values as strings and parses them back. +pub mod string_i64 { + use serde::{Deserialize, Deserializer, Serializer}; + + pub fn serialize(value: &i64, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&value.to_string()) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + s.parse::() + .map_err(|_| serde::de::Error::custom(format!("invalid integer: {}", s))) + } +} + // ============================================================================ // Macro for creating base64-encoded newtype wrappers // ============================================================================ @@ -381,52 +405,41 @@ impl std::fmt::Display for EntryUuid { } } -/// Transparency log index (numeric string) +/// Transparency log index /// -/// Represents a log index in the transparency log. -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(transparent)] -pub struct LogIndex(String); +/// Represents a log index in the transparency log. Per the protobuf spec, +/// this is an int64. For JSON serialization, we serialize as an integer but +/// accept both integers and strings for backwards compatibility. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct LogIndex(i64); impl LogIndex { - pub fn new(s: String) -> Self { - LogIndex(s) - } - - pub fn from_u64(index: u64) -> Self { - LogIndex(index.to_string()) - } - - pub fn as_str(&self) -> &str { - &self.0 + pub fn new(index: i64) -> Self { + LogIndex(index) } - pub fn into_string(self) -> String { + pub fn value(&self) -> i64 { self.0 } - pub fn as_u64(&self) -> Result { - self.0 - .parse() - .map_err(|e| Error::InvalidEncoding(format!("invalid log index '{}': {}", self.0, e))) + pub fn as_u64(&self) -> Option { + if self.0 >= 0 { + Some(self.0 as u64) + } else { + None + } } } -impl From for LogIndex { - fn from(s: String) -> Self { - LogIndex::new(s) +impl From for LogIndex { + fn from(index: i64) -> Self { + LogIndex::new(index) } } impl From for LogIndex { fn from(index: u64) -> Self { - LogIndex::from_u64(index) - } -} - -impl AsRef for LogIndex { - fn as_ref(&self) -> &str { - &self.0 + LogIndex::new(index as i64) } } @@ -436,6 +449,61 @@ impl std::fmt::Display for LogIndex { } } +impl Serialize for LogIndex { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + // Serialize as string to match existing bundle format + serializer.serialize_str(&self.0.to_string()) + } +} + +impl<'de> Deserialize<'de> for LogIndex { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + use serde::de::{self, Visitor}; + + struct LogIndexVisitor; + + impl<'de> Visitor<'de> for LogIndexVisitor { + type Value = LogIndex; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("an integer or string representing a log index") + } + + fn visit_i64(self, value: i64) -> std::result::Result + where + E: de::Error, + { + Ok(LogIndex::new(value)) + } + + fn visit_u64(self, value: u64) -> std::result::Result + where + E: de::Error, + { + Ok(LogIndex::new(value as i64)) + } + + fn visit_str(self, value: &str) -> std::result::Result + where + E: de::Error, + { + value + .parse::() + .map(LogIndex::new) + .map_err(|_| de::Error::custom(format!("invalid log index: {}", value))) + } + } + + deserializer.deserialize_any(LogIndexVisitor) + } +} + /// Transparency log key ID /// /// Base64-encoded identifier for a transparency log (typically SHA-256 of public key). 
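The encoding changes above replace the string-backed `LogIndex` with an `i64` newtype and add the `string_i64` serde helper: both still serialize to JSON strings (matching existing bundle output), while the `LogIndex` deserializer additionally accepts bare integers. Below is a minimal round-trip sketch, not part of the diff, assuming `serde`/`serde_json` as dependencies and the `sigstore_types` re-exports added in `lib.rs` in the next hunk; the `Entry` struct and its snake_case field names are hypothetical stand-ins for the real bundle types, which use `rename_all = "camelCase"`.

```rust
use serde::{Deserialize, Serialize};
use sigstore_types::{string_i64, LogIndex};

// Hypothetical struct mirroring how `TransparencyLogEntry.log_index` and
// `InclusionProof.tree_size` use the new types after this change.
#[derive(Debug, Serialize, Deserialize)]
struct Entry {
    log_index: LogIndex,
    #[serde(with = "string_i64")]
    tree_size: i64,
}

fn main() -> Result<(), serde_json::Error> {
    // The LogIndex deserializer accepts both a bare JSON integer and a quoted string.
    let a: Entry = serde_json::from_str(r#"{"log_index": 25915956, "tree_size": "25901138"}"#)?;
    let b: Entry = serde_json::from_str(r#"{"log_index": "25915956", "tree_size": "25901138"}"#)?;
    assert_eq!(a.log_index, b.log_index);
    assert_eq!(a.log_index.as_u64(), Some(25915956));
    assert_eq!(a.tree_size, 25901138);

    // On output, both fields are written back as strings, matching existing bundles.
    let json = serde_json::to_string(&a)?;
    assert_eq!(json, r#"{"log_index":"25915956","tree_size":"25901138"}"#);
    Ok(())
}
```

This mirrors the handling that `tree_size` and `log_index` receive in `InclusionProof` and `TransparencyLogEntry` after this change, so bundles produced by older tooling (string-valued indices) and protobuf-JSON-style integers both parse.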
diff --git a/crates/sigstore-types/src/lib.rs b/crates/sigstore-types/src/lib.rs index 4a12a0c..8f44f78 100644 --- a/crates/sigstore-types/src/lib.rs +++ b/crates/sigstore-types/src/lib.rs @@ -21,9 +21,9 @@ pub use bundle::{ pub use checkpoint::{Checkpoint, CheckpointSignature}; pub use dsse::{pae, DsseEnvelope, DsseSignature}; pub use encoding::{ - base64_bytes, base64_bytes_option, hex_bytes, CanonicalizedBody, DerCertificate, DerPublicKey, - EntryUuid, HexHash, HexLogId, KeyHint, KeyId, LogIndex, LogKeyId, PayloadBytes, PemContent, - Sha256Hash, SignatureBytes, SignedTimestamp, TimestampToken, + base64_bytes, base64_bytes_option, hex_bytes, string_i64, CanonicalizedBody, DerCertificate, + DerPublicKey, EntryUuid, HexHash, HexLogId, KeyHint, KeyId, LogIndex, LogKeyId, PayloadBytes, + PemContent, Sha256Hash, SignatureBytes, SignedTimestamp, TimestampToken, }; pub use error::{Error, Result}; pub use hash::HashAlgorithm; diff --git a/crates/sigstore-verify/Cargo.toml b/crates/sigstore-verify/Cargo.toml index 7ee3ded..2f93462 100644 --- a/crates/sigstore-verify/Cargo.toml +++ b/crates/sigstore-verify/Cargo.toml @@ -41,6 +41,7 @@ serde_json = { workspace = true } const-oid = { workspace = true } x509-cert = { workspace = true } sigstore-types = { workspace = true } +regex = { workspace = true } [features] default = [] diff --git a/crates/sigstore-verify/examples/verify_bundle.rs b/crates/sigstore-verify/examples/verify_bundle.rs index b24df58..bf74c89 100644 --- a/crates/sigstore-verify/examples/verify_bundle.rs +++ b/crates/sigstore-verify/examples/verify_bundle.rs @@ -12,11 +12,27 @@ //! Verify with identity requirements: //! ```sh //! cargo run -p sigstore-verify --example verify_bundle -- \ -//! --identity "https://github.com/owner/repo/.github/workflows/release.yml@refs/tags/v1.0.0" \ -//! --issuer "https://token.actions.githubusercontent.com" \ +//! --certificate-identity "https://github.com/owner/repo/.github/workflows/release.yml@refs/tags/v1.0.0" \ +//! --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ //! artifact.txt artifact.sigstore.json //! ``` //! +//! Verify with regex matching (cosign-compatible): +//! ```sh +//! cargo run -p sigstore-verify --example verify_bundle -- \ +//! --certificate-identity-regexp ".*" \ +//! --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ +//! artifact.txt artifact.sigstore.json +//! ``` +//! +//! Verify using digest instead of file: +//! ```sh +//! cargo run -p sigstore-verify --example verify_bundle -- \ +//! --certificate-identity-regexp ".*" \ +//! --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \ +//! sha256:abc123... bundle.sigstore.json +//! ``` +//! //! # Getting a bundle from GitHub //! //! You can download attestation bundles from GitHub releases using the GitHub CLI: @@ -28,8 +44,9 @@ //! gh attestation verify --owner //! 
``` +use regex::Regex; use sigstore_trust_root::TrustedRoot; -use sigstore_types::Bundle; +use sigstore_types::{Artifact, Bundle, Sha256Hash}; use sigstore_verify::{verify, VerificationPolicy}; use std::env; @@ -41,24 +58,33 @@ fn main() { // Parse arguments let mut identity: Option = None; + let mut identity_regexp: Option = None; let mut issuer: Option = None; let mut positional: Vec = Vec::new(); let mut i = 1; while i < args.len() { match args[i].as_str() { - "--identity" | "-i" => { + "--identity" | "-i" | "--certificate-identity" => { i += 1; if i >= args.len() { - eprintln!("Error: --identity requires a value"); + eprintln!("Error: --certificate-identity requires a value"); process::exit(1); } identity = Some(args[i].clone()); } - "--issuer" | "-o" => { + "--certificate-identity-regexp" => { i += 1; if i >= args.len() { - eprintln!("Error: --issuer requires a value"); + eprintln!("Error: --certificate-identity-regexp requires a value"); + process::exit(1); + } + identity_regexp = Some(args[i].clone()); + } + "--issuer" | "-o" | "--certificate-oidc-issuer" => { + i += 1; + if i >= args.len() { + eprintln!("Error: --certificate-oidc-issuer requires a value"); process::exit(1); } issuer = Some(args[i].clone()); @@ -80,22 +106,16 @@ fn main() { } if positional.len() != 2 { - eprintln!("Error: Expected exactly 2 positional arguments (artifact and bundle)"); + eprintln!("Error: Expected exactly 2 positional arguments (artifact/digest and bundle)"); print_usage(&args[0]); process::exit(1); } - let artifact_path = &positional[0]; + let artifact_or_digest = &positional[0]; let bundle_path = &positional[1]; - // Read artifact - let artifact = match fs::read(artifact_path) { - Ok(data) => data, - Err(e) => { - eprintln!("Error reading artifact '{}': {}", artifact_path, e); - process::exit(1); - } - }; + // Check if artifact is a digest (sha256:...) + let is_digest = artifact_or_digest.starts_with("sha256:"); // Read bundle let bundle_json = match fs::read_to_string(bundle_path) { @@ -135,7 +155,11 @@ fn main() { // Print bundle info println!("Verifying bundle..."); - println!(" Artifact: {}", artifact_path); + if is_digest { + println!(" Digest: {}", artifact_or_digest); + } else { + println!(" Artifact: {}", artifact_or_digest); + } println!(" Bundle: {}", bundle_path); println!(" Media Type: {}", bundle.media_type); if let Ok(v) = bundle.version() { @@ -144,13 +168,62 @@ fn main() { if let Some(id) = &identity { println!(" Required Identity: {}", id); } + if let Some(re) = &identity_regexp { + println!(" Required Identity Regexp: {}", re); + } if let Some(iss) = &issuer { println!(" Required Issuer: {}", iss); } // Verify - match verify(&artifact, &bundle, &policy, &trusted_root) { + let result = if is_digest { + // Parse digest (sha256:hex...) 
+ let hex_digest = artifact_or_digest.strip_prefix("sha256:").unwrap(); + let digest = match Sha256Hash::from_hex(hex_digest) { + Ok(d) => d, + Err(e) => { + eprintln!("Error parsing digest: {}", e); + process::exit(1); + } + }; + let artifact = Artifact::from(digest); + verify(artifact, &bundle, &policy, &trusted_root) + } else { + // Read artifact file + let artifact_bytes = match fs::read(artifact_or_digest) { + Ok(data) => data, + Err(e) => { + eprintln!("Error reading artifact '{}': {}", artifact_or_digest, e); + process::exit(1); + } + }; + verify(&artifact_bytes, &bundle, &policy, &trusted_root) + }; + + match result { Ok(result) => { + // Check identity regexp if provided + if let Some(re_str) = &identity_regexp { + let re = match Regex::new(re_str) { + Ok(r) => r, + Err(e) => { + eprintln!("Error compiling identity regexp: {}", e); + process::exit(1); + } + }; + if let Some(id) = &result.identity { + if !re.is_match(id) { + eprintln!("\nVerification: FAILED"); + eprintln!(" Identity '{}' does not match regexp '{}'", id, re_str); + process::exit(1); + } + } else { + eprintln!("\nVerification: FAILED"); + eprintln!(" No identity found in certificate"); + process::exit(1); + } + } + if result.success { println!("\nVerification: SUCCESS"); if let Some(id) = &result.identity { @@ -182,30 +255,36 @@ fn main() { } fn print_usage(program: &str) { - eprintln!("Usage: {} [OPTIONS] ", program); + eprintln!("Usage: {} [OPTIONS] ", program); eprintln!(); eprintln!("Arguments:"); - eprintln!(" Path to the artifact file to verify"); - eprintln!(" Path to the Sigstore bundle (.sigstore.json)"); + eprintln!(" Path to artifact file OR sha256:hex digest"); + eprintln!(" Path to the Sigstore bundle (.sigstore.json)"); eprintln!(); eprintln!("Options:"); - eprintln!(" -i, --identity Required certificate identity (SAN)"); - eprintln!(" -o, --issuer Required OIDC issuer"); - eprintln!(" -h, --help Print this help message"); + eprintln!(" --certificate-identity Required certificate identity (exact match)"); + eprintln!(" --certificate-identity-regexp Required certificate identity (regex)"); + eprintln!(" --certificate-oidc-issuer Required OIDC issuer"); + eprintln!(" -h, --help Print this help message"); + eprintln!(); + eprintln!("Aliases (for backwards compatibility):"); + eprintln!(" -i, --identity Same as --certificate-identity"); + eprintln!(" -o, --issuer Same as --certificate-oidc-issuer"); eprintln!(); eprintln!("Examples:"); eprintln!(" # Verify a bundle"); eprintln!(" {} artifact.txt artifact.sigstore.json", program); eprintln!(); - eprintln!(" # Verify with identity requirements (for GitHub Actions)"); - eprintln!( - " {} --identity https://github.com/org/repo/.github/workflows/release.yml@refs/tags/v1.0.0 \\", - program - ); - eprintln!(" --issuer https://token.actions.githubusercontent.com \\"); + eprintln!(" # Verify with identity regex (cosign-compatible)"); + eprintln!(" {} --certificate-identity-regexp \".*\" \\", program); + eprintln!(" --certificate-oidc-issuer https://token.actions.githubusercontent.com \\"); eprintln!(" artifact.txt artifact.sigstore.json"); eprintln!(); + eprintln!(" # Verify using digest instead of file"); + eprintln!(" {} --certificate-identity-regexp \".*\" \\", program); + eprintln!(" --certificate-oidc-issuer https://token.actions.githubusercontent.com \\"); + eprintln!(" sha256:abc123def456... 
bundle.sigstore.json"); + eprintln!(); eprintln!("Getting bundles from GitHub:"); - eprintln!(" # Download attestation bundle for a GitHub release artifact"); eprintln!(" gh attestation download -o bundle.sigstore.json"); } diff --git a/crates/sigstore-verify/examples/verify_conda_attestation.rs b/crates/sigstore-verify/examples/verify_conda_attestation.rs new file mode 100644 index 0000000..f47a2ea --- /dev/null +++ b/crates/sigstore-verify/examples/verify_conda_attestation.rs @@ -0,0 +1,175 @@ +//! Example: Verify a Conda package attestation from GitHub Actions +//! +//! This example demonstrates how to verify attestations for conda packages +//! produced by GitHub Actions workflows, showing both the signature verification +//! and the in-toto attestation contents. +//! +//! # Usage +//! +//! ```sh +//! # Download a conda package and its attestation from GitHub +//! gh run download --repo +//! gh attestation download --repo +//! +//! # Verify the attestation +//! cargo run -p sigstore-verify --example verify_conda_attestation -- \ +//! package.conda attestation.sigstore.json +//! ``` +//! +//! # Example with test data +//! +//! ```sh +//! cargo run -p sigstore-verify --example verify_conda_attestation -- \ +//! crates/sigstore-verify/test_data/bundles/signed-package-2.1.0-hb0f4dca_0.conda \ +//! crates/sigstore-verify/test_data/bundles/conda-attestation.sigstore.json +//! ``` + +use sigstore_trust_root::TrustedRoot; +use sigstore_types::{bundle::SignatureContent, Bundle}; +use sigstore_verify::{verify, VerificationPolicy}; + +use std::env; +use std::fs; +use std::process; + +fn main() { + let args: Vec = env::args().collect(); + + if args.len() != 3 { + eprintln!("Usage: {} ", args[0]); + eprintln!(); + eprintln!("Arguments:"); + eprintln!(" Path to the .conda package file"); + eprintln!(" Path to the attestation bundle (.sigstore.json)"); + eprintln!(); + eprintln!("Example:"); + eprintln!(" {} package.conda attestation.sigstore.json", args[0]); + process::exit(1); + } + + let artifact_path = &args[1]; + let bundle_path = &args[2]; + + // Read artifact + let artifact = match fs::read(artifact_path) { + Ok(data) => data, + Err(e) => { + eprintln!("Error reading artifact '{}': {}", artifact_path, e); + process::exit(1); + } + }; + + // Read bundle + let bundle_json = match fs::read_to_string(bundle_path) { + Ok(data) => data, + Err(e) => { + eprintln!("Error reading bundle '{}': {}", bundle_path, e); + process::exit(1); + } + }; + + // Parse bundle + let bundle = match Bundle::from_json(&bundle_json) { + Ok(b) => b, + Err(e) => { + eprintln!("Error parsing bundle: {}", e); + process::exit(1); + } + }; + + // Load trusted root (production Sigstore infrastructure) + let trusted_root = match TrustedRoot::production() { + Ok(root) => root, + Err(e) => { + eprintln!("Error loading trusted root: {}", e); + process::exit(1); + } + }; + + // Print bundle info + println!("Verifying conda package attestation..."); + println!(); + println!("Package: {}", artifact_path); + println!("Bundle: {}", bundle_path); + println!("Media Type: {}", bundle.media_type); + + // Extract and display in-toto attestation info if present + if let SignatureContent::DsseEnvelope(dsse) = &bundle.content { + println!(); + println!("In-Toto Attestation:"); + println!(" Payload Type: {}", dsse.payload_type); + + // payload is already PayloadBytes + if let Ok(payload_str) = std::str::from_utf8(dsse.payload.as_bytes()) { + if let Ok(statement) = serde_json::from_str::(payload_str) { + if let Some(stmt_type) = 
statement.get("_type").and_then(|v| v.as_str()) { + println!(" Statement Type: {}", stmt_type); + } + if let Some(pred_type) = statement.get("predicateType").and_then(|v| v.as_str()) { + println!(" Predicate Type: {}", pred_type); + } + if let Some(subjects) = statement.get("subject").and_then(|v| v.as_array()) { + println!(" Subjects:"); + for subject in subjects { + if let Some(name) = subject.get("name").and_then(|v| v.as_str()) { + println!(" - {}", name); + if let Some(digest) = subject.get("digest") { + if let Some(sha256) = digest.get("sha256").and_then(|v| v.as_str()) + { + println!(" sha256: {}", sha256); + } + } + } + } + } + if let Some(predicate) = statement.get("predicate") { + println!(" Predicate:"); + if let Some(channel) = predicate.get("targetChannel").and_then(|v| v.as_str()) { + println!(" Target Channel: {}", channel); + } + } + } + } + } + + // Build verification policy - for GitHub Actions attestations, we expect + // the identity to be the workflow file path and issuer to be GitHub + let policy = + VerificationPolicy::default().require_issuer("https://token.actions.githubusercontent.com"); + + // Verify + println!(); + match verify(&artifact, &bundle, &policy, &trusted_root) { + Ok(result) => { + if result.success { + println!("Verification: SUCCESS"); + println!(); + println!("Certificate Details:"); + if let Some(id) = &result.identity { + println!(" Identity (SAN): {}", id); + } + if let Some(iss) = &result.issuer { + println!(" OIDC Issuer: {}", iss); + } + if let Some(time) = result.integrated_time { + use chrono::{DateTime, Utc}; + if let Some(dt) = DateTime::::from_timestamp(time, 0) { + println!(" Signed at: {}", dt); + } + } + for warning in &result.warnings { + println!(); + println!("Warning: {}", warning); + } + process::exit(0); + } else { + eprintln!("Verification: FAILED"); + process::exit(1); + } + } + Err(e) => { + eprintln!("Verification error: {}", e); + process::exit(1); + } + } +} diff --git a/crates/sigstore-verify/src/verify_impl/hashedrekord.rs b/crates/sigstore-verify/src/verify_impl/hashedrekord.rs index eab7492..730f701 100644 --- a/crates/sigstore-verify/src/verify_impl/hashedrekord.rs +++ b/crates/sigstore-verify/src/verify_impl/hashedrekord.rs @@ -318,8 +318,8 @@ fn validate_integrated_time(entry: &TransparencyLogEntry, bundle: &Bundle) -> Re let bundle_cert_der = bundle_cert.as_bytes(); // Only validate integrated time for hashedrekord 0.0.1 - // For 0.0.2 (Rekor v2), integrated_time is not present - if entry.kind_version.version == "0.0.1" && !entry.integrated_time.is_empty() { + // For 0.0.2 (Rekor v2), integrated_time is not present (0) + if entry.kind_version.version == "0.0.1" && entry.integrated_time != 0 { let cert = Certificate::from_der(bundle_cert_der).map_err(|e| { Error::Verification(format!( "failed to parse certificate for time validation: {}", @@ -345,9 +345,7 @@ fn validate_integrated_time(entry: &TransparencyLogEntry, bundle: &Bundle) -> Re })? 
.as_secs() as i64; - let integrated_time = entry.integrated_time.parse::().map_err(|e| { - Error::Verification(format!("failed to parse integrated time: {}", e)) - })?; + let integrated_time = entry.integrated_time; if integrated_time < not_before || integrated_time > not_after { return Err(Error::Verification(format!( diff --git a/crates/sigstore-verify/src/verify_impl/helpers.rs b/crates/sigstore-verify/src/verify_impl/helpers.rs index 667f244..12da346 100644 --- a/crates/sigstore-verify/src/verify_impl/helpers.rs +++ b/crates/sigstore-verify/src/verify_impl/helpers.rs @@ -164,15 +164,14 @@ fn extract_v1_integrated_time_with_promise(bundle: &Bundle) -> Option { continue; } - if let Ok(time) = entry.integrated_time.parse::() { - if time > 0 { - if let Some(earliest) = earliest_time { - if time < earliest { - earliest_time = Some(time); - } - } else { + let time = entry.integrated_time; + if time > 0 { + if let Some(earliest) = earliest_time { + if time < earliest { earliest_time = Some(time); } + } else { + earliest_time = Some(time); } } } diff --git a/crates/sigstore-verify/src/verify_impl/tlog.rs b/crates/sigstore-verify/src/verify_impl/tlog.rs index 337b78e..ad04e67 100644 --- a/crates/sigstore-verify/src/verify_impl/tlog.rs +++ b/crates/sigstore-verify/src/verify_impl/tlog.rs @@ -43,38 +43,34 @@ pub fn verify_tlog_entries( verify_set(entry, trusted_root)?; } - // Validate integrated time - if !entry.integrated_time.is_empty() { - if let Ok(time) = entry.integrated_time.parse::() { - // Ignore 0 as it indicates invalid/missing time - if time > 0 { - // Check that integrated time is not in the future (with clock skew tolerance) - let now = chrono::Utc::now().timestamp(); - if time > now + clock_skew_seconds { - return Err(Error::Verification(format!( - "integrated time {} is in the future (current time: {}, tolerance: {}s)", - time, now, clock_skew_seconds - ))); - } - - // Check that integrated time is within certificate validity period - if time < not_before { - return Err(Error::Verification(format!( - "integrated time {} is before certificate validity (not_before: {})", - time, not_before - ))); - } - - if time > not_after { - return Err(Error::Verification(format!( - "integrated time {} is after certificate validity (not_after: {})", - time, not_after - ))); - } - - integrated_time_result = Some(time); - } + // Validate integrated time (0 indicates missing/invalid time in v2 entries) + let time = entry.integrated_time; + if time > 0 { + // Check that integrated time is not in the future (with clock skew tolerance) + let now = chrono::Utc::now().timestamp(); + if time > now + clock_skew_seconds { + return Err(Error::Verification(format!( + "integrated time {} is in the future (current time: {}, tolerance: {}s)", + time, now, clock_skew_seconds + ))); } + + // Check that integrated time is within certificate validity period + if time < not_before { + return Err(Error::Verification(format!( + "integrated time {} is before certificate validity (not_before: {})", + time, not_before + ))); + } + + if time > not_after { + return Err(Error::Verification(format!( + "integrated time {} is after certificate validity (not_after: {})", + time, not_after + ))); + } + + integrated_time_result = Some(time); } } @@ -160,14 +156,11 @@ pub fn verify_set(entry: &TransparencyLogEntry, trusted_root: &TrustedRoot) -> R // Construct the payload (base64-encoded body) let body = entry.canonicalized_body.to_base64(); - let integrated_time = entry - .integrated_time - .parse::() - .map_err(|_| 
Error::Verification("Invalid integrated time".into()))?; + let integrated_time = entry.integrated_time; let log_index = entry .log_index .as_u64() - .map_err(|_| Error::Verification("Invalid log index".into()))? as i64; + .ok_or_else(|| Error::Verification("Invalid log index".into()))? as i64; // Log ID for payload must be hex encoded let log_id_bytes = base64::engine::general_purpose::STANDARD diff --git a/crates/sigstore-verify/test_data/bundles/cosign-v3-blob.sigstore.json b/crates/sigstore-verify/test_data/bundles/cosign-v3-blob.sigstore.json new file mode 100644 index 0000000..b5962ff --- /dev/null +++ b/crates/sigstore-verify/test_data/bundles/cosign-v3-blob.sigstore.json @@ -0,0 +1 @@ +{"mediaType":"application/vnd.dev.sigstore.bundle.v0.3+json", "verificationMaterial":{"certificate":{"rawBytes":"MIIC1TCCAlygAwIBAgIUQ6rCmpLcP7MAAjGQvMmPgMtSJdQwCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjUxMjAzMTgzNjQyWhcNMjUxMjAzMTg0NjQyWjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEFodOSridzGjgIAIl3/2N+eP4dMBEM0oMNJnbWLPBnASGSdjtYr8KvEoxYXTqc47tu22hKYyfnNPkADR1Q9FXeKOCAXswggF3MA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUJjVZgdkHLo7sM1/lIx5dEthq9mgwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wJAYDVR0RAQH/BBowGIEWdy52b2xscHJlY2h0QGdtYWlsLmNvbTAsBgorBgEEAYO/MAEBBB5odHRwczovL2dpdGh1Yi5jb20vbG9naW4vb2F1dGgwLgYKKwYBBAGDvzABCAQgDB5odHRwczovL2dpdGh1Yi5jb20vbG9naW4vb2F1dGgwgYsGCisGAQQB1nkCBAIEfQR7AHkAdwDdPTBqxscRMmMZHhyZZzcCokpeuN48rf+HinKALynujgAAAZrlgJ0SAAAEAwBIMEYCIQC19RUfoY4zDUcXuEFD+jCs123iUaL3QzSC//Kf67mp5QIhAMtP95BFoDh17zzIVI5Dz2PJJx9KXG5eVdPrpkV2suvAMAoGCCqGSM49BAMDA2cAMGQCMHN5sdKmaUOF2pBGygVP8xlrxTHjh9A1y6B6YkXzpB8WAsjX0vqsPJ8s8gWFjfLo0wIwFPXpDGO0hentwcuKCnl10/Vk1yFUrb6BB0/Fg+1yJiiBn4FsJUbWacpt7KHCqQu1"}, "tlogEntries":[{"logIndex":"738312748", "logId":{"keyId":"wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="}, "kindVersion":{"kind":"hashedrekord", "version":"0.0.1"}, "integratedTime":"1764787003", "inclusionPromise":{"signedEntryTimestamp":"MEUCIGduMyNHwdmNItO2glTI3vsJcX0UZJ7gPl9mb/+9gCYRAiEAvwcs6NeB049MkPeRa/t9tbkBUzVijo+fYxriOiEX994="}, "inclusionProof":{"logIndex":"616408486", "rootHash":"dMhojWLwKZjp3xdLBFgCysbIYp4FTe94vs70zFB4nAM=", "treeSize":"616408488", "hashes":["PXpZ49ay970tZNI9tKk2a8N3w2YdhDeXo4xr4fldbTs=", "1E5+TJDHAuuUwwZBcfvFYKuwJbBirKU17fRl1zH48hg=", "NkvH/aihlVQn4eqiKz2NVaPTPYslEStiaKwdOm/G9zY=", "aQmQQHU/7x9gnmLiB6M2jAGKuw73ALuyUm+wCfZW7bg=", "fcYaHYkZkRlEMpxUKAOD33AH4yBLcLEOBC6L5Lkv6/I=", "+jY3qOWPfdh1hLyOWUgtjykWOAlnyX2KIHU5+ONqVUI=", "toOguUn75si9YDuf9zTmXNv47noi41dcEcdegoMtJgo=", "rQVrtBDyDEqA6tmX1hbd2SHoYdcw1cKBoastRdMC0Lg=", "2lqmP0g7dTwttCuD3m5rAbf8af9Ydl/2Ct3fz7wwCJo=", "P07VUPWy7Jq8+bSaQ5xCZVQhhdVzYywP94WlTMaAifU=", "QRYmzeY4Zi71BnW0Mh1SWT9pavlAn9DhRMiRIgiTX9Q=", "4uYCK3kl03eiD37zCe/PGO3kCP/yLCSWEaTYkLGifio=", "aosWMjs6qwpL80j4koy1dyO/vNa5Q6NlvxMT+2T2GwA=", "Wf0Z+YbBL8mPn9AeMCdcxxuSxc7DuhUVP1iFflyC/hc=", "huaH1ZSkRyP4+vpmGtpmkkL845lhcmN9io8MIe6Sob0=", "ZmUkYkHBy1B723JrEgiKvepTdHYrP6y2a4oODYvi5VY=", "T4DqWD42hAtN+vX8jKCWqoC4meE4JekI9LxYGCcPy1M="], "checkpoint":{"envelope":"rekor.sigstore.dev - 1193050959916656506\n616408488\ndMhojWLwKZjp3xdLBFgCysbIYp4FTe94vs70zFB4nAM=\n\n— rekor.sigstore.dev wNI9ajBEAiB9yqLZxKPPSG4EYgyJq9C884egwJ32aYU8MyqeYXYiJQIgebrTUhIavn7+VyFhLnWNx7yC+ftNqRNfu+IzwTv08KM=\n"}}, 
"canonicalizedBody":"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJlMTk0Yjk5ZGY2NjM5NTAxMzI1YzVmNDRkYTc5ZmUzYThlZmFlNmYyMzliZDc5NDQzYTE5Nzg3ZTA2ZmIxNzY0In19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FVUNJRkx0SmQ2WCtUU3pxOURFM3Z1eGltWkNuSmVZTTM0RnJMa3hVK3dpTE1MSkFpRUFneFQ3VnQzbVh1Tm5jT25vM3lvcFkwUW5vTlpMODE5aVoxT0xhcGNrYUJ3PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCRFJWSlVTVVpKUTBGVVJTMHRMUzB0Q2sxSlNVTXhWRU5EUVd4NVowRjNTVUpCWjBsVlVUWnlRMjF3VEdOUU4wMUJRV3BIVVhaTmJWQm5UWFJUU21SUmQwTm5XVWxMYjFwSmVtb3dSVUYzVFhjS1RucEZWazFDVFVkQk1WVkZRMmhOVFdNeWJHNWpNMUoyWTIxVmRWcEhWakpOVWpSM1NFRlpSRlpSVVVSRmVGWjZZVmRrZW1SSE9YbGFVekZ3WW01U2JBcGpiVEZzV2tkc2FHUkhWWGRJYUdOT1RXcFZlRTFxUVhwTlZHZDZUbXBSZVZkb1kwNU5hbFY0VFdwQmVrMVVaekJPYWxGNVYycEJRVTFHYTNkRmQxbElDa3R2V2tsNmFqQkRRVkZaU1V0dldrbDZhakJFUVZGalJGRm5RVVZHYjJSUFUzSnBaSHBIYW1kSlFVbHNNeTh5VGl0bFVEUmtUVUpGVFRCdlRVNUtibUlLVjB4UVFtNUJVMGRUWkdwMFdYSTRTM1pGYjNoWldGUnhZelEzZEhVeU1taExXWGxtYms1UWEwRkVVakZST1VaWVpVdFBRMEZZYzNkblowWXpUVUUwUndwQk1WVmtSSGRGUWk5M1VVVkJkMGxJWjBSQlZFSm5UbFpJVTFWRlJFUkJTMEpuWjNKQ1owVkdRbEZqUkVGNlFXUkNaMDVXU0ZFMFJVWm5VVlZLYWxaYUNtZGthMGhNYnpkelRURXZiRWw0TldSRmRHaHhPVzFuZDBoM1dVUldVakJxUWtKbmQwWnZRVlV6T1ZCd2VqRlphMFZhWWpWeFRtcHdTMFpYYVhocE5Ga0tXa1E0ZDBwQldVUldVakJTUVZGSUwwSkNiM2RIU1VWWFpIazFNbUl5ZUhOalNFcHNXVEpvTUZGSFpIUlpWMnh6VEcxT2RtSlVRWE5DWjI5eVFtZEZSUXBCV1U4dlRVRkZRa0pDTlc5a1NGSjNZM3B2ZGt3eVpIQmtSMmd4V1drMWFtSXlNSFppUnpsdVlWYzBkbUl5UmpGa1IyZDNUR2RaUzB0M1dVSkNRVWRFQ25aNlFVSkRRVkZuUkVJMWIyUklVbmRqZW05MlRESmtjR1JIYURGWmFUVnFZakl3ZG1KSE9XNWhWelIyWWpKR01XUkhaM2RuV1hOSFEybHpSMEZSVVVJS01XNXJRMEpCU1VWbVVWSTNRVWhyUVdSM1JHUlFWRUp4ZUhOalVrMXRUVnBJYUhsYVducGpRMjlyY0dWMVRqUTRjbVlyU0dsdVMwRk1lVzUxYW1kQlFRcEJXbkpzWjBvd1UwRkJRVVZCZDBKSlRVVlpRMGxSUXpFNVVsVm1iMWswZWtSVlkxaDFSVVpFSzJwRGN6RXlNMmxWWVV3elVYcFRReTh2UzJZMk4yMXdDalZSU1doQlRYUlFPVFZDUm05RWFERTNlbnBKVmtrMVJIb3lVRXBLZURsTFdFYzFaVlprVUhKd2ExWXljM1YyUVUxQmIwZERRM0ZIVTAwME9VSkJUVVFLUVRKalFVMUhVVU5OU0U0MWMyUkxiV0ZWVDBZeWNFSkhlV2RXVURoNGJISjRWRWhxYURsQk1YazJRalpaYTFoNmNFSTRWMEZ6YWxnd2RuRnpVRW80Y3dvNFoxZEdhbVpNYnpCM1NYZEdVRmh3UkVkUE1HaGxiblIzWTNWTFEyNXNNVEF2Vm1zeGVVWlZjbUkyUWtJd0wwWm5LekY1U21scFFtNDBSbk5LVldKWENtRmpjSFEzUzBoRGNWRjFNUW90TFMwdExVVk9SQ0JEUlZKVVNVWkpRMEZVUlMwdExTMHRDZz09In19fX0="}], "timestampVerificationData":{"rfc3161Timestamps":[{"signedTimestamp":"MIICyTADAgEAMIICwAYJKoZIhvcNAQcCoIICsTCCAq0CAQMxDTALBglghkgBZQMEAgEwgbcGCyqGSIb3DQEJEAEEoIGnBIGkMIGhAgEBBgkrBgEEAYO/MAIwMTANBglghkgBZQMEAgEFAAQguqhf7Onr5+rrhPgQSAQ2M98wRN4SunAlt3DRfOdeem4CFEvA/X2Y+Q7SgSeIzV1sd9Lg1hLkGA8yMDI1MTIwMzE4MzY0MlowAwIBAaAypDAwLjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MRUwEwYDVQQDEwxzaWdzdG9yZS10c2GgADGCAdswggHXAgEBMFEwOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZAIUOhNULwyQYe68wUMvy4qOiyojiwwwCwYJYIZIAWUDBAIBoIH8MBoGCSqGSIb3DQEJAzENBgsqhkiG9w0BCRABBDAcBgkqhkiG9w0BCQUxDxcNMjUxMjAzMTgzNjQyWjAvBgkqhkiG9w0BCQQxIgQgP6xU4az/FUNWOr+jjTj3KXhOKoNUQG3ZGnpZcUvMg/YwgY4GCyqGSIb3DQEJEAIvMX8wfTB7MHkEIIX5J7wHq2LKw7RDVsEO/IGyxog/2nq55thw2dE6zQW3MFUwPaQ7MDkxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEgMB4GA1UEAxMXc2lnc3RvcmUtdHNhLXNlbGZzaWduZWQCFDoTVC8MkGHuvMFDL8uKjosqI4sMMAoGCCqGSM49BAMCBGcwZQIxANQPSum/tL0sOFNuMzbhrzdfPxu5/cQ5s0KoILeeaygvJS6HLFxL1/vI7FpIXZUO3QIwRLuZtNOwyQ8Dr8YWhaqred+vhJXCADGEEExEkiHX1kYEYkPZOvk/j85M2aBrYYHA"}]}}, "messageSignature":{"messageDigest":{"algorithm":"SHA2_256", "digest":"4ZS5nfZjlQEyXF9E2nn+Oo765vI5vXlEOhl4fgb7F2Q="}, 
"signature":"MEUCIFLtJd6X+TSzq9DE3vuximZCnJeYM34FrLkxU+wiLMLJAiEAgxT7Vt3mXuNncOno3yopY0QnoNZL819iZ1OLapckaBw="}} \ No newline at end of file diff --git a/crates/sigstore-verify/test_data/bundles/cosign-v3-blob.txt b/crates/sigstore-verify/test_data/bundles/cosign-v3-blob.txt new file mode 100644 index 0000000..4aacff6 --- /dev/null +++ b/crates/sigstore-verify/test_data/bundles/cosign-v3-blob.txt @@ -0,0 +1 @@ +test content for cosign diff --git a/crates/sigstore-verify/tests/verification_tests.rs b/crates/sigstore-verify/tests/verification_tests.rs index 6b1e64e..3a85f8c 100644 --- a/crates/sigstore-verify/tests/verification_tests.rs +++ b/crates/sigstore-verify/tests/verification_tests.rs @@ -291,11 +291,11 @@ fn test_full_verification_flow() { // Extract tlog entry info let entry = &bundle.verification_material.tlog_entries[0]; assert_eq!(entry.kind_version.kind, "dsse"); - assert_eq!(entry.log_index, LogIndex::new("166143216".to_string())); + assert_eq!(entry.log_index, LogIndex::new(166143216)); // Verify inclusion proof let proof = entry.inclusion_proof.as_ref().expect("Should have proof"); - assert_eq!(proof.tree_size, "44238955"); + assert_eq!(proof.tree_size, 44238955); assert_eq!(proof.hashes.len(), 10); // Run full verification - extract digest from bundle @@ -332,11 +332,11 @@ fn test_full_verification_flow_happy_path() { // Extract tlog entry info let entry = &bundle.verification_material.tlog_entries[0]; assert_eq!(entry.kind_version.kind, "dsse"); - assert_eq!(entry.log_index, LogIndex::new("155690850".to_string())); + assert_eq!(entry.log_index, LogIndex::new(155690850)); // Verify inclusion proof let proof = entry.inclusion_proof.as_ref().expect("Should have proof"); - assert_eq!(proof.tree_size, "33786589"); + assert_eq!(proof.tree_size, 33786589); assert_eq!(proof.hashes.len(), 11); // Run full verification - extract digest from bundle @@ -936,3 +936,60 @@ fn test_verify_conda_package_tampered() { "Verification should fail with tampered package" ); } + +// Cosign v0.3 blob bundle for interop testing +const COSIGN_V3_BLOB_BUNDLE: &str = + include_str!("../test_data/bundles/cosign-v3-blob.sigstore.json"); + +/// Test that we can parse a bundle produced by cosign v3.x +#[test] +fn test_parse_cosign_v3_blob_bundle() { + let bundle = + Bundle::from_json(COSIGN_V3_BLOB_BUNDLE).expect("Failed to parse cosign v3 blob bundle"); + + // Check media type + assert_eq!( + bundle.media_type, + "application/vnd.dev.sigstore.bundle.v0.3+json" + ); + + // Check it's a message signature (not DSSE) + assert!( + matches!( + bundle.content, + sigstore_verify::types::SignatureContent::MessageSignature(_) + ), + "Expected MessageSignature content" + ); + + // Check tlog entry + assert_eq!(bundle.verification_material.tlog_entries.len(), 1); + let entry = &bundle.verification_material.tlog_entries[0]; + assert_eq!(entry.kind_version.kind, "hashedrekord"); + assert_eq!(entry.kind_version.version, "0.0.1"); + + // Check it has both inclusion proof and inclusion promise + assert!(entry.inclusion_proof.is_some(), "Expected inclusion proof"); + assert!( + entry.inclusion_promise.is_some(), + "Expected inclusion promise (SET)" + ); + + // Check integrated time is present + assert!(entry.integrated_time > 0, "Expected integrated time > 0"); +} + +/// Test full verification of cosign-produced bundle +#[test] +fn test_verify_cosign_v3_blob_bundle() { + let bundle = + Bundle::from_json(COSIGN_V3_BLOB_BUNDLE).expect("Failed to parse cosign v3 blob bundle"); + + // The artifact content that was signed + let 
artifact = include_bytes!("../test_data/bundles/cosign-v3-blob.txt"); + + let policy = VerificationPolicy::default().require_issuer("https://github.com/login/oauth"); + + let result = verify(artifact, &bundle, &policy, &production_root()); + assert!(result.is_ok(), "Verification failed: {:?}", result.err()); +} diff --git a/scripts/compare_bundles.py b/scripts/compare_bundles.py new file mode 100755 index 0000000..ee9ce13 --- /dev/null +++ b/scripts/compare_bundles.py @@ -0,0 +1,212 @@ +#!/usr/bin/env python3 +"""Compare two Sigstore bundles to ensure structural compatibility. + +This script checks that bundles produced by different implementations +(e.g., sigstore-rust vs cosign) have the same structure and format. +""" + +import json +import sys +from pathlib import Path + + +def get_structure(obj, path=""): + """Recursively get the structure of a JSON object.""" + if isinstance(obj, dict): + result = {} + for key, value in obj.items(): + new_path = f"{path}.{key}" if path else key + result[key] = get_structure(value, new_path) + return {"type": "object", "keys": result} + elif isinstance(obj, list): + if len(obj) > 0: + # Get structure of first element as representative + return {"type": "array", "element": get_structure(obj[0], f"{path}[0]")} + return {"type": "array", "element": None} + elif isinstance(obj, str): + return {"type": "string", "length": len(obj)} + elif isinstance(obj, int): + return {"type": "int"} + elif isinstance(obj, float): + return {"type": "float"} + elif isinstance(obj, bool): + return {"type": "bool"} + elif obj is None: + return {"type": "null"} + else: + return {"type": str(type(obj))} + + +def compare_structures(struct1, struct2, path="root"): + """Compare two structures and report differences.""" + differences = [] + + if struct1["type"] != struct2["type"]: + differences.append(f"{path}: type mismatch - {struct1['type']} vs {struct2['type']}") + return differences + + if struct1["type"] == "object": + keys1 = set(struct1["keys"].keys()) + keys2 = set(struct2["keys"].keys()) + + only_in_1 = keys1 - keys2 + only_in_2 = keys2 - keys1 + common = keys1 & keys2 + + for key in only_in_1: + differences.append(f"{path}.{key}: only in first bundle") + for key in only_in_2: + differences.append(f"{path}.{key}: only in second bundle") + + for key in common: + differences.extend( + compare_structures( + struct1["keys"][key], + struct2["keys"][key], + f"{path}.{key}" + ) + ) + + elif struct1["type"] == "array": + if struct1["element"] is not None and struct2["element"] is not None: + differences.extend( + compare_structures( + struct1["element"], + struct2["element"], + f"{path}[]" + ) + ) + + return differences + + +def check_bundle_format(bundle, name): + """Check that a bundle follows expected format conventions.""" + issues = [] + + # Check media type + if "mediaType" not in bundle: + issues.append(f"{name}: missing mediaType") + elif not bundle["mediaType"].startswith("application/vnd.dev.sigstore.bundle"): + issues.append(f"{name}: unexpected mediaType format: {bundle['mediaType']}") + + # Check verification material + vm = bundle.get("verificationMaterial", {}) + + # Check certificate format + if "certificate" in vm: + cert = vm["certificate"] + if "rawBytes" not in cert: + issues.append(f"{name}: certificate missing rawBytes") + elif "x509CertificateChain" in vm: + chain = vm["x509CertificateChain"] + if "certificates" not in chain: + issues.append(f"{name}: x509CertificateChain missing certificates") + + # Check tlog entries + tlog_entries = 
vm.get("tlogEntries", []) + for i, entry in enumerate(tlog_entries): + prefix = f"{name}.tlogEntries[{i}]" + + # Required fields + required = ["logIndex", "logId", "kindVersion", "canonicalizedBody"] + for field in required: + if field not in entry: + issues.append(f"{prefix}: missing required field '{field}'") + + # Check logIndex is string (per protobuf spec) + if "logIndex" in entry: + if not isinstance(entry["logIndex"], str): + issues.append(f"{prefix}.logIndex: should be string, got {type(entry['logIndex']).__name__}") + + # Check integratedTime format if present + if "integratedTime" in entry: + if not isinstance(entry["integratedTime"], str): + issues.append(f"{prefix}.integratedTime: should be string, got {type(entry['integratedTime']).__name__}") + + # Check inclusion proof + if "inclusionProof" in entry: + proof = entry["inclusionProof"] + if "logIndex" in proof and not isinstance(proof["logIndex"], str): + issues.append(f"{prefix}.inclusionProof.logIndex: should be string") + if "treeSize" in proof and not isinstance(proof["treeSize"], str): + issues.append(f"{prefix}.inclusionProof.treeSize: should be string") + + return issues + + +def main(): + if len(sys.argv) < 3: + print("Usage: compare_bundles.py ") + print(" compare_bundles.py --check ") + sys.exit(1) + + if sys.argv[1] == "--check": + # Single bundle format check + bundle_path = Path(sys.argv[2]) + with open(bundle_path) as f: + bundle = json.load(f) + + issues = check_bundle_format(bundle, bundle_path.name) + + if issues: + print(f"Format issues in {bundle_path.name}:") + for issue in issues: + print(f" - {issue}") + sys.exit(1) + else: + print(f"✅ {bundle_path.name} format OK") + sys.exit(0) + + # Compare two bundles + bundle1_path = Path(sys.argv[1]) + bundle2_path = Path(sys.argv[2]) + + with open(bundle1_path) as f: + bundle1 = json.load(f) + with open(bundle2_path) as f: + bundle2 = json.load(f) + + print(f"Comparing {bundle1_path.name} vs {bundle2_path.name}") + print() + + # Check individual bundle formats + issues1 = check_bundle_format(bundle1, bundle1_path.name) + issues2 = check_bundle_format(bundle2, bundle2_path.name) + + if issues1: + print(f"Format issues in {bundle1_path.name}:") + for issue in issues1: + print(f" - {issue}") + print() + + if issues2: + print(f"Format issues in {bundle2_path.name}:") + for issue in issues2: + print(f" - {issue}") + print() + + # Compare structures + struct1 = get_structure(bundle1) + struct2 = get_structure(bundle2) + + differences = compare_structures(struct1, struct2) + + if differences: + print("Structural differences:") + for diff in differences: + print(f" - {diff}") + print() + + # Summary + total_issues = len(issues1) + len(issues2) + len(differences) + if total_issues == 0: + print("✅ Bundles are structurally identical and properly formatted") + sys.exit(0) + else: + print(f"❌ Found {total_issues} issue(s)") + sys.exit(1) + + +if __name__ == "__main__": + main()