Skip to content
Open
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ provekit-verifier-server = { path = "tooling/verifier-server" }
# 3rd party
anyhow = "1.0.93"
argh = "0.1.12"
arrayvec = "0.7"
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The `arrayvec` dependency appears to be unused — the leaf hash rolls its own fixed-size `StackBuffer` instead. Consider removing `arrayvec` from the workspace `Cargo.toml` and from `provekit/common/Cargo.toml`.

axum = "0.8.4"
base64 = "0.22.1"
bytes = "1.10.1"
Expand All @@ -112,7 +113,9 @@ ruint = { version = "1.12.3", features = ["num-traits", "rand"] }
seq-macro = "0.3.6"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sha2 = "0.10.9"
sha2 = { version = "0.10.9", features = ["asm"] }
sha3 = "0.11.0-rc.3"
blake3 = "1.5.6"
test-case = "3.3.1"
toml = "0.8.8"
tokio = { version = "1.47.1", features = ["full"] }
Expand Down Expand Up @@ -143,7 +146,7 @@ noirc_driver = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.1
ark-bn254 = { version = "0.5.0", default-features = false, features = [
"scalar_field",
] }
ark-crypto-primitives = { version = "0.5", features = ["merkle_tree"] }
ark-crypto-primitives = { version = "0.5", features = ["merkle_tree", "parallel"] }
ark-ff = { version = "0.5", features = ["asm", "std"] }
ark-poly = "0.5"
ark-serialize = "0.5"
Expand Down
8 changes: 8 additions & 0 deletions provekit/common/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true

[features]
default = []

[dependencies]
# Workspace crates
skyscraper.workspace = true
Expand All @@ -28,15 +31,20 @@ whir.workspace = true

# 3rd party
anyhow.workspace = true
arrayvec.workspace = true
bytes.workspace = true
hex.workspace = true
itertools.workspace = true
postcard.workspace = true
rand.workspace = true
rand08.workspace = true
rayon.workspace = true
ruint.workspace = true
serde.workspace = true
serde_json.workspace = true
sha2.workspace = true
sha3.workspace = true
blake3.workspace = true
tracing.workspace = true
zerocopy.workspace = true
zeroize.workspace = true
Expand Down
156 changes: 156 additions & 0 deletions provekit/common/src/blake3/hash.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
//! BLAKE3 hash implementations for Merkle tree construction.

use {
crate::FieldElement,
ark_crypto_primitives::{
crh::{CRHScheme, TwoToOneCRHScheme},
Error,
},
ark_serialize::CanonicalSerialize,
rand08::Rng,
serde::{Deserialize, Serialize},
std::{borrow::Borrow, io::Write},
whir::crypto::merkle_tree::digest::GenericDigest,
};

pub type Blake3Digest = GenericDigest<32>;

/// Number of field elements whose compressed serialization is guaranteed to
/// fit in the stack buffer used by the leaf hash.
const MAX_STACK_ELEMENTS: usize = 16;

/// ark-serialize's compressed encoding of a slice writes an 8-byte length
/// prefix followed by 32 bytes per field element, so `MAX_STACK_ELEMENTS`
/// elements need 8 + 16 * 32 = 520 bytes. (The previous literal 528 did not
/// match its own comment; deriving the value keeps them in sync.)
const LEAF_BUFFER_SIZE: usize = 8 + MAX_STACK_ELEMENTS * 32;

/// Fixed-size, stack-allocated write target used to avoid a heap allocation
/// when hashing small Merkle leaves.
struct StackBuffer {
    /// Backing storage; only `buf[..pos]` holds written data.
    buf: [u8; LEAF_BUFFER_SIZE],
    /// Number of bytes written so far.
    pos: usize,
}

impl StackBuffer {
    /// Creates an empty buffer.
    fn new() -> Self {
        Self {
            buf: [0u8; LEAF_BUFFER_SIZE],
            pos: 0,
        }
    }

    /// Returns the bytes written so far.
    fn as_slice(&self) -> &[u8] {
        &self.buf[..self.pos]
    }
}

impl Write for StackBuffer {
    /// All-or-nothing write: refuses (with `WriteZero`) any write that would
    /// overflow the fixed capacity, so a caller that outgrows the buffer
    /// fails loudly instead of silently truncating the serialized leaf.
    fn write(&mut self, data: &[u8]) -> std::io::Result<usize> {
        let available = LEAF_BUFFER_SIZE - self.pos;
        if data.len() > available {
            return Err(std::io::Error::new(
                std::io::ErrorKind::WriteZero,
                "buffer overflow",
            ));
        }
        self.buf[self.pos..self.pos + data.len()].copy_from_slice(data);
        self.pos += data.len();
        Ok(data.len())
    }

    /// No buffering beyond the array itself, so flushing is a no-op.
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}

/// Merkle leaf hash: BLAKE3 over the ark-serialize compressed encoding of a
/// slice of field elements.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Blake3LeafHash;

impl CRHScheme for Blake3LeafHash {
    type Input = [FieldElement];
    type Output = Blake3Digest;
    type Parameters = ();

    /// BLAKE3 is unkeyed here, so setup consumes no randomness.
    fn setup<R: Rng>(_: &mut R) -> Result<Self::Parameters, Error> {
        Ok(())
    }

    /// Hashes the compressed serialization of `input`.
    ///
    /// `required_size` assumes the compressed encoding is an 8-byte length
    /// prefix followed by exactly 32 bytes per element — NOTE(review):
    /// confirm this matches the concrete `FieldElement`'s serialized size;
    /// an undersized estimate would make the stack path error out.
    ///
    /// Small leaves serialize into a stack buffer to avoid a heap
    /// allocation; larger ones fall back to a `Vec`. Both paths feed the
    /// same bytes to BLAKE3, so the digest is identical either way.
    fn evaluate<T: Borrow<Self::Input>>(
        _: &Self::Parameters,
        input: T,
    ) -> Result<Self::Output, Error> {
        let input = input.borrow();
        let required_size = 8 + input.len() * 32;

        if required_size <= LEAF_BUFFER_SIZE {
            let mut buf = StackBuffer::new();
            input.serialize_compressed(&mut buf)?;
            let output: [u8; 32] = blake3::hash(buf.as_slice()).into();
            Ok(output.into())
        } else {
            let mut buf = Vec::with_capacity(required_size);
            input.serialize_compressed(&mut buf)?;
            let output: [u8; 32] = blake3::hash(&buf).into();
            Ok(output.into())
        }
    }
}

/// Two-to-one Merkle node compression: BLAKE3 over `left || right`
/// (two 32-byte child digests, 64 bytes total).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Blake3Compress;

impl TwoToOneCRHScheme for Blake3Compress {
    type Input = Blake3Digest;
    type Output = Blake3Digest;
    type Parameters = ();

    /// Unkeyed hash — setup needs no randomness.
    fn setup<R: Rng>(_: &mut R) -> Result<Self::Parameters, Error> {
        Ok(())
    }

    /// Hashes the concatenation of the two child digests. Streaming the two
    /// 32-byte halves into one hasher is byte-equivalent to hashing a single
    /// concatenated 64-byte buffer.
    fn evaluate<T: Borrow<Self::Input>>(
        _: &Self::Parameters,
        left_input: T,
        right_input: T,
    ) -> Result<Self::Output, Error> {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&left_input.borrow().0);
        hasher.update(&right_input.borrow().0);
        let digest: [u8; 32] = hasher.finalize().into();
        Ok(digest.into())
    }

    /// Inner nodes compress exactly like `evaluate`; simply delegate.
    fn compress<T: Borrow<Self::Output>>(
        parameters: &Self::Parameters,
        left_input: T,
        right_input: T,
    ) -> Result<Self::Output, Error> {
        Self::evaluate(parameters, left_input, right_input)
    }
}

#[cfg(test)]
mod tests {
    use {
        super::*,
        ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme},
        ark_ff::One,
        whir::crypto::merkle_tree::blake3::{
            Blake3Compress as WhirCompress, Blake3LeafHash as WhirLeafHash,
        },
    };

    /// The local leaf hash must be byte-compatible with WHIR's BLAKE3 leaf
    /// hash — otherwise Merkle roots built here would not verify against
    /// WHIR-produced proofs.
    #[test]
    fn leaf_hash_matches_whir() {
        let input = vec![
            FieldElement::one(),
            FieldElement::from(42u64),
            FieldElement::from(123456u64),
            FieldElement::from(999999u64),
        ];
        let whir = WhirLeafHash::<FieldElement>::evaluate(&(), input.as_slice()).unwrap();
        let ours = Blake3LeafHash::evaluate(&(), input.as_slice()).unwrap();
        assert_eq!(whir, ours);
    }

    /// Same compatibility requirement for the two-to-one inner-node
    /// compression function.
    #[test]
    fn compress_matches_whir() {
        let left: Blake3Digest = [1u8; 32].into();
        let right: Blake3Digest = [2u8; 32].into();
        let whir = WhirCompress::evaluate(&(), &left, &right).unwrap();
        let ours = Blake3Compress::evaluate(&(), &left, &right).unwrap();
        assert_eq!(whir, ours);
    }
}
27 changes: 27 additions & 0 deletions provekit/common/src/blake3/merkle.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
//! BLAKE3-based Merkle tree configuration.

use {
crate::FieldElement, ark_crypto_primitives::merkle_tree::Config,
whir::crypto::merkle_tree::digest::GenericDigest,
};

/// 32-byte BLAKE3 digest. NOTE(review): the same alias is also declared in
/// `crate::blake3::hash`; consider keeping a single definition to avoid
/// divergence.
pub type Blake3Digest = GenericDigest<32>;

/// Merkle tree configuration using BLAKE3 for both leaf and inner hashes.
#[derive(Clone, Debug)]
pub struct Blake3MerkleConfig;

impl Config for Blake3MerkleConfig {
    /// Leaves are slices of field elements, hashed by `Blake3LeafHash`.
    type Leaf = [FieldElement];
    type LeafDigest = Blake3Digest;
    /// Leaf and inner digests share the same 32-byte type, so the identity
    /// converter suffices.
    type LeafInnerDigestConverter =
        ark_crypto_primitives::merkle_tree::IdentityDigestConverter<Blake3Digest>;
    type InnerDigest = Blake3Digest;
    type LeafHash = crate::blake3::Blake3LeafHash;
    type TwoToOneHash = crate::blake3::Blake3Compress;
}

impl crate::hash_config::TypedHashConfig for Blake3MerkleConfig {
    /// Identifies this configuration as the BLAKE3 variant of `HashConfig`.
    const HASH_CONFIG: crate::HashConfig = crate::HashConfig::Blake3;
    /// Fiat-Shamir sponge paired with this Merkle configuration.
    type Sponge = crate::blake3::Blake3Sponge;
    /// The transcript operates on raw bytes.
    type Unit = u8;
}
14 changes: 14 additions & 0 deletions provekit/common/src/blake3/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
//! BLAKE3-based hash components for ProveKit.

// Leaf and two-to-one BLAKE3 hashes (CRHScheme / TwoToOneCRHScheme impls).
mod hash;
// Merkle tree `Config` wiring the hashes together.
mod merkle;
// Proof-of-work re-export (spongefish-pow).
mod pow;
// Duplex sponge for Fiat-Shamir transcripts.
mod sponge;
// Private `whir` submodule; intentionally not re-exported here.
mod whir;

pub use {
    hash::{Blake3Compress, Blake3LeafHash},
    merkle::{Blake3Digest, Blake3MerkleConfig},
    pow::Blake3PoW,
    sponge::Blake3Sponge,
};
5 changes: 5 additions & 0 deletions provekit/common/src/blake3/pow.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
//! BLAKE3-based Proof-of-Work implementation.
//!
//! Re-exports the Blake3PoW implementation from spongefish-pow library.

pub use spongefish_pow::blake3::Blake3PoW;
108 changes: 108 additions & 0 deletions provekit/common/src/blake3/sponge.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
//! BLAKE3 sponge for Fiat-Shamir transcripts.
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Similar to the SHA256 sponge, this is a custom duplex construction. BLAKE3's XOF mode is used for squeezing which is good, but the absorb/squeeze state machine (ratcheting on mode switch) should be documented or reviewed for Fiat-Shamir security.

//!
//! This module provides a BLAKE3-based duplex sponge construction
//! for Fiat-Shamir transformations in WHIR proofs, leveraging BLAKE3's
//! extendable output function (XOF) capabilities.

use {blake3, spongefish::duplex_sponge::DuplexSpongeInterface, zeroize::Zeroize};

/// BLAKE3 duplex sponge for Fiat-Shamir transcripts.
///
/// This provides a duplex sponge construction using BLAKE3's XOF mode.
/// BLAKE3 is a modern, fast cryptographic hash function with excellent
/// performance characteristics.
///
/// - **Performance**: Typically faster than SHA256 and Keccak
/// - **Security**: 256-bit security level
/// - **XOF**: Extendable output function for arbitrary-length outputs
#[derive(Clone)]
pub struct Blake3Sponge {
    /// Hasher accumulating absorbed input; the live state while in
    /// absorb mode.
    hasher: blake3::Hasher,
    /// XOF reader producing squeezed output; `Some` only while in
    /// squeeze mode.
    output_reader: Option<blake3::OutputReader>,
    /// Mode flag: `true` = absorbing, `false` = squeezing.
    absorbing: bool,
}

impl Default for Blake3Sponge {
fn default() -> Self {
Self {
hasher: blake3::Hasher::new(),
output_reader: None,
absorbing: true,
}
}
}

impl DuplexSpongeInterface<u8> for Blake3Sponge {
    /// Seeds a fresh sponge by absorbing the 32-byte domain-separation IV.
    fn new(iv: [u8; 32]) -> Self {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&iv);
        Self {
            hasher,
            output_reader: None,
            absorbing: true,
        }
    }

    /// Absorbs `input` into the transcript.
    ///
    /// On a squeeze→absorb transition the sponge ratchets: it draws the
    /// *next* 32 bytes from the XOF reader — so the new state depends on the
    /// current reader position, i.e. on everything squeezed so far — and
    /// restarts the hasher seeded with those bytes. Both prover and verifier
    /// perform the same absorb/squeeze sequence, so the result is
    /// deterministic on both sides.
    ///
    /// NOTE(review): this squeeze→absorb ratchet is a custom duplex
    /// construction, not a standardized sponge mode; its Fiat-Shamir
    /// security should be reviewed (same concern as the SHA256 sponge).
    fn absorb_unchecked(&mut self, input: &[u8]) -> &mut Self {
        // If we were squeezing, finalize that phase and restart
        if !self.absorbing {
            // Ratchet: hash the previous state to get a new starting point
            let prev_hash = if let Some(ref mut reader) = self.output_reader {
                let mut buf = [0u8; 32];
                reader.fill(&mut buf);
                buf
            } else {
                // Fallback; `absorbing == false` normally implies the reader
                // was set in `squeeze_unchecked`, so this branch should be
                // unreachable in practice.
                *self.hasher.finalize().as_bytes()
            };

            self.hasher = blake3::Hasher::new();
            self.hasher.update(&prev_hash);
            self.output_reader = None;
            self.absorbing = true;
        }

        self.hasher.update(input);
        self
    }

    /// Squeezes `output.len()` bytes from the transcript.
    ///
    /// The first squeeze after absorbing finalizes the hasher into BLAKE3's
    /// XOF; consecutive squeezes continue the same output stream (squeezing
    /// 16 + 16 bytes yields the same bytes as one 32-byte squeeze).
    fn squeeze_unchecked(&mut self, output: &mut [u8]) -> &mut Self {
        // If we were absorbing, switch to squeezing mode
        if self.absorbing {
            self.output_reader = Some(self.hasher.finalize_xof());
            self.absorbing = false;
        }

        if let Some(ref mut reader) = self.output_reader {
            reader.fill(output);
        }
        self
    }

    /// Explicit ratchet: compresses the entire current state (absorbed input
    /// or XOF position) down to 32 bytes and restarts the hasher from that
    /// seed, returning to absorb mode. Makes the transcript forward-secure
    /// against later state exposure only up to this compression point —
    /// NOTE(review): confirm this matches spongefish's ratchet contract.
    fn ratchet_unchecked(&mut self) -> &mut Self {
        // Finalize current state and restart with the hash as seed
        let hash = if let Some(ref mut reader) = self.output_reader {
            let mut buf = [0u8; 32];
            reader.fill(&mut buf);
            buf
        } else {
            *self.hasher.finalize().as_bytes()
        };

        self.hasher = blake3::Hasher::new();
        self.hasher.update(&hash);
        self.output_reader = None;
        self.absorbing = true;
        self
    }
}

impl Zeroize for Blake3Sponge {
    /// Resets the sponge to a pristine state.
    ///
    /// NOTE(review): this replaces the hasher/reader with fresh values but
    /// does not overwrite the memory of the dropped state —
    /// `blake3::Hasher` exposes no secure-erase API. If true zeroization of
    /// transcript state is a requirement, this needs a different approach.
    fn zeroize(&mut self) {
        self.hasher = blake3::Hasher::new();
        self.output_reader = None;
        self.absorbing = true;
    }
}
Loading
Loading