23 changes: 23 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 3 additions & 0 deletions Cargo.toml
@@ -10,6 +10,7 @@ members = [
"bincode",
"blake3-hasher",
"bls-signatures",
"bls12-381-core",
"bn254",
"borsh",
"client-traits",
@@ -176,6 +177,7 @@ five8_const = "1.0.0"
getrandom = "0.2.10"
group = "0.13.0"
hex = "0.4.3"
hex-literal = "1.1.0"
hmac = "0.12.1"
im = "15.1.0"
indicatif = "0.17.9"
@@ -226,6 +228,7 @@ solana-big-mod-exp = { path = "big-mod-exp", version = "3.0.0" }
solana-bincode = { path = "bincode", version = "3.0.0" }
solana-blake3-hasher = { path = "blake3-hasher", version = "3.0.0" }
solana-bls-signatures = { path = "bls-signatures", version = "1.0.0" }
solana-bls12-381 = { path = "bls12-381-core", version = "0.1.0" }
solana-bn254 = { path = "bn254", version = "3.0.0" }
solana-borsh = { path = "borsh", version = "3.0.0" }
solana-client-traits = { path = "client-traits", version = "3.0.0" }
38 changes: 38 additions & 0 deletions bls12-381-core/Cargo.toml
@@ -0,0 +1,38 @@
[package]
name = "solana-bls12-381"
description = "Solana BLS12-381"
documentation = "https://docs.rs/solana-bls12-381"
version = "0.1.0"
authors = { workspace = true }
repository = { workspace = true }
homepage = { workspace = true }
license = { workspace = true }
edition = { workspace = true }
include = ["src/**/*"]

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
all-features = true
rustdoc-args = ["--cfg=docsrs"]

[dependencies]

[target.'cfg(not(target_os = "solana"))'.dependencies]
blst = { workspace = true }
blstrs = { workspace = true }
group = { workspace = true }
pairing = { workspace = true }

[target.'cfg(target_os = "solana")'.dependencies]
solana-define-syscall = { workspace = true }

[dev-dependencies]
array-bytes = { workspace = true }
criterion = { workspace = true }
hex-literal = { workspace = true }
serde = { workspace = true }
serde_derive = { workspace = true }
serde_json = { workspace = true }

[lints]
workspace = true
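
The target-gated dependency lists above imply two build flavors: off-chain builds do the curve arithmetic locally with blst/blstrs, while on-chain builds (target_os = "solana") are expected to forward to syscalls declared through solana-define-syscall. A minimal sketch of how such a split is typically wired up in the crate root follows; the module layout and the on-chain wiring are assumptions for illustration, not taken from this PR.

// lib.rs sketch (assumed layout; not taken from this PR)
#[cfg(not(target_os = "solana"))]
pub mod addition; // host path: group arithmetic performed with blstrs

#[cfg(target_os = "solana")]
pub mod addition {
    // On-chain path: the same entry points would delegate to syscalls
    // declared through solana-define-syscall (exact names unspecified here).
}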
254 changes: 254 additions & 0 deletions bls12-381-core/src/addition.rs
@@ -0,0 +1,254 @@
use {
    crate::{reverse_48_byte_chunks, swap_g2_c0_c1, Endianness, Version},
    blstrs::{G1Affine, G1Projective, G2Affine, G2Projective},
};

pub fn bls12_381_g1_addition(
    _version: Version,
    input: &[u8],
    endianness: Endianness,
) -> Option<Vec<u8>> {
    if input.len() != 192 {
        return None;
    }

    let p1 = match endianness {
        Endianness::BE => {
            // make zero-copy when possible
            let bytes: &[u8; 96] = input[0..96].try_into().ok()?;
            G1Affine::from_uncompressed(bytes).into_option()?
        }
        Endianness::LE => {
            // to reverse the bytes, we need an owned copy
            let mut bytes: [u8; 96] = input[0..96].try_into().ok()?;
            reverse_48_byte_chunks(&mut bytes);
            G1Affine::from_uncompressed(&bytes).into_option()?
        }
    };

    let p2 = match endianness {
        Endianness::BE => {
            let bytes: &[u8; 96] = input[96..192].try_into().ok()?;
            G1Affine::from_uncompressed(bytes).into_option()?
        }
        Endianness::LE => {
            let mut bytes: [u8; 96] = input[96..192].try_into().ok()?;
            reverse_48_byte_chunks(&mut bytes);
            G1Affine::from_uncompressed(&bytes).into_option()?
        }
    };

    let p1_proj: G1Projective = p1.into();
    let sum_proj = p1_proj + p2;
    let mut sum_affine = sum_proj.to_uncompressed();

    if matches!(endianness, Endianness::LE) {
        reverse_48_byte_chunks(&mut sum_affine);
    }
    Some(sum_affine.to_vec())
}

pub fn bls12_381_g2_addition(
    _version: Version,
    input: &[u8],
    endianness: Endianness,
) -> Option<Vec<u8>> {
    if input.len() != 384 {
        return None;
    }

    let p1 = match endianness {
        Endianness::BE => {
            let bytes: &[u8; 192] = input[0..192].try_into().ok()?;
            G2Affine::from_uncompressed(bytes).into_option()?
        }
        Endianness::LE => {
            let mut bytes: [u8; 192] = input[0..192].try_into().ok()?;
            reverse_48_byte_chunks(&mut bytes); // Fix Fq endianness
            swap_g2_c0_c1(&mut bytes); // Fix Fq2 ordering
            G2Affine::from_uncompressed(&bytes).into_option()?
        }
    };

    let p2 = match endianness {
        Endianness::BE => {
            let bytes: &[u8; 192] = input[192..384].try_into().ok()?;
            G2Affine::from_uncompressed(bytes).into_option()?
        }
        Endianness::LE => {
            let mut bytes: [u8; 192] = input[192..384].try_into().ok()?;
            reverse_48_byte_chunks(&mut bytes);
            swap_g2_c0_c1(&mut bytes);
            G2Affine::from_uncompressed(&bytes).into_option()?
        }
    };

    let p1_proj: G2Projective = p1.into();
    let sum_proj = p1_proj + p2;
    let mut sum_affine = sum_proj.to_uncompressed();

    if matches!(endianness, Endianness::LE) {
        swap_g2_c0_c1(&mut sum_affine);
        reverse_48_byte_chunks(&mut sum_affine);
    }
    Some(sum_affine.to_vec())
}

#[cfg(test)]
mod tests {
    use {super::*, crate::test_vectors::*};

    fn run_g1_test(
        op_name: &str,
        func: fn(Version, &[u8], Endianness) -> Option<Vec<u8>>,
        input_be: &[u8],
        output_be: &[u8],
        input_le: &[u8],
        output_le: &[u8],
    ) {
        // Test Big Endian
        let result_be = func(Version::V0, input_be, Endianness::BE);
        assert_eq!(
            result_be,
            Some(output_be.to_vec()),
            "G1 {} BE Test Failed",
            op_name
        );

        // Test Little Endian
        let result_le = func(Version::V0, input_le, Endianness::LE);
        assert_eq!(
            result_le,
            Some(output_le.to_vec()),
            "G1 {} LE Test Failed",
            op_name
        );
    }

    fn run_g2_test(
        op_name: &str,
        func: fn(Version, &[u8], Endianness) -> Option<Vec<u8>>,
        input_be: &[u8],
        output_be: &[u8],
        input_le: &[u8],
        output_le: &[u8],
    ) {
        // Test Big Endian
        let result_be = func(Version::V0, input_be, Endianness::BE);
        assert_eq!(
            result_be,
            Some(output_be.to_vec()),
            "G2 {} BE Test Failed",
            op_name
        );

        // Test Little Endian
        let result_le = func(Version::V0, input_le, Endianness::LE);
        assert_eq!(
            result_le,
            Some(output_le.to_vec()),
            "G2 {} LE Test Failed",
            op_name
        );
    }

    #[test]
    fn test_g1_addition_random() {
        run_g1_test(
            "ADD",
            bls12_381_g1_addition,
            INPUT_BE_G1_ADD_RANDOM,
            OUTPUT_BE_G1_ADD_RANDOM,
            INPUT_LE_G1_ADD_RANDOM,
            OUTPUT_LE_G1_ADD_RANDOM,
        );
    }

    #[test]
    fn test_g1_addition_doubling() {
        run_g1_test(
            "ADD",
            bls12_381_g1_addition,
            INPUT_BE_G1_ADD_DOUBLING,
            OUTPUT_BE_G1_ADD_DOUBLING,
            INPUT_LE_G1_ADD_DOUBLING,
            OUTPUT_LE_G1_ADD_DOUBLING,
        );
    }

    #[test]
    fn test_g1_addition_infinity_edge_cases() {
        // P + Inf
        run_g1_test(
            "ADD",
            bls12_381_g1_addition,
            INPUT_BE_G1_ADD_P_PLUS_INF,
            OUTPUT_BE_G1_ADD_P_PLUS_INF,
            INPUT_LE_G1_ADD_P_PLUS_INF,
            OUTPUT_LE_G1_ADD_P_PLUS_INF,
        );
        // Inf + Inf
        run_g1_test(
            "ADD",
            bls12_381_g1_addition,
            INPUT_BE_G1_ADD_INF_PLUS_INF,
            OUTPUT_BE_G1_ADD_INF_PLUS_INF,
            INPUT_LE_G1_ADD_INF_PLUS_INF,
            OUTPUT_LE_G1_ADD_INF_PLUS_INF,
        );
    }

    #[test]
    fn test_g2_addition_random() {
        run_g2_test(
            "ADD",
            bls12_381_g2_addition,
            INPUT_BE_G2_ADD_RANDOM,
            OUTPUT_BE_G2_ADD_RANDOM,
            INPUT_LE_G2_ADD_RANDOM,
            OUTPUT_LE_G2_ADD_RANDOM,
        );
    }

    #[test]
    fn test_g2_addition_doubling() {
        run_g2_test(
            "ADD",
            bls12_381_g2_addition,
            INPUT_BE_G2_ADD_DOUBLING,
            OUTPUT_BE_G2_ADD_DOUBLING,
            INPUT_LE_G2_ADD_DOUBLING,
            OUTPUT_LE_G2_ADD_DOUBLING,
        );
    }

    #[test]
    fn test_g2_addition_infinity_edge_cases() {
        // P + Inf
        run_g2_test(
            "ADD",
            bls12_381_g2_addition,
            INPUT_BE_G2_ADD_P_PLUS_INF,
            OUTPUT_BE_G2_ADD_P_PLUS_INF,
            INPUT_LE_G2_ADD_P_PLUS_INF,
            OUTPUT_LE_G2_ADD_P_PLUS_INF,
        );
        // Inf + Inf
        run_g2_test(
            "ADD",
            bls12_381_g2_addition,
            INPUT_BE_G2_ADD_INF_PLUS_INF,
            OUTPUT_BE_G2_ADD_INF_PLUS_INF,
            INPUT_LE_G2_ADD_INF_PLUS_INF,
            OUTPUT_LE_G2_ADD_INF_PLUS_INF,
        );
    }

    #[test]
    fn test_invalid_length() {
        // G1 expects 192 bytes
        assert!(bls12_381_g1_addition(Version::V0, &[0u8; 191], Endianness::BE).is_none());
        // G2 expects 384 bytes
        assert!(bls12_381_g2_addition(Version::V0, &[0u8; 383], Endianness::BE).is_none());
    }
}
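
The tests above drive the functions through fixed vectors; for orientation, the sketch below doubles the G1 generator through the big-endian host path. It is illustrative only and not part of this PR: it assumes the crate re-exports bls12_381_g1_addition, Endianness, and Version at its root, and uses group::Group for the generator together with the same 96-byte uncompressed encoding the implementation relies on.

use {
    blstrs::G1Projective,
    group::Group,
    solana_bls12_381::{bls12_381_g1_addition, Endianness, Version},
};

fn main() {
    // Two copies of the uncompressed generator form the 192-byte input.
    let g = G1Projective::generator().to_uncompressed();
    let mut input = Vec::with_capacity(192);
    input.extend_from_slice(&g);
    input.extend_from_slice(&g);

    // G + G through the big-endian host path; `None` would signal a
    // wrong-length or malformed input.
    let doubled = bls12_381_g1_addition(Version::V0, &input, Endianness::BE)
        .expect("valid 192-byte input");
    assert_eq!(doubled.len(), 96);
}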