Skip to content

Commit 44587cc

Browse files
authored
refactor(bb): remove --output_format, have one-size-fits-all format (#16201)
This changes vks to be serialized as bytes using to_field_elements. Proofs were already byte-compatible; this allows bb not to worry about how to split fields into JSON. The changes are holistic, as we now need to split the fields from the binary vk/proof — but this is now trivial with the new deserialization format.
1 parent a05333c commit 44587cc

File tree

54 files changed

+585
-771
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

54 files changed

+585
-771
lines changed

barretenberg/acir_tests/bbjs-test/src/index.ts

Lines changed: 22 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,7 @@ const logger = pino({
99
});
1010

1111
const proofPath = (dir: string) => path.join(dir, "proof");
12-
const proofAsFieldsPath = (dir: string) => path.join(dir, "proof_fields.json");
13-
const publicInputsAsFieldsPath = (dir: string) =>
14-
path.join(dir, "public_inputs_fields.json");
12+
const publicInputsPath = (dir: string) => path.join(dir, "public_inputs");
1513
const vkeyPath = (dir: string) => path.join(dir, "vk");
1614

1715
async function generateProof({
@@ -27,7 +25,7 @@ async function generateProof({
2725
oracleHash?: string;
2826
multiThreaded?: boolean;
2927
}) {
30-
const { UltraHonkBackend, deflattenFields } = await import("@aztec/bb.js");
28+
const { UltraHonkBackend } = await import("@aztec/bb.js");
3129

3230
logger.debug(`Generating proof for ${bytecodePath}...`);
3331
const circuitArtifact = await fs.readFile(bytecodePath);
@@ -45,17 +43,16 @@ async function generateProof({
4543
await fs.writeFile(proofPath(outputDirectory), Buffer.from(proof.proof));
4644
logger.debug("Proof written to " + proofPath(outputDirectory));
4745

48-
await fs.writeFile(
49-
publicInputsAsFieldsPath(outputDirectory),
50-
JSON.stringify(proof.publicInputs)
46+
// Convert public inputs from field strings to binary
47+
const publicInputsBuffer = Buffer.concat(
48+
proof.publicInputs.map((field: string) => {
49+
const hex = field.startsWith('0x') ? field.slice(2) : field;
50+
return Buffer.from(hex.padStart(64, '0'), 'hex');
51+
})
5152
);
53+
await fs.writeFile(publicInputsPath(outputDirectory), publicInputsBuffer);
5254
logger.debug(
53-
"Public inputs written to " + publicInputsAsFieldsPath(outputDirectory)
54-
);
55-
56-
await fs.writeFile(
57-
proofAsFieldsPath(outputDirectory),
58-
JSON.stringify(deflattenFields(proof.proof))
55+
"Public inputs written to " + publicInputsPath(outputDirectory)
5956
);
6057

6158
const verificationKey = await backend.getVerificationKey({
@@ -69,21 +66,24 @@ async function generateProof({
6966
}
7067

7168
async function verifyProof({ directory }: { directory: string }) {
72-
const { BarretenbergVerifier } = await import("@aztec/bb.js");
69+
const { UltraHonkVerifierBackend } = await import("@aztec/bb.js");
7370

74-
const verifier = new BarretenbergVerifier();
71+
const verifier = new UltraHonkVerifierBackend();
7572

7673
const proof = await fs.readFile(proofPath(directory));
7774

78-
const publicInputs = JSON.parse(
79-
await fs.readFile(publicInputsAsFieldsPath(directory), "utf8")
80-
);
75+
// Read binary public inputs and convert to field strings
76+
const publicInputsBinary = await fs.readFile(publicInputsPath(directory));
77+
const publicInputs = [];
78+
for (let i = 0; i < publicInputsBinary.length; i += 32) {
79+
const chunk = publicInputsBinary.slice(i, Math.min(i + 32, publicInputsBinary.length));
80+
publicInputs.push('0x' + chunk.toString('hex'));
81+
}
8182
logger.debug(`publicInputs: ${JSON.stringify(publicInputs)}`);
82-
const vkey = await fs.readFile(vkeyPath(directory));
83+
const verificationKey = await fs.readFile(vkeyPath(directory));
8384

84-
const verified = await verifier.verifyUltraHonkProof(
85-
{ proof: new Uint8Array(proof), publicInputs },
86-
new Uint8Array(vkey)
85+
const verified = await verifier.verifyProof(
86+
{ proof: new Uint8Array(proof), publicInputs, verificationKey},
8787
);
8888

8989
await verifier.destroy();

barretenberg/acir_tests/bootstrap.sh

Lines changed: 22 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -21,14 +21,18 @@ tests_hash=$(hash_str \
2121
../ts/.rebuild_patterns \
2222
../noir/))
2323

24+
function hex_to_fields_json {
25+
# 1. split encoded hex into 64-character lines 2. encode as JSON array of hex strings
26+
fold -w64 | jq -R -s -c 'split("\n") | map(select(length > 0)) | map("0x" + .)'
27+
}
28+
2429
# Generate inputs for a given recursively verifying program.
2530
function run_proof_generation {
2631
local program=$1
2732
local native_build_dir=$(../cpp/scripts/native-preset-build-dir)
2833
local bb=$(realpath ../cpp/$native_build_dir/bin/bb)
2934
local outdir=$(mktemp -d)
3035
trap "rm -rf $outdir" EXIT
31-
local adjustment=16
3236
local ipa_accumulation_flag=""
3337

3438
cd ./acir_tests/assert_statement
@@ -37,35 +41,37 @@ function run_proof_generation {
3741

3842
# Adjust settings based on program type
3943
if [[ $program == *"rollup"* ]]; then
40-
adjustment=26
4144
ipa_accumulation_flag="--ipa_accumulation"
4245
fi
4346
# If the test program has zk in its name we would like to use the zk prover, so we empty the flag in this case.
4447
if [[ $program == *"zk"* ]]; then
4548
disable_zk=""
4649
fi
47-
local prove_cmd="$bb prove --scheme ultra_honk $disable_zk $ipa_accumulation_flag --output_format fields --write_vk -o $outdir -b ./target/program.json -w ./target/witness.gz"
50+
local prove_cmd="$bb prove --scheme ultra_honk $disable_zk $ipa_accumulation_flag --write_vk -o $outdir -b ./target/program.json -w ./target/witness.gz"
4851
echo_stderr "$prove_cmd"
4952
dump_fail "$prove_cmd"
5053

51-
local vk_fields=$(cat "$outdir/vk_fields.json")
52-
local vk_hash_fields=$(cat "$outdir/vk_hash_fields.json")
53-
local public_inputs_fields=$(cat "$outdir/public_inputs_fields.json")
54-
local proof_fields=$(cat "$outdir/proof_fields.json")
5554

56-
generate_toml "$program" "$vk_fields" "$vk_hash_fields" "$proof_fields" "$public_inputs_fields"
55+
# Split the hex-encoded vk bytes into field boundaries (but still hex-encoded), first making 64-character lines and then encoding as JSON.
56+
# This used to be done by barretenberg itself, but with serialization now always being in field elements we can do it outside of bb.
57+
local vk_fields=$(cat "$outdir/vk" | xxd -p -c 0 | hex_to_fields_json)
58+
local vk_hash_field="\"0x$(cat "$outdir/vk_hash" | xxd -p -c 0)\""
59+
local public_inputs_fields=$(cat "$outdir/public_inputs" | xxd -p -c 0 | hex_to_fields_json)
60+
local proof_fields=$(cat "$outdir/proof" | xxd -p -c 0 | hex_to_fields_json)
61+
62+
generate_toml "$program" "$vk_fields" "$vk_hash_field" "$proof_fields" "$public_inputs_fields"
5763
}
5864

5965
function generate_toml {
6066
local program=$1
6167
local vk_fields=$2
62-
local vk_hash_fields=$3
68+
local vk_hash_field=$3
6369
local proof_fields=$4
64-
local num_inner_public_inputs=$5
70+
local public_inputs_fields=$5
6571
local output_file="../$program/Prover.toml"
6672

6773
jq -nr \
68-
--arg key_hash "$vk_hash_fields" \
74+
--arg key_hash "$vk_hash_field" \
6975
--argjson vk_f "$vk_fields" \
7076
--argjson public_inputs_f "$public_inputs_fields" \
7177
--argjson proof_f "$proof_fields" \
@@ -79,7 +85,6 @@ function generate_toml {
7985
}
8086

8187
function regenerate_recursive_inputs {
82-
local program=$1
8388
# Compile the assert_statement test as it's used for the recursive tests.
8489
cd ./acir_tests/assert_statement
8590
local nargo=$(realpath ../../../../noir/noir-repo/target/release/nargo)
@@ -91,7 +96,7 @@ function regenerate_recursive_inputs {
9196
parallel 'run_proof_generation {}' ::: $(ls internal_test_programs)
9297
}
9398

94-
export -f regenerate_recursive_inputs run_proof_generation generate_toml
99+
export -f hex_to_fields_json regenerate_recursive_inputs run_proof_generation generate_toml
95100

96101
function compile {
97102
echo_header "Compiling acir_tests"
@@ -160,10 +165,10 @@ function test_cmds {
160165

161166
# bb.js browser tests. Isolate because server.
162167
local browser_prefix="$tests_hash:ISOLATE=1:NET=1:CPUS=8"
163-
echo "$browser_prefix:NAME=chrome_verify_honk_proof $scripts/browser_prove.sh verify_honk_proof chrome"
164-
echo "$browser_prefix:NAME=chrome_a_1_mul $scripts/browser_prove.sh a_1_mul chrome"
165-
echo "$browser_prefix:NAME=webkit_verify_honk_proof $scripts/browser_prove.sh verify_honk_proof webkit"
166-
echo "$browser_prefix:NAME=webkit_a_1_mul $scripts/browser_prove.sh a_1_mul webkit"
168+
echo "$browser_prefix $scripts/browser_prove.sh verify_honk_proof chrome"
169+
echo "$browser_prefix $scripts/browser_prove.sh a_1_mul chrome"
170+
echo "$browser_prefix $scripts/browser_prove.sh verify_honk_proof webkit"
171+
echo "$browser_prefix $scripts/browser_prove.sh a_1_mul webkit"
167172

168173
# bb.js tests.
169174
# ecdsa_secp256r1_3x through bb.js on node to check 256k support.

barretenberg/acir_tests/browser-test-app/src/index.ts

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -30,17 +30,16 @@ function installUltraHonkGlobals() {
3030
}
3131

3232
async function verify(proofData: ProofData, verificationKey: Uint8Array) {
33-
const { BarretenbergVerifier } = await import("@aztec/bb.js");
33+
const { UltraHonkVerifierBackend } = await import("@aztec/bb.js");
3434

3535
logger.debug(`verifying...`);
36-
const verifier = new BarretenbergVerifier();
37-
const verified = await verifier.verifyUltraHonkProof(
38-
proofData,
39-
verificationKey
36+
const backend = new UltraHonkVerifierBackend();
37+
const verified = await backend.verifyProof(
38+
{...proofData, verificationKey}
4039
);
4140
logger.debug(`verified: ${verified}`);
4241

43-
await verifier.destroy();
42+
await backend.destroy();
4443

4544
logger.debug("test complete.");
4645
return verified;

barretenberg/acir_tests/scripts/bb_prove_bbjs_verify.sh

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@ $bb prove \
2222
-b target/program.json \
2323
-w target/witness.gz \
2424
-k output-$$/vk \
25-
--output_format bytes_and_fields \
2625
-o output-$$
2726

2827
# Verify the proof with bb.js classes

barretenberg/acir_tests/scripts/bb_prove_sol_verify.sh

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -34,16 +34,15 @@ mkdir -p output-$$
3434
trap "rm -rf output-$$" EXIT
3535

3636
# Create a proof, write the solidity contract, and write the binary public inputs (the fields output format was removed; public inputs are now read from the binary file)
37-
$bb prove $flags -b target/program.json --oracle_hash keccak --output_format bytes_and_fields --write_vk -o output-$$
37+
$bb prove $flags -b target/program.json --oracle_hash keccak --write_vk -o output-$$
3838
$bb verify $flags --oracle_hash keccak -i output-$$/public_inputs -k output-$$/vk -p output-$$/proof
3939
$bb write_solidity_verifier $write_contract_flags -k output-$$/vk -o output-$$/Verifier.sol
4040

4141
# Use solcjs to compile the generated key contract with the template verifier and test contract
4242
# index.js will start an anvil, on a random port
4343
# Deploy the verifier then send a test transaction
4444
PROOF="output-$$/proof" \
45-
PROOF_AS_FIELDS="output-$$/proof_fields.json" \
46-
PUBLIC_INPUTS_AS_FIELDS="output-$$/public_inputs_fields.json" \
45+
PUBLIC_INPUTS="output-$$/public_inputs" \
4746
VERIFIER_PATH="output-$$/Verifier.sol" \
4847
TEST_PATH="../../sol-test/HonkTest.sol" \
4948
HAS_ZK="$has_zk" \

barretenberg/acir_tests/scripts/bbjs_prove_bb_verify.sh

Lines changed: 1 addition & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -14,17 +14,7 @@ node ../../bbjs-test prove \
1414
-w target/witness.gz \
1515
-o output-$$
1616

17-
proof_bytes=$(cat output-$$/proof | xxd -p)
18-
public_inputs=$(cat output-$$/public_inputs_fields.json | jq -r '.[]')
19-
20-
public_inputs_bytes=""
21-
for input in $public_inputs; do
22-
public_inputs_bytes+=$input
23-
done
24-
25-
# Combine proof header and the proof to a single file
26-
echo -n $proof_bytes | xxd -r -p > output-$$/proof
27-
echo -n $public_inputs_bytes | xxd -r -p > output-$$/public_inputs
17+
# The proof and public_inputs are already in binary format from bbjs-test
2818

2919
bb=$(../../../cpp/scripts/find-bb)
3020
# Verify the proof with bb cli

barretenberg/acir_tests/scripts/bbjs_prove_sol_verify.sh

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -35,8 +35,7 @@ $bb write_solidity_verifier --scheme ultra_honk -k output-$$/vk -o output-$$/Ver
3535

3636
# Verify the proof using the solidity verifier
3737
PROOF="output-$$/proof" \
38-
PROOF_AS_FIELDS="output-$$/proof_fields.json" \
39-
PUBLIC_INPUTS_AS_FIELDS="output-$$/public_inputs_fields.json" \
38+
PUBLIC_INPUTS="output-$$/public_inputs" \
4039
VERIFIER_PATH="output-$$/Verifier.sol" \
4140
TEST_PATH="../../sol-test/HonkTest.sol" \
4241
HAS_ZK="$has_zk" \

barretenberg/acir_tests/sol-test/src/index.js

Lines changed: 25 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -165,6 +165,20 @@ const linkLibrary = (bytecode, libraryName, libraryAddress) => {
165165
return bytecode.replace(regex, address);
166166
};
167167

168+
/**
169+
* Converts binary data to array of field elements (32-byte chunks as hex strings)
170+
* @param {Buffer} buffer - Binary data
171+
* @return {Array<String>} Array of hex strings with 0x prefix
172+
*/
173+
const binaryToFields = (buffer) => {
174+
const fields = [];
175+
for (let i = 0; i < buffer.length; i += 32) {
176+
const chunk = buffer.slice(i, i + 32);
177+
fields.push('0x' + chunk.toString('hex'));
178+
}
179+
return fields;
180+
};
181+
168182
/**
169183
* Takes in a proof as fields, and returns the public inputs, as well as the number of public inputs
170184
* @param {Array<String>} proofAsFields
@@ -225,26 +239,22 @@ try {
225239
const proof = readFileSync(proofPath);
226240
proofStr = proof.toString("hex");
227241

228-
let publicInputsAsFieldsPath = getEnvVarCanBeUndefined(
229-
"PUBLIC_INPUTS_AS_FIELDS"
230-
); // PUBLIC_INPUTS_AS_FIELDS is not defined for bb plonk, but is for bb honk and bbjs honk.
231-
var publicInputs;
232-
let proofAsFieldsPath = getEnvVarCanBeUndefined("PROOF_AS_FIELDS"); // PROOF_AS_FIELDS is not defined for bbjs, but is for bb plonk and bb honk.
242+
let publicInputsPath = getEnvVarCanBeUndefined("PUBLIC_INPUTS");
243+
var publicInputs = [];
233244
let numExtraPublicInputs = 0;
234245
let extraPublicInputs = [];
235-
if (proofAsFieldsPath) {
236-
const proofAsFields = readFileSync(proofAsFieldsPath);
246+
247+
// For flows that use binary proof format, extract public inputs from the proof
248+
const proofAsFields = binaryToFields(proof);
249+
if (proofAsFields.length > NUMBER_OF_FIELDS_IN_PROOF) {
237250
// We need to extract the public inputs from the proof. This might be empty, or just the pairing point object, or be the entire public inputs...
238-
[numExtraPublicInputs, extraPublicInputs] = readPublicInputs(
239-
JSON.parse(proofAsFields.toString())
240-
);
251+
[numExtraPublicInputs, extraPublicInputs] = readPublicInputs(proofAsFields);
241252
}
242-
// We need to do this because plonk doesn't define this path
243-
if (publicInputsAsFieldsPath) {
244-
const innerPublicInputs = JSON.parse(
245-
readFileSync(publicInputsAsFieldsPath).toString()
246-
); // assumes JSON array of PI hex strings
247253

254+
// Read public inputs from binary file if available
255+
if (publicInputsPath) {
256+
const publicInputsBinary = readFileSync(publicInputsPath);
257+
const innerPublicInputs = binaryToFields(publicInputsBinary);
248258
publicInputs = innerPublicInputs.concat(extraPublicInputs);
249259
} else {
250260
// for plonk, the extraPublicInputs are all of the public inputs

barretenberg/bootstrap.sh

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
#!/usr/bin/env bash
22
source $(git rev-parse --show-toplevel)/ci3/source
33

4-
54
function bootstrap_all {
65
# To run bb we need a crs.
76
# Download ignition up front to ensure no race conditions at runtime.
@@ -32,20 +31,6 @@ case "$cmd" in
3231
"release-preview")
3332
./docs/bootstrap.sh release-preview
3433
;;
35-
bootstrap_e2e_hack)
36-
echo "WARNING: This assumes your PR only changes barretenberg and the rest of the repository is unchanged from master."
37-
echo "WARNING: This is only sound if you have not changed VK generation! (or noir-projects VKs will be incorrect)."
38-
echo "WARNING: It builds up until yarn-project and allows end-to-end tests (not boxes/playground/release image etc)."
39-
merge_base=$(git merge-base HEAD origin/master)
40-
for project in noir barretenberg avm-transpiler noir-projects l1-contracts yarn-project ; do
41-
if [ $project == barretenberg ]; then
42-
../$project/bootstrap.sh # i.e. this script
43-
else
44-
AZTEC_CACHE_COMMIT=$merge_base ../$project/bootstrap.sh
45-
fi
46-
done
47-
;;
48-
4934
*)
5035
echo "Unknown command: $cmd"
5136
exit 1

barretenberg/cpp/scripts/test_civc_standalone_vks_havent_changed.sh

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
#!/bin/bash
22
source $(git rev-parse --show-toplevel)/ci3/source
33

4+
# export bb as it is needed when using exported functions
5+
export bb=$(./find-bb)
46
cd ..
57

68
# NOTE: We pin the captured IVC inputs to a known master commit, exploiting that there won't be frequent changes.
@@ -11,7 +13,8 @@ cd ..
1113
# - Generate a hash for versioning: sha256sum bb-civc-inputs.tar.gz
1214
# - Upload the compressed results: aws s3 cp bb-civc-inputs.tar.gz s3://aztec-ci-artifacts/protocol/bb-civc-inputs-[hash(0:8)].tar.gz
1315
# Note: In case of the "Test suite failed to run ... Unexpected token 'with' " error, need to run: docker pull aztecprotocol/build:3.0
14-
pinned_short_hash="e5081516"
16+
17+
pinned_short_hash="9c83acbc"
1518
pinned_civc_inputs_url="https://aztec-ci-artifacts.s3.us-east-2.amazonaws.com/protocol/bb-civc-inputs-${pinned_short_hash}.tar.gz"
1619

1720
function compress_and_upload {
@@ -44,7 +47,7 @@ if [[ "${1:-}" == "--update_inputs" ]]; then
4447
# Generate new inputs
4548
echo "Running bootstrap to generate new IVC inputs..."
4649

47-
../../bootstrap.sh # bootstrap aztec-packages from root
50+
BOOTSTRAP_TO=yarn-project ../../bootstrap.sh # bootstrap aztec-packages from root
4851
../../yarn-project/end-to-end/bootstrap.sh build_bench # build bench to generate IVC inputs
4952

5053
compress_and_upload ../../yarn-project/end-to-end/example-app-ivc-inputs-out
@@ -62,9 +65,9 @@ function check_circuit_vks {
6265
local flow_folder="$inputs_tmp_dir/$1"
6366

6467
if [[ "${2:-}" == "--update_inputs" ]]; then
65-
./build/bin/bb check --update_inputs --scheme client_ivc --ivc_inputs_path "$flow_folder/ivc-inputs.msgpack" || { echo_stderr "Error: Likely VK change detected in $flow_folder! Updating inputs."; exit 1; }
68+
$bb check --update_inputs --scheme client_ivc --ivc_inputs_path "$flow_folder/ivc-inputs.msgpack" || { echo_stderr "Error: Likely VK change detected in $flow_folder! Updating inputs."; exit 1; }
6669
else
67-
./build/bin/bb check --scheme client_ivc --ivc_inputs_path "$flow_folder/ivc-inputs.msgpack" || { echo_stderr "Error: Likely VK change detected in $flow_folder!"; exit 1; }
70+
$bb check --scheme client_ivc --ivc_inputs_path "$flow_folder/ivc-inputs.msgpack" || { echo_stderr "Error: Likely VK change detected in $flow_folder!"; exit 1; }
6871
fi
6972
}
7073

0 commit comments

Comments
 (0)