diff --git a/yarn-project/archiver/package.json b/yarn-project/archiver/package.json index 2d33ba7cbf3a..68e1b00a467e 100644 --- a/yarn-project/archiver/package.json +++ b/yarn-project/archiver/package.json @@ -69,6 +69,7 @@ "@aztec/blob-lib": "workspace:^", "@aztec/blob-sink": "workspace:^", "@aztec/constants": "workspace:^", + "@aztec/epoch-cache": "workspace:^", "@aztec/ethereum": "workspace:^", "@aztec/foundation": "workspace:^", "@aztec/kv-store": "workspace:^", diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 7496b1302ee2..b4faecd0d16c 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -2,9 +2,11 @@ import { Blob } from '@aztec/blob-lib'; import type { BlobSinkClientInterface } from '@aztec/blob-sink/client'; import { BlobWithIndex } from '@aztec/blob-sink/types'; import { GENESIS_ARCHIVE_ROOT } from '@aztec/constants'; +import type { EpochCache, EpochCommitteeInfo } from '@aztec/epoch-cache'; import { DefaultL1ContractsConfig, InboxContract, RollupContract, type ViemPublicClient } from '@aztec/ethereum'; import { Buffer16, Buffer32 } from '@aztec/foundation/buffer'; import { times } from '@aztec/foundation/collection'; +import { Secp256k1Signer } from '@aztec/foundation/crypto'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { type Logger, createLogger } from '@aztec/foundation/log'; @@ -13,10 +15,11 @@ import { sleep } from '@aztec/foundation/sleep'; import { bufferToHex, withoutHexPrefix } from '@aztec/foundation/string'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { type InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; -import { L2Block } from '@aztec/stdlib/block'; +import { CommitteeAttestation, L2Block } from '@aztec/stdlib/block'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { PrivateLog } from '@aztec/stdlib/logs'; import { InboxLeaf } from '@aztec/stdlib/messaging'; +import { makeBlockAttestationFromBlock } from '@aztec/stdlib/testing'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { jest } from '@jest/globals'; @@ -30,6 +33,8 @@ import { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js'; import { updateRollingHash } from './structs/inbox_message.js'; interface MockRollupContractRead { + /** Returns the target committee size */ + getTargetCommitteeSize: () => Promise<bigint>; /** Returns the rollup version. */ getVersion: () => Promise<bigint>; /** Given an L2 block number, returns the archive. 
*/ @@ -81,9 +86,19 @@ describe('Archiver', () => { publicClient.getBlockNumber.mockResolvedValue(nums.at(-1)!); }; + const makeBlock = async (blockNumber: number) => { + const block = await L2Block.random(blockNumber, txsPerBlock, blockNumber + 1, 2); + block.header.globalVariables.timestamp = BigInt(now + Number(ETHEREUM_SLOT_DURATION) * (blockNumber + 1)); + block.body.txEffects.forEach((txEffect, i) => { + txEffect.privateLogs = times(getNumPrivateLogsForTx(block.number, i), () => PrivateLog.random()); + }); + return block; + }; + let publicClient: MockProxy<ViemPublicClient>; let instrumentation: MockProxy<ArchiverInstrumentation>; let blobSinkClient: MockProxy<BlobSinkClientInterface>; + let epochCache: MockProxy<EpochCache>; let archiverStore: ArchiverDataStore; let l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32 }; let now: number; @@ -132,6 +147,8 @@ describe('Archiver', () => { }) as any); blobSinkClient = mock<BlobSinkClientInterface>(); + epochCache = mock<EpochCache>(); + epochCache.getCommitteeForEpoch.mockResolvedValue({ committee: [] as EthAddress[] } as EpochCommitteeInfo); const tracer = getTelemetryClient().getTracer(''); instrumentation = mock<ArchiverInstrumentation>({ isEnabled: () => true, tracer }); @@ -152,17 +169,12 @@ describe('Archiver', () => { archiverStore, { pollingIntervalMs: 1000, batchSize: 1000 }, blobSinkClient, + epochCache, instrumentation, l1Constants, ); - blocks = await Promise.all(blockNumbers.map(x => L2Block.random(x, txsPerBlock, x + 1, 2))); - blocks.forEach((block, i) => { - block.header.globalVariables.timestamp = BigInt(now + Number(ETHEREUM_SLOT_DURATION) * (i + 1)); - block.body.txEffects.forEach((txEffect, i) => { - txEffect.privateLogs = times(getNumPrivateLogsForTx(block.number, i), () => PrivateLog.random()); - }); - }); + blocks = await Promise.all(blockNumbers.map(makeBlock)); // TODO(palla/archiver) Instead of guessing the archiver requests with mockResolvedValueOnce, // we should use a mock implementation that returns the expected value based on the input. // blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobsFromBlock(b))); // blobsFromBlocks.forEach(blobs => blobSinkClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - // rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + // rollupTxs = await Promise.all(blocks.map(b => makeRollupTx(b))); // publicClient.getTransaction.mockImplementation((args: { hash?: `0x${string}` }) => { // const index = parseInt(withoutHexPrefix(args.hash!)); // if (index > blocks.length) { @@ -252,7 +264,7 @@ describe('Archiver', () => { let latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(0); - const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const rollupTxs = await Promise.all(blocks.map(b => makeRollupTx(b))); const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHashes)); mockL1BlockNumbers(2500n, 2510n, 2520n); @@ -334,7 +346,7 @@ describe('Archiver', () => { const numL2BlocksInTest = 2; - const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const rollupTxs = await Promise.all(blocks.map(b => makeRollupTx(b))); const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHashes)); // Here we set the current L1 block number to 102. L1 to L2 messages after this should not be read. 
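// ---------------------------------------------------------------------------
// A minimal standalone sketch (not part of the patch) of the acceptance rule
// that the next hunk's new test exercises, mirroring validateBlockAttestations
// in validation.ts further down this diff. The helper name `isBlockAcceptable`
// and the use of plain address strings are illustrative assumptions.
function isBlockAcceptable(attestors: string[], committee: string[]): boolean {
  // No known committee for the epoch: the archiver accepts without validation.
  if (committee.length === 0) {
    return true;
  }
  // Every recovered attestor must be a committee member.
  const committeeSet = new Set(committee);
  if (!attestors.every(attestor => committeeSet.has(attestor))) {
    return false;
  }
  // And the attestations must reach the greater-than-two-thirds quorum.
  const requiredAttestationCount = Math.floor((committee.length * 2) / 3) + 1;
  return attestors.length >= requiredAttestationCount;
}
// With the committee of 3 set up below, the quorum is floor(3 * 2 / 3) + 1 = 3,
// and the bad block 2 (signed by 3 random non-members) fails the membership
// check, so the archiver skips it instead of syncing it.
// ---------------------------------------------------------------------------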
@@ -368,6 +380,81 @@ describe('Archiver', () => { }); }, 10_000); + it('ignores block 2 because it had invalid attestations', async () => { + let latestBlockNum = await archiver.getBlockNumber(); + expect(latestBlockNum).toEqual(0); + + // Setup a committee of 3 signers + mockRollupRead.getTargetCommitteeSize.mockResolvedValue(3n); + const signers = times(3, Secp256k1Signer.random); + const committee = signers.map(signer => signer.address); + epochCache.getCommitteeForEpoch.mockResolvedValue({ committee } as EpochCommitteeInfo); + + // Add the attestations from the signers to all 3 blocks + const rollupTxs = await Promise.all(blocks.map(b => makeRollupTx(b, signers))); + const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHashes)); + const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobsFromBlock(b))); + + // And define a bad block 2 with attestations from random signers + const badBlock2 = await makeBlock(2); + badBlock2.archive.root = new Fr(0x1002); + const badBlock2RollupTx = await makeRollupTx(badBlock2, times(3, Secp256k1Signer.random)); + const badBlock2BlobHashes = await makeVersionedBlobHashes(badBlock2); + const badBlock2Blobs = await makeBlobsFromBlock(badBlock2); + + // Return the archive root for the bad block 2 when queried + mockRollupRead.archiveAt.mockImplementation((args: readonly [bigint]) => + Promise.resolve((args[0] === 2n ? badBlock2 : blocks[Number(args[0] - 1n)]).archive.root.toString()), + ); + + logger.warn(`Created 3 valid blocks`); + blocks.forEach(block => logger.warn(`Block ${block.number} with root ${block.archive.root.toString()}`)); + logger.warn(`Created invalid block 2 with root ${badBlock2.archive.root.toString()}`); + + // During the first archiver loop, we fetch block 1 and the block 2 with bad attestations + publicClient.getBlockNumber.mockResolvedValue(85n); + makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString(), blobHashes[0]); + makeL2BlockProposedEvent(80n, 2n, badBlock2.archive.root.toString(), badBlock2BlobHashes); + mockRollup.read.status.mockResolvedValue([0n, GENESIS_ROOT, 2n, badBlock2.archive.root.toString(), GENESIS_ROOT]); + publicClient.getTransaction.mockResolvedValueOnce(rollupTxs[0]).mockResolvedValueOnce(badBlock2RollupTx); + blobSinkClient.getBlobSidecar.mockResolvedValueOnce(blobsFromBlocks[0]).mockResolvedValueOnce(badBlock2Blobs); + + // Start archiver, the bad block 2 should not be synced + await archiver.start(true); + latestBlockNum = await archiver.getBlockNumber(); + expect(latestBlockNum).toEqual(1); + + // Now we go for another loop, where a proper block 2 is proposed with correct attestations + // IRL there would be an "Invalidated" event, but we are not currently relying on it + logger.warn(`Adding new block 2 with correct attestations and a block 3`); + publicClient.getBlockNumber.mockResolvedValue(100n); + makeL2BlockProposedEvent(90n, 2n, blocks[1].archive.root.toString(), blobHashes[1]); + makeL2BlockProposedEvent(95n, 3n, blocks[2].archive.root.toString(), blobHashes[2]); + mockRollup.read.status.mockResolvedValue([ + 0n, + GENESIS_ROOT, + 3n, + blocks[2].archive.root.toString(), + blocks[0].archive.root.toString(), + ]); + publicClient.getTransaction.mockResolvedValueOnce(rollupTxs[1]).mockResolvedValueOnce(rollupTxs[2]); + blobSinkClient.getBlobSidecar.mockResolvedValueOnce(blobsFromBlocks[1]).mockResolvedValueOnce(blobsFromBlocks[2]); + mockRollupRead.archiveAt.mockImplementation((args: readonly [bigint]) => + Promise.resolve(blocks[Number(args[0] - 
1n)].archive.root.toString()), + ); + + // Now we should move to block 3 + await waitUntilArchiverBlock(3); + latestBlockNum = await archiver.getBlockNumber(); + expect(latestBlockNum).toEqual(3); + + // And block 2 should return the proper one + const [block2] = await archiver.getPublishedBlocks(2, 1); + expect(block2.block.number).toEqual(2); + expect(block2.block.archive.root.toString()).toEqual(blocks[1].archive.root.toString()); + expect(block2.attestations.length).toEqual(3); + }, 10_000); + it('skip event search if no changes found', async () => { const loggerSpy = jest.spyOn((archiver as any).log, 'debug'); @@ -376,7 +463,7 @@ describe('Archiver', () => { const numL2BlocksInTest = 2; - const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const rollupTxs = await Promise.all(blocks.map(b => makeRollupTx(b))); const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHashes)); mockL1BlockNumbers(50n, 100n); @@ -414,7 +501,7 @@ describe('Archiver', () => { const numL2BlocksInTest = 2; - const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const rollupTxs = await Promise.all(blocks.map(b => makeRollupTx(b))); const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHashes)); let mockedBlockNum = 0n; @@ -460,7 +547,7 @@ describe('Archiver', () => { // Lets take a look to see if we can find re-org stuff! await sleep(2000); - expect(loggerSpy).toHaveBeenCalledWith(`L2 prune has been detected.`); + expect(loggerSpy).toHaveBeenCalledWith(expect.stringContaining(`L2 prune has been detected`), expect.anything()); // Should also see the block number be reduced latestBlockNum = await archiver.getBlockNumber(); @@ -538,7 +625,7 @@ describe('Archiver', () => { blocks = [l2Block]; const blobHashes = await makeVersionedBlobHashes(l2Block); - const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const rollupTxs = await Promise.all(blocks.map(b => makeRollupTx(b))); publicClient.getBlockNumber.mockResolvedValue(l1BlockForL2Block); mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]); makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString(), blobHashes); @@ -570,7 +657,7 @@ describe('Archiver', () => { blocks = [l2Block]; const blobHashes = await makeVersionedBlobHashes(l2Block); - const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const rollupTxs = await Promise.all(blocks.map(b => makeRollupTx(b))); publicClient.getBlockNumber.mockResolvedValue(l1BlockForL2Block); mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]); makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString(), blobHashes); @@ -630,7 +717,7 @@ describe('Archiver', () => { blocks = [l2Block]; const blobHashes = await makeVersionedBlobHashes(l2Block); - const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const rollupTxs = await Promise.all(blocks.map(b => makeRollupTx(b))); publicClient.getBlockNumber.mockResolvedValue(lastL1BlockForEpoch); mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]); makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString(), blobHashes); @@ -660,7 +747,7 @@ describe('Archiver', () => { it('handles a block gap due to a spurious L2 prune', async () => { expect(await archiver.getBlockNumber()).toEqual(0); - const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const rollupTxs = 
await Promise.all(blocks.map(b => makeRollupTx(b))); const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHashes)); const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobsFromBlock(b))); @@ -805,7 +892,11 @@ describe('Archiver', () => { * @param block - The L2Block. * @returns A fake tx with calldata that corresponds to calling process in the Rollup contract. */ -async function makeRollupTx(l2Block: L2Block) { +async function makeRollupTx(l2Block: L2Block, signers: Secp256k1Signer[] = []) { + const attestations = signers + .map(signer => makeBlockAttestationFromBlock(l2Block, signer)) + .map(blockAttestation => CommitteeAttestation.fromSignature(blockAttestation.signature)) + .map(committeeAttestation => committeeAttestation.toViem()); const header = l2Block.header.toPropose().toViem(); const blobInput = Blob.getPrefixedEthBlobCommitments(await Blob.getBlobsPerBlock(l2Block.body.toBlobFields())); const archive = toHex(l2Block.archive.root.toBuffer()); @@ -820,8 +911,8 @@ async function makeRollupTx(l2Block: L2Block) { stateReference, oracleInput: { feeAssetPriceModifier: 0n }, }, - RollupContract.packAttestations([]), - [], + RollupContract.packAttestations(attestations), + signers.map(signer => signer.address.toString()), blobInput, ], }); diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 95a67ba601b9..b3530950c502 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -1,4 +1,5 @@ import type { BlobSinkClientInterface } from '@aztec/blob-sink/client'; +import { EpochCache } from '@aztec/epoch-cache'; import { BlockTagTooOldError, InboxContract, @@ -15,7 +16,7 @@ import { type Logger, createLogger } from '@aztec/foundation/log'; import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise'; import { sleep } from '@aztec/foundation/sleep'; import { count } from '@aztec/foundation/string'; -import { Timer, elapsed } from '@aztec/foundation/timer'; +import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer'; import type { CustomRange } from '@aztec/kv-store'; import { RollupAbi } from '@aztec/l1-artifacts'; import { @@ -61,7 +62,14 @@ import { ContractClassLog, type LogFilter, type PrivateLog, type PublicLog, TxSc import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import { type BlockHeader, type IndexedTxEffect, TxHash, TxReceipt } from '@aztec/stdlib/tx'; import type { UInt64 } from '@aztec/stdlib/types'; -import { Attributes, type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client'; +import { + Attributes, + type TelemetryClient, + type Traceable, + type Tracer, + getTelemetryClient, + trackSpan, +} from '@aztec/telemetry-client'; import { EventEmitter } from 'events'; import groupBy from 'lodash.groupby'; @@ -79,12 +87,20 @@ import { InitialBlockNumberNotSequentialError, NoBlobBodiesFoundError } from './ import { ArchiverInstrumentation } from './instrumentation.js'; import type { InboxMessage } from './structs/inbox_message.js'; import type { PublishedL2Block } from './structs/published.js'; +import { validateBlockAttestations } from './validation.js'; /** * Helper interface to combine all sources this archiver implementation provides. 
*/ export type ArchiveSource = L2BlockSource & L2LogsSource & ContractDataSource & L1ToL2MessageSource; +export type ArchiverDeps = { + telemetry?: TelemetryClient; + blobSinkClient: BlobSinkClientInterface; + epochCache?: EpochCache; + dateProvider?: DateProvider; +}; + /** * Pulls L2 blocks in a non-blocking manner and provides interface for their retrieval. * Responsible for handling robust L1 polling so that other components do not need to @@ -123,6 +139,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem readonly dataStore: ArchiverDataStore, private readonly config: { pollingIntervalMs: number; batchSize: number }, private readonly blobSinkClient: BlobSinkClientInterface, + private readonly epochCache: EpochCache, private readonly instrumentation: ArchiverInstrumentation, private readonly l1constants: L1RollupConstants & { l1StartBlockHash: Buffer32 }, private readonly log: Logger = createLogger('archiver'), @@ -146,7 +163,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem public static async createAndSync( config: ArchiverConfig, archiverStore: ArchiverDataStore, - deps: { telemetry: TelemetryClient; blobSinkClient: BlobSinkClientInterface }, + deps: ArchiverDeps, blockUntilSynced = true, ): Promise<Archiver> { const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId); @@ -185,13 +202,17 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem batchSize: config.archiverBatchSize ?? 100, }; + const epochCache = deps.epochCache ?? (await EpochCache.create(config.l1Contracts.rollupAddress, config, deps)); + const telemetry = deps.telemetry ?? getTelemetryClient(); + const archiver = new Archiver( publicClient, config.l1Contracts, archiverStore, opts, deps.blobSinkClient, - await ArchiverInstrumentation.new(deps.telemetry, () => archiverStore.estimateSize()), + epochCache, + await ArchiverInstrumentation.new(telemetry, () => archiverStore.estimateSize()), l1Constants, ); await archiver.start(blockUntilSynced); @@ -333,8 +354,9 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem ); // And lastly we check if we are missing any L2 blocks behind us due to a possible L1 reorg. // We only do this if rollup cant prune on the next submission. Otherwise we will end up - // re-syncing the blocks we have just unwound above. - if (!rollupCanPrune) { + // re-syncing the blocks we have just unwound above. We also don't do this if the last block is invalid, + // since the archiver will rightfully refuse to sync up to it. 
+ if (!rollupCanPrune && !rollupStatus.lastBlockIsInvalid) { await this.checkForNewBlocksBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber); } @@ -595,6 +617,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem provenArchive, pendingBlockNumber: Number(pendingBlockNumber), pendingArchive, + lastBlockIsInvalid: false, }; this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, { localPendingBlockNumber, @@ -680,7 +703,8 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem throw new Error(`Missing block ${localPendingBlockNumber}`); } - const noBlockSinceLast = localPendingBlock && pendingArchive === localPendingBlock.archive.root.toString(); + const localPendingArchiveRoot = localPendingBlock.archive.root.toString(); + const noBlockSinceLast = localPendingBlock && pendingArchive === localPendingArchiveRoot; if (noBlockSinceLast) { // We believe the following line causes a problem when we encounter L1 re-orgs. // Basically, by setting the synched L1 block number here, we are saying that we have @@ -694,13 +718,16 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem return rollupStatus; } - const localPendingBlockInChain = archiveForLocalPendingBlockNumber === localPendingBlock.archive.root.toString(); + const localPendingBlockInChain = archiveForLocalPendingBlockNumber === localPendingArchiveRoot; if (!localPendingBlockInChain) { // If our local pending block tip is not in the chain on L1 a "prune" must have happened // or the L1 have reorged. // In any case, we have to figure out how far into the past the action will take us. // For simplicity here, we will simply rewind until we end in a block that is also on the chain on L1. 
- this.log.debug(`L2 prune has been detected.`); + this.log.debug( + `L2 prune has been detected due to local pending block ${localPendingBlockNumber} not in chain`, + { localPendingBlockNumber, localPendingArchiveRoot, archiveForLocalPendingBlockNumber }, + ); let tipAfterUnwind = localPendingBlockNumber; while (true) { @@ -762,8 +789,21 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem ); const publishedBlocks = retrievedBlocks.map(b => retrievedBlockToPublishedL2Block(b)); + const validBlocks: PublishedL2Block[] = []; for (const block of publishedBlocks) { + const isProven = block.block.number <= provenBlockNumber; + if (!isProven && !(await validateBlockAttestations(block, this.epochCache, this.l1constants, this.log))) { + this.log.warn(`Skipping block ${block.block.number} due to invalid attestations`, { + blockHash: block.block.hash(), + l1BlockNumber: block.l1.blockNumber, + }); + rollupStatus.lastBlockIsInvalid = true; + continue; + } + + rollupStatus.lastBlockIsInvalid = false; + validBlocks.push(block); this.log.debug(`Ingesting new L2 block ${block.block.number} with ${block.block.body.txEffects.length} txs`, { blockHash: block.block.hash(), l1BlockNumber: block.l1.blockNumber, @@ -773,10 +813,10 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem } try { - const [processDuration] = await elapsed(() => this.store.addBlocks(publishedBlocks)); + const [processDuration] = await elapsed(() => this.store.addBlocks(validBlocks)); this.instrumentation.processNewBlocks( - processDuration / publishedBlocks.length, - publishedBlocks.map(b => b.block), + processDuration / validBlocks.length, + validBlocks.map(b => b.block), ); } catch (err) { if (err instanceof InitialBlockNumberNotSequentialError) { @@ -799,7 +839,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem throw err; } - for (const block of publishedBlocks) { + for (const block of validBlocks) { this.log.info(`Downloaded L2 block ${block.block.number}`, { blockHash: await block.block.hash(), blockNumber: block.block.number, @@ -809,7 +849,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem archiveNextLeafIndex: block.block.archive.nextAvailableLeafIndex, }); } - lastRetrievedBlock = publishedBlocks.at(-1) ?? lastRetrievedBlock; + lastRetrievedBlock = validBlocks.at(-1) ?? lastRetrievedBlock; } while (searchEndBlock < currentL1BlockNumber); // Important that we update AFTER inserting the blocks. diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index cad3127aabd7..8270c2db6077 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -311,7 +311,7 @@ async function getBlockFromRollupTx( throw new Error(`Unexpected rollup method called ${rollupFunctionName}`); } - const [decodedArgs, attestations, _signers, _blobInput] = rollupArgs! as readonly [ + const [decodedArgs, packedAttestations, _signers, _blobInput] = rollupArgs! 
as readonly [ { archive: Hex; stateReference: ViemStateReference; oracleInput: { feeAssetPriceModifier: bigint; }; header: Hex; }, Hex, Hex[], Hex, ]; + const attestations = CommitteeAttestation.fromPacked(packedAttestations, targetCommitteeSize); + + logger.trace(`Recovered propose calldata from tx ${txHash}`, { + l2BlockNumber, + archive: decodedArgs.archive, + stateReference: decodedArgs.stateReference, + header: decodedArgs.header, + blobHashes, + attestations, + packedAttestations, + targetCommitteeSize, + }); + // TODO(md): why is the proposed block header different to the actual block header? // This is likely going to be a footgun const header = ProposedBlockHeader.fromViem(decodedArgs.header); @@ -359,7 +372,7 @@ async function getBlockFromRollupTx( stateReference, header, body, - attestations: CommitteeAttestation.fromPacked(attestations, targetCommitteeSize), + attestations, }; } diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts index 5627f3056af7..a70ac25e1a37 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts @@ -88,7 +88,7 @@ export class LogStore { acc.set(tag, currentLogs.concat(logs)); } return acc; - }); + }, new Map()); const tagsToUpdate = Array.from(taggedLogsToAdd.keys()); return this.db.transactionAsync(async () => { diff --git a/yarn-project/archiver/src/archiver/validation.test.ts b/yarn-project/archiver/src/archiver/validation.test.ts new file mode 100644 index 000000000000..bc630f258c09 --- /dev/null +++ b/yarn-project/archiver/src/archiver/validation.test.ts @@ -0,0 +1,91 @@ +import type { EpochCache, EpochCommitteeInfo } from '@aztec/epoch-cache'; +import { times } from '@aztec/foundation/collection'; +import { Secp256k1Signer } from '@aztec/foundation/crypto'; +import { type Logger, createLogger } from '@aztec/foundation/log'; +import { EthAddress, L1PublishedData, L2Block, PublishedL2Block } from '@aztec/stdlib/block'; +import { orderAttestations } from '@aztec/stdlib/p2p'; +import { makeBlockAttestationFromBlock } from '@aztec/stdlib/testing'; + +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { validateBlockAttestations } from './validation.js'; + +describe('validateBlockAttestations', () => { + let epochCache: MockProxy<EpochCache>; + let signers: Secp256k1Signer[]; + let committee: EthAddress[]; + let logger: Logger; + + const makeBlock = async (signers: Secp256k1Signer[], committee: EthAddress[], slot?: number) => { + const block = await L2Block.random(slot ?? 
1); + const blockAttestations = signers.map(signer => makeBlockAttestationFromBlock(block, signer)); + const attestations = orderAttestations(blockAttestations, committee); + return new PublishedL2Block(block, L1PublishedData.random(), attestations); + }; + + const constants = { epochDuration: 10 }; + + const setCommittee = (committee: EthAddress[]) => { + epochCache.getCommitteeForEpoch.mockResolvedValue({ committee } as EpochCommitteeInfo); + }; + + beforeEach(() => { + epochCache = mock<EpochCache>(); + signers = times(5, () => Secp256k1Signer.random()); + committee = signers.map(signer => signer.address); + logger = createLogger('archiver:test'); + }); + + describe('with empty committee', () => { + beforeEach(() => { + setCommittee([]); + }); + + it('validates a block if no committee is found', async () => { + const block = await makeBlock([], []); + const result = await validateBlockAttestations(block, epochCache, constants, logger); + + expect(result).toBe(true); + expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(0n); + }); + + it('validates a block with attestations if no committee is found', async () => { + const block = await makeBlock(signers, committee); + const result = await validateBlockAttestations(block, epochCache, constants, logger); + + expect(result).toBe(true); + expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(0n); + }); + }); + + describe('with committee', () => { + beforeEach(() => { + setCommittee(committee); + }); + + it('requests committee for the correct epoch', async () => { + const block = await makeBlock(signers, committee, 28); + await validateBlockAttestations(block, epochCache, constants, logger); + expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(2n); + }); + + it('fails if an attestation is from a non-committee member', async () => { + const badSigner = Secp256k1Signer.random(); + const block = await makeBlock([...signers, badSigner], [...committee, badSigner.address]); + const result = await validateBlockAttestations(block, epochCache, constants, logger); + expect(result).toBe(false); + }); + + it('returns false if insufficient attestations', async () => { + const block = await makeBlock(signers.slice(0, 2), committee); + const result = await validateBlockAttestations(block, epochCache, constants, logger); + expect(result).toBe(false); + }); + + it('returns true if all attestations are valid and sufficient', async () => { + const block = await makeBlock(signers.slice(0, 4), committee); + const result = await validateBlockAttestations(block, epochCache, constants, logger); + expect(result).toBe(true); + }); + }); }); diff --git a/yarn-project/archiver/src/archiver/validation.ts b/yarn-project/archiver/src/archiver/validation.ts new file mode 100644 index 000000000000..da55170132c7 --- /dev/null +++ b/yarn-project/archiver/src/archiver/validation.ts @@ -0,0 +1,61 @@ +import type { EpochCache } from '@aztec/epoch-cache'; +import type { Logger } from '@aztec/foundation/log'; +import { type PublishedL2Block, getAttestationsFromPublishedL2Block } from '@aztec/stdlib/block'; +import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers'; + +/** + * Validates the attestations submitted for the given block. + * Returns true if the attestations are valid and sufficient, false otherwise. 
*/ +export async function validateBlockAttestations( + publishedBlock: Pick<PublishedL2Block, 'block' | 'attestations'>, + epochCache: EpochCache, + constants: Pick<L1RollupConstants, 'epochDuration'>, + logger?: Logger, +): Promise<boolean> { + const attestations = getAttestationsFromPublishedL2Block(publishedBlock); + const { block } = publishedBlock; + const blockHash = await block.hash().then(hash => hash.toString()); + const archiveRoot = block.archive.root.toString(); + const slot = block.header.getSlot(); + const epoch = getEpochAtSlot(slot, constants); + const { committee } = await epochCache.getCommitteeForEpoch(epoch); + const logData = { blockNumber: block.number, slot, epoch, blockHash, archiveRoot }; + logger?.debug(`Validating attestations for block ${block.number} at slot ${slot} in epoch ${epoch}`, { + committee: (committee ?? []).map(member => member.toString()), + recoveredAttestors: attestations.map(a => a.getSender().toString()), + postedAttestations: publishedBlock.attestations.map(a => + a.address.isZero() ? a.signature.toString() : a.address.toString(), + ), + ...logData, + }); + + if (!committee || committee.length === 0) { + // Q: Should we accept blocks with no committee? + logger?.warn(`No committee found for epoch ${epoch} at slot ${slot}. Accepting block without validation.`, logData); + return true; + } + + const committeeSet = new Set(committee.map(member => member.toString())); + const requiredAttestationCount = Math.floor((committee.length * 2) / 3) + 1; + + for (const attestation of attestations) { + const signer = attestation.getSender().toString(); + if (!committeeSet.has(signer)) { + logger?.warn(`Attestation from non-committee member ${signer} at slot ${slot}`, { committee }); + return false; + } + } + + if (attestations.length < requiredAttestationCount) { + logger?.warn(`Insufficient attestations for block at slot ${slot}`, { + requiredAttestations: requiredAttestationCount, + actualAttestations: attestations.length, + ...logData, + }); + return false; + } + + logger?.debug(`Block attestations validated successfully for block ${block.number} at slot ${slot}`, logData); + return true; +} diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index 8e350a374935..afc95f670be7 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -1,4 +1,3 @@ -import type { BlobSinkClientInterface } from '@aztec/blob-sink/client'; import { createLogger } from '@aztec/foundation/log'; import type { DataStoreConfig } from '@aztec/kv-store/config'; import { createStore } from '@aztec/kv-store/lmdb-v2'; @@ -10,9 +9,8 @@ import type { L2BlockSourceEventEmitter } from '@aztec/stdlib/block'; import { type ContractClassPublic, computePublicBytecodeCommitment } from '@aztec/stdlib/contract'; import type { ArchiverApi, Service } from '@aztec/stdlib/interfaces/server'; import { getComponentsVersionsFromConfig } from '@aztec/stdlib/versioning'; -import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; -import { Archiver } from './archiver/archiver.js'; +import { Archiver, type ArchiverDeps } from './archiver/archiver.js'; import type { ArchiverConfig } from './archiver/config.js'; import { ARCHIVER_DB_VERSION, KVArchiverDataStore } from './archiver/kv_archiver_store/kv_archiver_store.js'; import { createArchiverClient } from './rpc/index.js'; @@ -41,13 +39,12 @@ export async function createArchiverStore( */ export async function createArchiver( config: ArchiverConfig & DataStoreConfig, - blobSinkClient: BlobSinkClientInterface, + deps: ArchiverDeps, opts: { 
blockUntilSync: boolean } = { blockUntilSync: true }, - telemetry: TelemetryClient = getTelemetryClient(), ): Promise<ArchiverApi & Service> { const archiverStore = await createArchiverStore(config); await registerProtocolContracts(archiverStore); - return Archiver.createAndSync(config, archiverStore, { telemetry, blobSinkClient }, opts.blockUntilSync); + return Archiver.createAndSync(config, archiverStore, deps, opts.blockUntilSync); } /** diff --git a/yarn-project/archiver/tsconfig.json b/yarn-project/archiver/tsconfig.json index a376e396483d..262058395971 100644 --- a/yarn-project/archiver/tsconfig.json +++ b/yarn-project/archiver/tsconfig.json @@ -15,6 +15,9 @@ { "path": "../constants" }, + { + "path": "../epoch-cache" + }, { "path": "../ethereum" }, diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index d159b22b9d08..5bb76ea3b989 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -234,7 +234,13 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { // attempt snapshot sync if possible await trySnapshotSync(config, log); - const archiver = await createArchiver(config, blobSinkClient, { blockUntilSync: true }, telemetry); + const epochCache = await EpochCache.create(config.l1Contracts.rollupAddress, config, { dateProvider }); + + const archiver = await createArchiver( + config, + { blobSinkClient, epochCache, telemetry, dateProvider }, + { blockUntilSync: true }, + ); // now create the merkle trees and the world state synchronizer const worldStateSynchronizer = await createWorldStateSynchronizer( @@ -249,8 +255,6 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { } const proofVerifier = new QueuedIVCVerifier(config, circuitVerifier); - const epochCache = await EpochCache.create(config.l1Contracts.rollupAddress, config, { dateProvider }); - // create the tx pool and the p2p client, which will need the l2 block source const p2pClient = await createP2PClient( P2PClientType.Full, diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 6c71e9e5d72e..80f775b3f9ae 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -537,9 +537,7 @@ describe('e2e_synching', () => { const blobSinkClient = createBlobSinkClient({ blobSinkUrl: `http://localhost:${opts.blobSink?.port ?? 
DEFAULT_BLOB_SINK_PORT}`, }); - const archiver = await createArchiver(opts.config!, blobSinkClient, { - blockUntilSync: true, - }); + const archiver = await createArchiver(opts.config!, { blobSinkClient }, { blockUntilSync: true }); const pendingBlockNumber = await rollup.read.getPendingBlockNumber(); const worldState = await createWorldStateSynchronizer(opts.config!, archiver); diff --git a/yarn-project/end-to-end/src/fixtures/e2e_prover_test.ts b/yarn-project/end-to-end/src/fixtures/e2e_prover_test.ts index f4f354b927cf..999cd4f6febd 100644 --- a/yarn-project/end-to-end/src/fixtures/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/fixtures/e2e_prover_test.ts @@ -276,7 +276,7 @@ export class FullProverTest { this.logger.verbose('Starting archiver for new prover node'); const archiver = await createArchiver( { ...this.context.aztecNodeConfig, dataDirectory: undefined }, - blobSinkClient, + { blobSinkClient }, { blockUntilSync: true }, ); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 13628b3c8e2e..e9d7e8bfd3e6 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -903,7 +903,7 @@ export function createAndSyncProverNode( // Creating temp store and archiver for simulated prover node const archiverConfig = { ...aztecNodeConfig, dataDirectory: proverNodeConfig.dataDirectory }; - const archiver = await createArchiver(archiverConfig, blobSinkClient, { blockUntilSync: true }); + const archiver = await createArchiver(archiverConfig, { blobSinkClient }, { blockUntilSync: true }); // Prover node config is for simulated proofs const proverConfig: ProverNodeConfig = { diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index ea1941ff386d..f70e949f134d 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -52,7 +52,11 @@ export async function createProverNode( await trySnapshotSync(config, log); - const archiver = deps.archiver ?? (await createArchiver(config, blobSinkClient, { blockUntilSync: true }, telemetry)); + const epochCache = await EpochCache.create(config.l1Contracts.rollupAddress, config); + + const archiver = + deps.archiver ?? + (await createArchiver(config, { blobSinkClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true })); log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`); const worldStateConfig = { ...config, worldStateProvenBlocksOnly: false }; @@ -76,8 +80,6 @@ export async function createProverNode( const l1TxUtils = deps.l1TxUtils ?? new L1TxUtils(l1Client, log, deps.dateProvider, config); const publisher = deps.publisher ?? new ProverNodePublisher(config, { telemetry, rollupContract, l1TxUtils }); - const epochCache = await EpochCache.create(config.l1Contracts.rollupAddress, config); - const proofVerifier = new QueuedIVCVerifier( config, config.realProofs ? 
await BBCircuitVerifier.new(config) : new TestCircuitVerifier(), diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index d04459ccd66f..5c89258f680c 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -22,6 +22,7 @@ import { } from '@aztec/stdlib/interfaces/server'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import type { BlockProposalOptions } from '@aztec/stdlib/p2p'; +import { orderAttestations } from '@aztec/stdlib/p2p'; import { pickFromSchema } from '@aztec/stdlib/schemas'; import type { L2BlockBuiltStats } from '@aztec/stdlib/stats'; import { MerkleTreeId } from '@aztec/stdlib/trees'; @@ -51,7 +52,7 @@ import { type Action, type SequencerPublisher, SignalType } from '../publisher/s import type { SequencerConfig } from './config.js'; import { SequencerMetrics } from './metrics.js'; import { SequencerTimetable, SequencerTooSlowError } from './timetable.js'; -import { SequencerState, type SequencerStateWithSlot, orderAttestations } from './utils.js'; +import { SequencerState, type SequencerStateWithSlot } from './utils.js'; export { SequencerState }; @@ -577,6 +578,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter { + // TODO(palla/sigs): We need to simulate the previous block being removed if invalid! await this.publisher.validateBlockHeader(proposalHeader); const blockNumber = newGlobalVariables.blockNumber; diff --git a/yarn-project/sequencer-client/src/sequencer/utils.ts b/yarn-project/sequencer-client/src/sequencer/utils.ts index 4a8a13f0bbfd..04200b618d43 100644 --- a/yarn-project/sequencer-client/src/sequencer/utils.ts +++ b/yarn-project/sequencer-client/src/sequencer/utils.ts @@ -1,7 +1,3 @@ -import type { EthAddress } from '@aztec/foundation/eth-address'; -import { CommitteeAttestation } from '@aztec/stdlib/block'; -import type { BlockAttestation } from '@aztec/stdlib/p2p'; - export enum SequencerState { /** * Sequencer is stopped and not processing any txs from the pool. @@ -48,36 +44,3 @@ export type SequencerStateCallback = () => SequencerState; export function sequencerStateToNumber(state: SequencerState): number { return Object.values(SequencerState).indexOf(state); } - -/** Order Attestations - * - * Returns attestation signatures in the order of a series of provided ethereum addresses - * The rollup smart contract expects attestations to appear in the order of the committee - * - * @todo: perform this logic within the memory attestation store instead? 
- */ -export function orderAttestations( - attestations: BlockAttestation[], - orderAddresses: EthAddress[], -): CommitteeAttestation[] { - // Create a map of sender addresses to BlockAttestations - const attestationMap = new Map<string, CommitteeAttestation>(); - - for (const attestation of attestations) { - const sender = attestation.getSender(); - if (sender) { - attestationMap.set( - sender.toString(), - CommitteeAttestation.fromAddressAndSignature(sender, attestation.signature), - ); - } - } - - // Create the ordered array based on the orderAddresses, else return an empty attestation - const orderedAttestations = orderAddresses.map(address => { - const addressString = address.toString(); - return attestationMap.get(addressString) || CommitteeAttestation.fromAddress(address); - }); - - return orderedAttestations; -} diff --git a/yarn-project/stdlib/src/block/proposal/committee_attestation.ts b/yarn-project/stdlib/src/block/proposal/committee_attestation.ts index e91eeec5b6a7..85f03b089b97 100644 --- a/yarn-project/stdlib/src/block/proposal/committee_attestation.ts +++ b/yarn-project/stdlib/src/block/proposal/committee_attestation.ts @@ -32,6 +32,10 @@ export class CommitteeAttestation { return new CommitteeAttestation(address, signature); } + static fromSignature(signature: Signature): CommitteeAttestation { + return new CommitteeAttestation(EthAddress.ZERO, signature); + } + static fromViem(viem: ViemCommitteeAttestation): CommitteeAttestation { return new CommitteeAttestation(EthAddress.fromString(viem.addr), Signature.fromViemSignature(viem.signature)); } diff --git a/yarn-project/stdlib/src/block/published_l2_block.ts b/yarn-project/stdlib/src/block/published_l2_block.ts index 41785fdd1167..7e00e3fe78ed 100644 --- a/yarn-project/stdlib/src/block/published_l2_block.ts +++ b/yarn-project/stdlib/src/block/published_l2_block.ts @@ -1,4 +1,6 @@ // Ignoring import issue to fix portable inferred type issue in zod schema +import { Buffer32 } from '@aztec/foundation/buffer'; +import { randomBigInt } from '@aztec/foundation/crypto'; import { schemas } from '@aztec/foundation/schemas'; import { z } from 'zod'; @@ -22,6 +24,14 @@ export class L1PublishedData { blockHash: z.string(), }); } + + static random() { + return new L1PublishedData( + randomBigInt(1000n) + 1n, + BigInt(Math.floor(Date.now() / 1000)), + Buffer32.random().toString(), + ); + } } export class PublishedL2Block { @@ -40,12 +50,11 @@ export class PublishedL2Block { } } -export function getAttestationsFromPublishedL2Block(block: PublishedL2Block) { +export function getAttestationsFromPublishedL2Block( + block: Pick<PublishedL2Block, 'block' | 'attestations'>, +): BlockAttestation[] { const payload = ConsensusPayload.fromBlock(block.block); return block.attestations .filter(attestation => !attestation.signature.isEmpty()) - .map( - attestation => - new BlockAttestation(block.block.header.globalVariables.blockNumber, payload, attestation.signature), - ); + .map(attestation => new BlockAttestation(block.block.number, payload, attestation.signature)); } diff --git a/yarn-project/stdlib/src/p2p/attestation_utils.ts b/yarn-project/stdlib/src/p2p/attestation_utils.ts new file mode 100644 index 000000000000..02e800981aae --- /dev/null +++ b/yarn-project/stdlib/src/p2p/attestation_utils.ts @@ -0,0 +1,35 @@ +import type { EthAddress } from '@aztec/foundation/eth-address'; + +import { CommitteeAttestation } from '../block/index.js'; +import type { BlockAttestation } from './block_attestation.js'; + +/** + * Returns attestation signatures in the order of a series of provided ethereum addresses + * The rollup 
smart contract expects attestations to appear in the order of the committee + * @todo: perform this logic within the memory attestation store instead? + */ +export function orderAttestations( + attestations: BlockAttestation[], + orderAddresses: EthAddress[], +): CommitteeAttestation[] { + // Create a map of sender addresses to BlockAttestations + const attestationMap = new Map<string, CommitteeAttestation>(); + + for (const attestation of attestations) { + const sender = attestation.getSender(); + if (sender) { + attestationMap.set( + sender.toString(), + CommitteeAttestation.fromAddressAndSignature(sender, attestation.signature), + ); + } + } + + // Create the ordered array based on the orderAddresses, else return an empty attestation + const orderedAttestations = orderAddresses.map(address => { + const addressString = address.toString(); + return attestationMap.get(addressString) || CommitteeAttestation.fromAddress(address); + }); + + return orderedAttestations; +} diff --git a/yarn-project/stdlib/src/p2p/block_attestation.ts b/yarn-project/stdlib/src/p2p/block_attestation.ts index 8a02f3620093..92c34e1c2fd2 100644 --- a/yarn-project/stdlib/src/p2p/block_attestation.ts +++ b/yarn-project/stdlib/src/p2p/block_attestation.ts @@ -66,10 +66,9 @@ export class BlockAttestation extends Gossipable { return this.payload.header.slotNumber; } - /**Get sender - * - * Lazily evaluate and cache the sender of the attestation - * @returns The sender of the attestation + /** + * Lazily evaluate and cache the signer of the attestation + * @returns The signer of the attestation */ getSender(): EthAddress { if (!this.sender) { diff --git a/yarn-project/stdlib/src/p2p/index.ts b/yarn-project/stdlib/src/p2p/index.ts index 36218852b07c..6d5ae19b262b 100644 --- a/yarn-project/stdlib/src/p2p/index.ts +++ b/yarn-project/stdlib/src/p2p/index.ts @@ -1,3 +1,4 @@ +export * from './attestation_utils.js'; export * from './block_attestation.js'; export * from './block_proposal.js'; export * from './consensus_payload.js'; diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index 45ce560be4a6..c105a3b5a4a9 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -291,6 +291,16 @@ export const makeBlockAttestation = (options?: MakeConsensusPayloadOptions): Blo return new BlockAttestation(blockNumber, payload, signature); }; +export const makeBlockAttestationFromBlock = (block: L2Block, signer?: Secp256k1Signer): BlockAttestation => { + return makeBlockAttestation({ + signer, + header: block.header, + archive: block.archive.root, + stateReference: block.header.state, + txHashes: block.body.txEffects.map(tx => tx.txHash), + }); +}; + export async function randomPublishedL2Block( l2BlockNumber: number, opts: { signers?: Secp256k1Signer[] } = {}, ... const signers = opts.signers ?? 
times(3, () => Secp256k1Signer.random()); - const atts = await Promise.all( - signers.map(signer => - makeBlockAttestation({ - signer, - header: block.header, - archive: block.archive.root, - stateReference: block.header.state, - txHashes: block.body.txEffects.map(tx => tx.txHash), - }), - ), - ); + const atts = await Promise.all(signers.map(signer => makeBlockAttestationFromBlock(block, signer))); const attestations = atts.map( (attestation, i) => new CommitteeAttestation(signers[i].address, attestation.signature), ); diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 5c540152b028..e9ffcdc36c5c 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -88,6 +88,7 @@ __metadata: "@aztec/blob-lib": "workspace:^" "@aztec/blob-sink": "workspace:^" "@aztec/constants": "workspace:^" + "@aztec/epoch-cache": "workspace:^" "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/kv-store": "workspace:^"
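// ---------------------------------------------------------------------------
// End-to-end usage sketch (not part of the patch) of how the pieces above
// compose: attestations are ordered to match the committee before publishing,
// and the archiver-side check validates them on retrieval. The stubbed
// epochCache, the relative import path, and the epochDuration of 10 are
// illustrative assumptions taken from validation.test.ts in this diff.
import type { EpochCache } from '@aztec/epoch-cache';
import { times } from '@aztec/foundation/collection';
import { Secp256k1Signer } from '@aztec/foundation/crypto';
import { L1PublishedData, L2Block, PublishedL2Block } from '@aztec/stdlib/block';
import { orderAttestations } from '@aztec/stdlib/p2p';
import { makeBlockAttestationFromBlock } from '@aztec/stdlib/testing';

import { validateBlockAttestations } from './archiver/validation.js';

async function demo(): Promise<boolean> {
  // A committee of 5 signers, of which only 4 attest to the block.
  const signers = times(5, () => Secp256k1Signer.random());
  const committee = signers.map(signer => signer.address);
  const block = await L2Block.random(1);
  const blockAttestations = signers.slice(0, 4).map(signer => makeBlockAttestationFromBlock(block, signer));
  // Order attestations by committee position, as the rollup contract expects;
  // the missing member becomes an empty attestation, which
  // getAttestationsFromPublishedL2Block later filters out.
  const attestations = orderAttestations(blockAttestations, committee);
  const published = new PublishedL2Block(block, L1PublishedData.random(), attestations);

  // Stub an epoch cache that always resolves the committee above.
  const epochCache = { getCommitteeForEpoch: () => Promise.resolve({ committee }) } as unknown as EpochCache;

  // 4 attestations >= floor(5 * 2 / 3) + 1 = 4, so the block is accepted.
  return validateBlockAttestations(published, epochCache, { epochDuration: 10 });
}
// ---------------------------------------------------------------------------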