diff --git a/yarn-project/archiver/src/archiver-sync.test.ts b/yarn-project/archiver/src/archiver-sync.test.ts index 35f4e0958647..c24fa413311b 100644 --- a/yarn-project/archiver/src/archiver-sync.test.ts +++ b/yarn-project/archiver/src/archiver-sync.test.ts @@ -212,7 +212,7 @@ describe('Archiver Sync', () => { const expectedTotalNumLogs = (name: 'private' | 'public' | 'contractClass') => sum(block.body.txEffects.map(txEffect => txEffect[`${name}Logs`].length)); - const privateLogs = (await archiver.getBlock(blockNumber))!.toL2Block().getPrivateLogs(); + const privateLogs = (await archiver.getBlock(blockNumber))!.getPrivateLogs(); expect(privateLogs.length).toBe(expectedTotalNumLogs('private')); const publicLogs = (await archiver.getPublicLogs({ fromBlock: blockNumber, toBlock: blockNumber + 1 })).logs; diff --git a/yarn-project/archiver/src/modules/data_source_base.ts b/yarn-project/archiver/src/modules/data_source_base.ts index 78f84ca78967..3fc60f7edc64 100644 --- a/yarn-project/archiver/src/modules/data_source_base.ts +++ b/yarn-project/archiver/src/modules/data_source_base.ts @@ -4,14 +4,7 @@ import type { EthAddress } from '@aztec/foundation/eth-address'; import { isDefined } from '@aztec/foundation/types'; import type { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { - type CheckpointedL2Block, - CommitteeAttestation, - L2Block, - type L2BlockNew, - type L2Tips, - PublishedL2Block, -} from '@aztec/stdlib/block'; +import { CheckpointedL2Block, CommitteeAttestation, L2BlockNew, type L2Tips } from '@aztec/stdlib/block'; import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import { type L1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers'; @@ -273,13 +266,13 @@ export abstract class ArchiverDataSourceBase return 
this.store.getBlocksForSlot(slotNumber); } - public async getBlocksForEpoch(epochNumber: EpochNumber): Promise { + public async getBlocksForEpoch(epochNumber: EpochNumber): Promise { if (!this.l1Constants) { throw new Error('L1 constants not set'); } const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1Constants); - const blocks: L2Block[] = []; + const blocks: L2BlockNew[] = []; // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. @@ -354,39 +347,33 @@ export abstract class ArchiverDataSourceBase return checkpoints.reverse(); } - public async getPublishedBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise { + public async getPublishedBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise { const checkpoints = await this.store.getRangeOfCheckpoints(CheckpointNumber(from), limit); const provenCheckpointNumber = await this.store.getProvenCheckpointNumber(); const blocks = ( await Promise.all(checkpoints.map(ch => this.store.getBlocksForCheckpoint(ch.checkpointNumber))) ).filter(isDefined); - const olbBlocks: PublishedL2Block[] = []; + const publishedBlocks: CheckpointedL2Block[] = []; for (let i = 0; i < checkpoints.length; i++) { const blockForCheckpoint = blocks[i][0]; const checkpoint = checkpoints[i]; if (checkpoint.checkpointNumber > provenCheckpointNumber && proven === true) { - // this checkpointisn't proven and we only want proven + // this checkpoint isn't proven and we only want proven continue; } - const oldCheckpoint = new Checkpoint( - blockForCheckpoint.archive, - checkpoint.header, - [blockForCheckpoint], + const publishedBlock = new CheckpointedL2Block( checkpoint.checkpointNumber, - ); - const oldBlock = L2Block.fromCheckpoint(oldCheckpoint); - const publishedBlock = new PublishedL2Block( - oldBlock, + blockForCheckpoint, checkpoint.l1, checkpoint.attestations.map(x => 
CommitteeAttestation.fromBuffer(x)), ); - olbBlocks.push(publishedBlock); + publishedBlocks.push(publishedBlock); } - return olbBlocks; + return publishedBlocks; } - public async getBlock(number: BlockNumber): Promise { + public async getBlock(number: BlockNumber): Promise { // If the number provided is -ve, then return the latest block. if (number < 0) { number = await this.store.getLatestBlockNumber(); @@ -394,50 +381,34 @@ export abstract class ArchiverDataSourceBase if (number === 0) { return undefined; } - const publishedBlocks = await this.getPublishedBlocks(number, 1); - if (publishedBlocks.length === 0) { - return undefined; + return this.store.getBlock(number); + } + + public async getBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise { + const blocks = await this.store.getBlocks(from, limit); + + if (proven === true) { + const provenBlockNumber = await this.store.getProvenBlockNumber(); + return blocks.filter(b => b.number <= provenBlockNumber); } - return publishedBlocks[0].block; + return blocks; } - public async getBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise { - const publishedBlocks = await this.getPublishedBlocks(from, limit, proven); - return publishedBlocks.map(x => x.block); + public getPublishedBlockByHash(blockHash: Fr): Promise { + return this.store.getCheckpointedBlockByHash(blockHash); } - public async getPublishedBlockByHash(blockHash: Fr): Promise { - const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash); - return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock); + public getPublishedBlockByArchive(archive: Fr): Promise { + return this.store.getCheckpointedBlockByArchive(archive); } - public async getPublishedBlockByArchive(archive: Fr): Promise { - const checkpointedBlock = await this.store.getCheckpointedBlockByArchive(archive); - return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock); + public async getL2BlockNewByHash(blockHash: Fr): Promise { + 
const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash); + return checkpointedBlock?.block; } - private async buildOldBlockFromCheckpointedBlock( - checkpointedBlock: CheckpointedL2Block | undefined, - ): Promise { - if (!checkpointedBlock) { - return undefined; - } - const checkpoint = await this.store.getCheckpointData(checkpointedBlock.checkpointNumber); - if (!checkpoint) { - return checkpoint; - } - const fullCheckpoint = new Checkpoint( - checkpointedBlock?.block.archive, - checkpoint?.header, - [checkpointedBlock.block], - checkpoint.checkpointNumber, - ); - const oldBlock = L2Block.fromCheckpoint(fullCheckpoint); - const published = new PublishedL2Block( - oldBlock, - checkpoint.l1, - checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)), - ); - return published; + public async getL2BlockNewByArchive(archive: Fr): Promise { + const checkpointedBlock = await this.store.getCheckpointedBlockByArchive(archive); + return checkpointedBlock?.block; } } diff --git a/yarn-project/archiver/src/test/mock_archiver.ts b/yarn-project/archiver/src/test/mock_archiver.ts index 06d8def02777..55d4a242b657 100644 --- a/yarn-project/archiver/src/test/mock_archiver.ts +++ b/yarn-project/archiver/src/test/mock_archiver.ts @@ -1,6 +1,6 @@ import type { CheckpointNumber } from '@aztec/foundation/branded-types'; import type { Fr } from '@aztec/foundation/curves/bn254'; -import { L2Block, type L2BlockSource } from '@aztec/stdlib/block'; +import type { L2BlockSource } from '@aztec/stdlib/block'; import type { Checkpoint } from '@aztec/stdlib/checkpoint'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; @@ -57,7 +57,7 @@ export class MockPrefilledArchiver extends MockArchiver { const fromBlock = this.l2Blocks.length; // TODO: Add L2 blocks and checkpoints separately once archiver has the apis for that. 
- this.addBlocks(this.prefilled.slice(fromBlock, fromBlock + numBlocks).map(c => L2Block.fromCheckpoint(c))); + this.addBlocks(this.prefilled.slice(fromBlock, fromBlock + numBlocks).flatMap(c => c.blocks)); return Promise.resolve(); } } diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index 2f417269df69..9b3b48025642 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -9,15 +9,13 @@ import type { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import { CheckpointedL2Block, - L2Block, L2BlockHash, L2BlockNew, type L2BlockSource, type L2Tips, - PublishedL2Block, type ValidateCheckpointResult, } from '@aztec/stdlib/block'; -import { type Checkpoint, L1PublishedData } from '@aztec/stdlib/checkpoint'; +import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import { EmptyL1RollupConstants, type L1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers'; import { type BlockHeader, TxHash, TxReceipt, TxStatus } from '@aztec/stdlib/tx'; @@ -27,7 +25,7 @@ import type { UInt64 } from '@aztec/stdlib/types'; * A mocked implementation of L2BlockSource to be used in tests. 
*/ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { - protected l2Blocks: L2Block[] = []; + protected l2Blocks: L2BlockNew[] = []; private provenBlockNumber: number = 0; private finalizedBlockNumber: number = 0; @@ -38,14 +36,14 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { public async createBlocks(numBlocks: number) { for (let i = 0; i < numBlocks; i++) { const blockNum = this.l2Blocks.length + 1; - const block = await L2Block.random(BlockNumber(blockNum)); + const block = await L2BlockNew.random(BlockNumber(blockNum), { slotNumber: SlotNumber(blockNum) }); this.l2Blocks.push(block); } this.log.verbose(`Created ${numBlocks} blocks in the mock L2 block source`); } - public addBlocks(blocks: L2Block[]) { + public addBlocks(blocks: L2BlockNew[]) { this.l2Blocks.push(...blocks); this.log.verbose(`Added ${blocks.length} blocks to the mock L2 block source`); } @@ -108,7 +106,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { } const checkpointedBlock = new CheckpointedL2Block( CheckpointNumber(number), - block.toL2Block(), + block, new L1PublishedData(BigInt(number), BigInt(number), `0x${number.toString(16).padStart(64, '0')}`), [], ); @@ -139,8 +137,9 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { * @param number - The block number to return (inclusive). * @returns The requested L2 block. 
*/ - public getBlock(number: number) { - return Promise.resolve(this.l2Blocks[number - 1]); + public getBlock(number: number): Promise { + const block = this.l2Blocks[number - 1]; + return Promise.resolve(block); } /** @@ -150,7 +149,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { */ public getL2BlockNew(number: BlockNumber): Promise { const block = this.l2Blocks[number - 1]; - return Promise.resolve(block?.toL2Block()); + return Promise.resolve(block); } /** @@ -159,7 +158,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { * @param limit - The maximum number of blocks to return. * @returns The requested mocked L2 blocks. */ - public getBlocks(from: number, limit: number, proven?: boolean) { + public getBlocks(from: number, limit: number, proven?: boolean): Promise { return Promise.resolve( this.l2Blocks .slice(from - 1, from - 1 + limit) @@ -167,37 +166,62 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { ); } - public async getPublishedCheckpoints(from: CheckpointNumber, limit: number) { - // TODO: Implement this properly. This only works when we have one block per checkpoint. - return (await this.getPublishedBlocks(from, limit)).map(block => block.toPublishedCheckpoint()); + public getPublishedCheckpoints(from: CheckpointNumber, limit: number) { + // TODO(mbps): Implement this properly. This only works when we have one block per checkpoint. 
+ const blocks = this.l2Blocks.slice(from - 1, from - 1 + limit); + return Promise.all( + blocks.map(async block => { + // Create a checkpoint from the block - manually construct since L2BlockNew doesn't have toCheckpoint() + const checkpoint = await Checkpoint.random(block.checkpointNumber, { numBlocks: 1 }); + checkpoint.blocks = [block]; + return new PublishedCheckpoint( + checkpoint, + new L1PublishedData(BigInt(block.number), BigInt(block.number), Buffer32.random().toString()), + [], + ); + }), + ); } public async getCheckpointByArchive(archive: Fr): Promise { - // TODO: Implement this properly. This only works when we have one block per checkpoint. - return (await this.getPublishedBlockByArchive(archive))?.block.toCheckpoint(); + // TODO(mbps): Implement this properly. This only works when we have one block per checkpoint. + const block = this.l2Blocks.find(b => b.archive.root.equals(archive)); + if (!block) { + return undefined; + } + // Create a checkpoint from the block - manually construct since L2BlockNew doesn't have toCheckpoint() + const checkpoint = await Checkpoint.random(block.checkpointNumber, { numBlocks: 1 }); + checkpoint.blocks = [block]; + return checkpoint; } - public async getPublishedBlocks(from: number, limit: number, proven?: boolean) { - const blocks = await this.getBlocks(from, limit, proven); - return blocks.map(block => - PublishedL2Block.fromFields({ - block, - l1: new L1PublishedData(BigInt(block.number), BigInt(block.number), Buffer32.random().toString()), - attestations: [], - }), + public getPublishedBlocks(from: number, limit: number, proven?: boolean): Promise { + const blocks = this.l2Blocks + .slice(from - 1, from - 1 + limit) + .filter(b => !proven || this.provenBlockNumber === undefined || b.number <= this.provenBlockNumber); + return Promise.resolve( + blocks.map(block => + CheckpointedL2Block.fromFields({ + checkpointNumber: CheckpointNumber(block.number), + block, + l1: new L1PublishedData(BigInt(block.number), 
BigInt(block.number), Buffer32.random().toString()), + attestations: [], + }), + ), ); } - async getL2BlocksNew(from: BlockNumber, limit: number, proven?: boolean): Promise { - const blocks = await this.getBlocks(from, limit, proven); - return blocks.map(x => x.toL2Block()); + getL2BlocksNew(from: BlockNumber, limit: number, proven?: boolean): Promise { + // getBlocks already returns L2BlockNew[], so just return directly + return this.getBlocks(from, limit, proven); } - public async getPublishedBlockByHash(blockHash: Fr): Promise { + public async getPublishedBlockByHash(blockHash: Fr): Promise { for (const block of this.l2Blocks) { const hash = await block.hash(); if (hash.equals(blockHash)) { - return PublishedL2Block.fromFields({ + return CheckpointedL2Block.fromFields({ + checkpointNumber: CheckpointNumber(block.number), block, l1: new L1PublishedData(BigInt(block.number), BigInt(block.number), Buffer32.random().toString()), attestations: [], @@ -207,13 +231,14 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { return undefined; } - public getPublishedBlockByArchive(archive: Fr): Promise { + public getPublishedBlockByArchive(archive: Fr): Promise { const block = this.l2Blocks.find(b => b.archive.root.equals(archive)); if (!block) { return Promise.resolve(undefined); } return Promise.resolve( - PublishedL2Block.fromFields({ + CheckpointedL2Block.fromFields({ + checkpointNumber: CheckpointNumber(block.number), block, l1: new L1PublishedData(BigInt(block.number), BigInt(block.number), Buffer32.random().toString()), attestations: [], @@ -221,11 +246,26 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { ); } + public async getL2BlockNewByHash(blockHash: Fr): Promise { + for (const block of this.l2Blocks) { + const hash = await block.hash(); + if (hash.equals(blockHash)) { + return block; + } + } + return undefined; + } + + public getL2BlockNewByArchive(archive: Fr): Promise { + const block = 
this.l2Blocks.find(b => b.archive.root.equals(archive)); + return Promise.resolve(block); + } + public async getBlockHeaderByHash(blockHash: Fr): Promise { for (const block of this.l2Blocks) { const hash = await block.hash(); if (hash.equals(blockHash)) { - return block.getBlockHeader(); + return block.header; } } return undefined; @@ -233,19 +273,32 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { public getBlockHeaderByArchive(archive: Fr): Promise { const block = this.l2Blocks.find(b => b.archive.root.equals(archive)); - return Promise.resolve(block?.getBlockHeader()); + return Promise.resolve(block?.header); } getBlockHeader(number: number | 'latest'): Promise { - return Promise.resolve(this.l2Blocks.at(typeof number === 'number' ? number - 1 : -1)?.getBlockHeader()); + return Promise.resolve(this.l2Blocks.at(typeof number === 'number' ? number - 1 : -1)?.header); } getCheckpointsForEpoch(epochNumber: EpochNumber): Promise { - // TODO: Implement this properly. This only works when we have one block per checkpoint. - return this.getBlocksForEpoch(epochNumber).then(blocks => blocks.map(b => b.toCheckpoint())); + // TODO(mbps): Implement this properly. This only works when we have one block per checkpoint. 
+ const epochDuration = DefaultL1ContractsConfig.aztecEpochDuration; + const [start, end] = getSlotRangeForEpoch(epochNumber, { epochDuration }); + const blocks = this.l2Blocks.filter(b => { + const slot = b.header.globalVariables.slotNumber; + return slot >= start && slot <= end; + }); + // Create checkpoints from blocks - manually construct since L2BlockNew doesn't have toCheckpoint() + return Promise.all( + blocks.map(async block => { + const checkpoint = await Checkpoint.random(block.checkpointNumber, { numBlocks: 1 }); + checkpoint.blocks = [block]; + return checkpoint; + }), + ); } - getBlocksForEpoch(epochNumber: EpochNumber): Promise { + getBlocksForEpoch(epochNumber: EpochNumber): Promise { const epochDuration = DefaultL1ContractsConfig.aztecEpochDuration; const [start, end] = getSlotRangeForEpoch(epochNumber, { epochDuration }); const blocks = this.l2Blocks.filter(b => { @@ -257,12 +310,12 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { getBlocksForSlot(slotNumber: SlotNumber): Promise { const blocks = this.l2Blocks.filter(b => b.header.globalVariables.slotNumber === slotNumber); - return Promise.resolve(blocks.map(b => b.toL2Block())); + return Promise.resolve(blocks); } async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise { const blocks = await this.getBlocksForEpoch(epochNumber); - return blocks.map(b => b.getBlockHeader()); + return blocks.map(b => b.header); } /** diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 3e0a8e7f1503..2f277c44c5d1 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -12,7 +12,7 @@ import { protocolContractsHash } from '@aztec/protocol-contracts'; import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice'; import type { GlobalVariableBuilder } from '@aztec/sequencer-client'; import { AztecAddress } from 
'@aztec/stdlib/aztec-address'; -import { L2Block, type L2BlockSource } from '@aztec/stdlib/block'; +import { L2BlockNew, type L2BlockSource } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; import { EmptyL1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { GasFees } from '@aztec/stdlib/gas'; @@ -332,32 +332,32 @@ describe('aztec node', () => { }); describe('getBlock', () => { - let block1: L2Block; - let block2: L2Block; + let block1: L2BlockNew; + let block2: L2BlockNew; beforeEach(() => { - block1 = L2Block.empty(); - block2 = L2Block.empty(); + block1 = L2BlockNew.empty(); + block2 = L2BlockNew.empty(); l2BlockSource.getBlockNumber.mockResolvedValue(BlockNumber(2)); }); it('returns requested block number', async () => { - l2BlockSource.getBlock.mockResolvedValue(block1); + l2BlockSource.getL2BlockNew.mockResolvedValue(block1); expect(await node.getBlock(BlockNumber(1))).toEqual(block1); - expect(l2BlockSource.getBlock).toHaveBeenCalledWith(BlockNumber(1)); + expect(l2BlockSource.getL2BlockNew).toHaveBeenCalledWith(BlockNumber(1)); }); it('returns latest block', async () => { - l2BlockSource.getBlock.mockResolvedValue(block2); + l2BlockSource.getL2BlockNew.mockResolvedValue(block2); expect(await node.getBlock('latest')).toEqual(block2); - expect(l2BlockSource.getBlock).toHaveBeenCalledWith(2); + expect(l2BlockSource.getL2BlockNew).toHaveBeenCalledWith(2); }); it('returns undefined for non-existent block', async () => { - l2BlockSource.getBlock.mockResolvedValue(undefined); + l2BlockSource.getL2BlockNew.mockResolvedValue(undefined); expect(await node.getBlock(BlockNumber(3))).toEqual(undefined); - expect(l2BlockSource.getBlock).toHaveBeenCalledWith(3); + expect(l2BlockSource.getL2BlockNew).toHaveBeenCalledWith(3); }); }); }); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index dcc1e78e0b09..27b141ccc42c 100644 --- 
a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -30,7 +30,7 @@ import { } from '@aztec/node-lib/factories'; import { type P2P, type P2PClientDeps, createP2PClient, getDefaultAllowedSetupFunctions } from '@aztec/p2p'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; -import { BlockBuilder, GlobalVariableBuilder, SequencerClient, type SequencerPublisher } from '@aztec/sequencer-client'; +import { GlobalVariableBuilder, SequencerClient, type SequencerPublisher } from '@aztec/sequencer-client'; import { PublicProcessorFactory } from '@aztec/simulator/server'; import { AttestationsBlockWatcher, @@ -43,13 +43,11 @@ import { CollectionLimitsConfig, PublicSimulatorConfig } from '@aztec/stdlib/avm import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type BlockParameter, + type CheckpointedL2Block, type DataInBlock, - L2Block, L2BlockHash, - L2BlockHeader, L2BlockNew, type L2BlockSource, - type PublishedL2Block, } from '@aztec/stdlib/block'; import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { @@ -310,18 +308,10 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { // We should really not be modifying the config object config.txPublicSetupAllowList = config.txPublicSetupAllowList ?? 
(await getDefaultAllowedSetupFunctions()); - // Create BlockBuilder for EpochPruneWatcher (slasher functionality) - const blockBuilder = new BlockBuilder( - { ...config, l1GenesisTime, slotDuration: Number(slotDuration) }, - worldStateSynchronizer, - archiver, - dateProvider, - telemetry, - ); - // Create FullNodeCheckpointsBuilder for validator and non-validator block proposal handling const validatorCheckpointsBuilder = new FullNodeCheckpointsBuilder( { ...config, l1GenesisTime, slotDuration: Number(slotDuration) }, + worldStateSynchronizer, archiver, dateProvider, telemetry, @@ -388,7 +378,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { archiver, epochCache, p2pClient.getTxProvider(), - blockBuilder, + validatorCheckpointsBuilder, config, ); watchers.push(epochPruneWatcher); @@ -453,6 +443,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { // Create and start the sequencer client const checkpointsBuilder = new CheckpointsBuilder( { ...config, l1GenesisTime, slotDuration: Number(slotDuration) }, + worldStateSynchronizer, archiver, dateProvider, telemetry, @@ -580,39 +571,38 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { return nodeInfo; } - public async getBlock(block: BlockParameter): Promise { + /** + * Get a block specified by its block number, block hash, or 'latest'. + * @param block - The block parameter (block number, block hash, or 'latest'). + * @returns The requested block. + */ + public async getBlock(block: BlockParameter): Promise { if (block instanceof L2BlockHash) { - const initialBlockHash = await this.#getInitialHeaderHash(); - if (block.equals(initialBlockHash)) { - // Block source doesn't handle initial header so we need to handle the case separately. 
- return this.buildInitialBlock(); - } - const blockHashFr = Fr.fromBuffer(block.toBuffer()); - const publishedBlock = await this.blockSource.getPublishedBlockByHash(blockHashFr); - return publishedBlock?.block; - } else { - const blockNumber = block === 'latest' ? await this.getBlockNumber() : (block as BlockNumber); - if (blockNumber === BlockNumber.ZERO) { - return this.buildInitialBlock(); - } - return await this.blockSource.getBlock(blockNumber); + return this.getBlockByHash(Fr.fromBuffer(block.toBuffer())); + } + const blockNumber = block === 'latest' ? await this.getBlockNumber() : (block as BlockNumber); + if (blockNumber === BlockNumber.ZERO) { + return this.buildInitialBlock(); } + return await this.blockSource.getL2BlockNew(blockNumber); } - private buildInitialBlock(): L2Block { + /** + * Get a block specified by its hash. + * @param blockHash - The block hash being requested. + * @returns The requested block. + */ + public async getBlockByHash(blockHash: Fr): Promise { + const initialBlockHash = await this.#getInitialHeaderHash(); + if (blockHash.equals(Fr.fromBuffer(initialBlockHash.toBuffer()))) { + return this.buildInitialBlock(); + } + return await this.blockSource.getL2BlockNewByHash(blockHash); + } + + private buildInitialBlock(): L2BlockNew { const initialHeader = this.worldStateSynchronizer.getCommitted().getInitialHeader(); - // TODO: (pw/mbps) Clean this up when we move completely to the new types. 
- // return L2BlockNew.empty(initialHeader); - const oldBlockHeader = L2BlockHeader.empty(); - oldBlockHeader.state.l1ToL2MessageTree.root = initialHeader.state.l1ToL2MessageTree.root; - oldBlockHeader.state.partial.noteHashTree.root = initialHeader.state.partial.noteHashTree.root; - oldBlockHeader.state.partial.nullifierTree.root = initialHeader.state.partial.nullifierTree.root; - oldBlockHeader.state.partial.nullifierTree.nextAvailableLeafIndex = - initialHeader.state.partial.nullifierTree.nextAvailableLeafIndex; - oldBlockHeader.state.partial.publicDataTree.root = initialHeader.state.partial.publicDataTree.root; - oldBlockHeader.state.partial.publicDataTree.nextAvailableLeafIndex = - initialHeader.state.partial.publicDataTree.nextAvailableLeafIndex; - return L2Block.empty(oldBlockHeader); + return L2BlockNew.empty(initialHeader); } /** @@ -620,9 +610,8 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { * @param archive - The archive root being requested. * @returns The requested block. */ - public async getBlockByArchive(archive: Fr): Promise { - const publishedBlock = await this.blockSource.getPublishedBlockByArchive(archive); - return publishedBlock?.block; + public async getBlockByArchive(archive: Fr): Promise { + return await this.blockSource.getL2BlockNewByArchive(archive); } /** @@ -631,11 +620,11 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { * @param limit - The maximum number of blocks to obtain. * @returns The blocks requested. */ - public async getBlocks(from: BlockNumber, limit: number): Promise { - return (await this.blockSource.getBlocks(from, limit)) ?? []; + public async getBlocks(from: BlockNumber, limit: number): Promise { + return (await this.blockSource.getL2BlocksNew(from, limit)) ?? 
[]; } - public async getPublishedBlocks(from: BlockNumber, limit: number): Promise { + public async getPublishedBlocks(from: BlockNumber, limit: number): Promise { return (await this.blockSource.getPublishedBlocks(from, limit)) ?? []; } @@ -985,7 +974,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { public async getL2ToL1Messages(epoch: EpochNumber): Promise { // Assumes `getBlocksForEpoch` returns blocks in ascending order of block number. const blocks = await this.blockSource.getBlocksForEpoch(epoch); - const blocksInCheckpoints: L2Block[][] = []; + const blocksInCheckpoints: L2BlockNew[][] = []; let previousSlotNumber = SlotNumber.ZERO; let checkpointIndex = -1; for (const block of blocks) { diff --git a/yarn-project/aztec.js/src/api/block.ts b/yarn-project/aztec.js/src/api/block.ts index cd9d98c7057a..ea09a6851f02 100644 --- a/yarn-project/aztec.js/src/api/block.ts +++ b/yarn-project/aztec.js/src/api/block.ts @@ -1,2 +1,2 @@ -export { Body, L2Block } from '@aztec/stdlib/block'; +export { Body, L2BlockNew } from '@aztec/stdlib/block'; export { getTimestampRangeForEpoch } from '@aztec/stdlib/epoch-helpers'; diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts index 78b594c08ba4..135e35b14a74 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts @@ -180,7 +180,7 @@ describe('e2e_deploy_contract contract class registration', () => { // Contract instance deployed event is emitted via private logs. 
const blockNumber = await aztecNode.getBlockNumber(); - const logs = (await aztecNode.getBlock(blockNumber))!.toL2Block().getPrivateLogs(); + const logs = (await aztecNode.getBlock(blockNumber))!.getPrivateLogs(); expect(logs.length).toBe(1); diff --git a/yarn-project/end-to-end/src/e2e_event_logs.test.ts b/yarn-project/end-to-end/src/e2e_event_logs.test.ts index 0c3062484f96..a2be82c97b67 100644 --- a/yarn-project/end-to-end/src/e2e_event_logs.test.ts +++ b/yarn-project/end-to-end/src/e2e_event_logs.test.ts @@ -189,10 +189,7 @@ describe('Logs', () => { .wait(); // Fetch raw private logs for that block and check tag uniqueness - const logs = (await aztecNode.getBlock(tx.blockNumber!))! - .toL2Block() - .getPrivateLogs() - .filter(l => !l.isEmpty()); + const logs = (await aztecNode.getBlock(tx.blockNumber!))!.getPrivateLogs().filter(l => !l.isEmpty()); expect(logs.length).toBe(tx1NumLogs); @@ -214,10 +211,7 @@ describe('Logs', () => { const blockNumber = tx.blockNumber!; // Fetch raw private logs for that block and check tag uniqueness - const logs = (await aztecNode.getBlock(blockNumber))! 
- .toL2Block() - .getPrivateLogs() - .filter(l => !l.isEmpty()); + const logs = (await aztecNode.getBlock(blockNumber))!.getPrivateLogs().filter(l => !l.isEmpty()); expect(logs.length).toBe(tx2NumLogs); diff --git a/yarn-project/end-to-end/src/e2e_keys.test.ts b/yarn-project/end-to-end/src/e2e_keys.test.ts index 1a2c74a7f232..935dba43dc74 100644 --- a/yarn-project/end-to-end/src/e2e_keys.test.ts +++ b/yarn-project/end-to-end/src/e2e_keys.test.ts @@ -1,6 +1,5 @@ import type { InitialAccountData } from '@aztec/accounts/testing'; import type { AztecAddress } from '@aztec/aztec.js/addresses'; -import type { L2Block } from '@aztec/aztec.js/block'; import { Fr } from '@aztec/aztec.js/fields'; import type { AztecNode } from '@aztec/aztec.js/node'; import type { Wallet } from '@aztec/aztec.js/wallet'; @@ -8,6 +7,7 @@ import { GeneratorIndex, INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { BlockNumber } from '@aztec/foundation/branded-types'; import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto/poseidon'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; +import type { L2BlockNew } from '@aztec/stdlib/block'; import { siloNullifier } from '@aztec/stdlib/hash'; import { computeAppNullifierSecretKey, @@ -93,11 +93,11 @@ describe('Keys', () => { const getNumNullifiedNotes = async (nskApp: Fr, contractAddress: AztecAddress) => { // 1. Get all the note hashes const blocks = await aztecNode.getBlocks(BlockNumber(INITIAL_L2_BLOCK_NUM), 1000); - const noteHashes = blocks.flatMap((block: L2Block) => + const noteHashes = blocks.flatMap((block: L2BlockNew) => block.body.txEffects.flatMap(txEffect => txEffect.noteHashes), ); // 2. Get all the seen nullifiers - const nullifiers = blocks.flatMap((block: L2Block) => + const nullifiers = blocks.flatMap((block: L2BlockNew) => block.body.txEffects.flatMap(txEffect => txEffect.nullifiers), ); // 3. 
Derive all the possible nullifiers using nskApp diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index 79aada51f5d2..11644c83e773 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -1,7 +1,6 @@ import type { ArchiverDataSource } from '@aztec/archiver'; import { type AztecNodeConfig, getConfigEnvVars } from '@aztec/aztec-node'; import { AztecAddress } from '@aztec/aztec.js/addresses'; -import type { L2Block } from '@aztec/aztec.js/block'; import { Fr } from '@aztec/aztec.js/fields'; import { createLogger } from '@aztec/aztec.js/log'; import { GlobalVariables } from '@aztec/aztec.js/tx'; @@ -44,29 +43,30 @@ import { TestDateProvider } from '@aztec/foundation/timer'; import { RollupAbi } from '@aztec/l1-artifacts'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { ProtocolContractsList, protocolContractsHash } from '@aztec/protocol-contracts'; -import { buildBlockWithCleanDB } from '@aztec/prover-client/block-factory'; +import { LightweightCheckpointBuilder } from '@aztec/prover-client/light'; import { SequencerPublisher, SequencerPublisherMetrics } from '@aztec/sequencer-client'; import { CheckpointedL2Block, type CommitteeAttestation, CommitteeAttestationsAndSigners, + L2BlockNew, type L2Tips, - PublishedL2Block, Signature, } from '@aztec/stdlib/block'; -import { L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import { type L1RollupConstants, getSlotStartBuildTimestamp } from '@aztec/stdlib/epoch-helpers'; import { GasFees, GasSettings } from '@aztec/stdlib/gas'; import { tryStop } from '@aztec/stdlib/interfaces/server'; import { SlashFactoryContract } from '@aztec/stdlib/l1-contracts'; import { 
orderAttestations } from '@aztec/stdlib/p2p'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { fr, makeAndSignCommitteeAttestationsAndSigners, - makeCheckpointAttestationFromBlock, + makeCheckpointAttestationFromCheckpoint, mockProcessedTx, } from '@aztec/stdlib/testing'; -import type { BlockHeader, ProcessedTx } from '@aztec/stdlib/tx'; +import type { BlockHeader, CheckpointGlobalVariables, ProcessedTx } from '@aztec/stdlib/tx'; import { type MerkleTreeAdminDatabase, NativeWorldStateService, @@ -122,7 +122,7 @@ describe('L1Publisher integration', () => { let minFee: GasFees; let blockSource: MockProxy; - let blocks: L2Block[] = []; + let blocks: L2BlockNew[] = []; const chainId = createEthereumChain(config.l1RpcUrls, config.l1ChainId).chainInfo.id; @@ -190,57 +190,64 @@ describe('L1Publisher integration', () => { return Promise.resolve(blocks.slice(from - 1, from - 1 + limit)); }, getPublishedBlocks(from, limit, _proven) { - return Promise.resolve( - blocks.slice(from - 1, from - 1 + limit).map(block => - PublishedL2Block.fromFields({ + const slicedBlocks = blocks.slice(from - 1, from - 1 + limit); + return Promise.all( + slicedBlocks.map(async block => + CheckpointedL2Block.fromFields({ + checkpointNumber: CheckpointNumber(block.number), attestations: [], block, // Use L2 block number and hash for faking the L1 info - l1: new L1PublishedData(BigInt(block.number), BigInt(block.number), block.hash.toString()), + l1: new L1PublishedData(BigInt(block.number), BigInt(block.number), (await block.hash()).toString()), }), ), ); }, // Methods needed by L2BlockStream for world state sync getCheckpointedBlocks(from, limit, _proven) { - return Promise.resolve( - blocks - .slice(from - 1, from - 1 + limit) - .map( - block => - new CheckpointedL2Block( - CheckpointNumber(block.number), - block.toL2Block(), - new L1PublishedData(BigInt(block.number), BigInt(block.number), block.hash.toString()), - [], - ), - ), + const slicedBlocks = blocks.slice(from - 1, 
from - 1 + limit); + return Promise.all( + slicedBlocks.map( + async block => + new CheckpointedL2Block( + CheckpointNumber(block.number), + block, + new L1PublishedData(BigInt(block.number), BigInt(block.number), (await block.hash()).toString()), + [], + ), + ), ); }, - getPublishedCheckpoints(checkpointNumber, _limit) { + async getPublishedCheckpoints(checkpointNumber, _limit) { const block = blocks.find(b => Number(b.number) === Number(checkpointNumber)); if (!block) { return Promise.resolve([]); } - return Promise.resolve([ + const checkpoint = new Checkpoint( + block.archive, + CheckpointHeader.random({ lastArchiveRoot: block.header.lastArchive.root }), + [block], + CheckpointNumber(block.number), + ); + return [ new PublishedCheckpoint( - block.toCheckpoint(), - new L1PublishedData(BigInt(block.number), BigInt(block.number), block.hash.toString()), + checkpoint, + new L1PublishedData(BigInt(block.number), BigInt(block.number), (await block.hash()).toString()), [], ), - ]); + ]; }, - getL2Tips(): Promise { + async getL2Tips(): Promise { const latestBlock = blocks.at(-1); const blockId = latestBlock - ? { number: latestBlock.number, hash: latestBlock.hash.toString() } + ? { number: latestBlock.number, hash: (await latestBlock.hash()).toString() } : { number: BlockNumber.ZERO, hash: GENESIS_BLOCK_HEADER_HASH.toString() }; const tipId = { block: blockId, checkpoint: { number: CheckpointNumber(blockId.number), hash: blockId.hash }, }; - return Promise.resolve({ proposed: blockId, checkpointed: tipId, proven: tipId, finalized: tipId }); + return { proposed: blockId, checkpointed: tipId, proven: tipId, finalized: tipId }; }, getBlockNumber(): Promise { return Promise.resolve(BlockNumber(blocks.at(-1)?.number ?? 
BlockNumber.ZERO)); @@ -337,15 +344,45 @@ describe('L1Publisher integration', () => { ({ msgHash }) => msgHash, ); - const buildBlock = async (globalVariables: GlobalVariables, txs: ProcessedTx[], l1ToL2Messages: Fr[]) => { + /** + * Build a checkpoint with a single block using the LightweightCheckpointBuilder. + * This properly computes all checkpoint header fields (blobsHash, blockHeadersHash, inHash, epochOutHash, etc.) + */ + const buildCheckpoint = async ( + globalVariables: GlobalVariables, + txs: ProcessedTx[], + l1ToL2Messages: Fr[], + previousCheckpointOutHashes: Fr[] = [], + ): Promise => { await worldStateSynchronizer.syncImmediate(); const tempFork = await worldStateSynchronizer.fork(BlockNumber(globalVariables.blockNumber - 1)); - const block = await buildBlockWithCleanDB(txs, globalVariables, l1ToL2Messages, tempFork); + + const checkpointConstants: CheckpointGlobalVariables = { + chainId: globalVariables.chainId, + version: globalVariables.version, + slotNumber: globalVariables.slotNumber, + coinbase: globalVariables.coinbase, + feeRecipient: globalVariables.feeRecipient, + gasFees: globalVariables.gasFees, + }; + + const checkpointNumber = CheckpointNumber.fromBlockNumber(globalVariables.blockNumber); + const builder = await LightweightCheckpointBuilder.startNewCheckpoint( + checkpointNumber, + checkpointConstants, + l1ToL2Messages, + previousCheckpointOutHashes, + tempFork, + ); + + await builder.addBlock(globalVariables, txs, { insertTxsEffects: true }); + const checkpoint = await builder.completeCheckpoint(); + await tempFork.close(); - return block; + return checkpoint; }; - const buildSingleBlock = async (opts: { l1ToL2Messages?: Fr[]; blockNumber?: BlockNumber } = {}) => { + const buildSingleCheckpoint = async (opts: { l1ToL2Messages?: Fr[]; blockNumber?: BlockNumber } = {}) => { const l1ToL2Messages = opts.l1ToL2Messages ?? 
new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(Fr.ZERO); const txs = await Promise.all([makeProcessedTx(0x1000), makeProcessedTx(0x2000)]); @@ -362,9 +399,9 @@ describe('L1Publisher integration', () => { feeRecipient, new GasFees(0, await rollup.getManaMinFeeAt(timestamp, true)), ); - const block = await buildBlock(globalVariables, txs, l1ToL2Messages); + const checkpoint = await buildCheckpoint(globalVariables, txs, l1ToL2Messages); blockSource.getL1ToL2Messages.mockResolvedValueOnce(l1ToL2Messages); - return block; + return { checkpoint, l1ToL2Messages }; }; describe('block building', () => { @@ -420,17 +457,17 @@ describe('L1Publisher integration', () => { new GasFees(0, await rollup.getManaMinFeeAt(timestamp, true)), ); - const block = await buildBlock(globalVariables, txs, currentL1ToL2Messages); + const checkpoint = await buildCheckpoint(globalVariables, txs, currentL1ToL2Messages); + const block = checkpoint.blocks[0]; const totalManaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.totalGas.l2Gas)), Fr.ZERO); expect(totalManaUsed.toBigInt()).toEqual(block.header.totalManaUsed.toBigInt()); - prevHeader = block.getBlockHeader(); + prevHeader = block.header; blockSource.getL1ToL2Messages.mockResolvedValueOnce(currentL1ToL2Messages); - const checkpointBlobFields = block.getCheckpointBlobFields(); + const checkpointBlobFields = checkpoint.toBlobFields(); const blockBlobs = getBlobsPerL1Block(checkpointBlobFields); - expect(block.header.blobsHash).toEqual(sha256ToField(blockBlobs.map(b => b.getEthVersionedBlobHash()))); let prevBlobAccumulatorHash = (await rollup.getCurrentBlobCommitmentsHash()).toBuffer(); @@ -451,7 +488,7 @@ describe('L1Publisher integration', () => { ); await publisher.enqueueProposeCheckpoint( - block.toCheckpoint(), + checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty(), ); @@ -492,7 +529,7 @@ describe('L1Publisher integration', () => { functionName: 'propose', args: [ { - header: 
block.getCheckpointHeader().toViem(), + header: checkpoint.header.toViem(), archive: `0x${block.archive.root.toBuffer().toString('hex')}`, oracleInput: { feeAssetPriceModifier: 0n, @@ -556,9 +593,13 @@ describe('L1Publisher integration', () => { }); }); - const expectPublishBlock = async (block: L2Block, attestations: CommitteeAttestation[], signature: Signature) => { + const expectPublishCheckpoint = async ( + checkpoint: Checkpoint, + attestations: CommitteeAttestation[], + signature: Signature, + ) => { await publisher.enqueueProposeCheckpoint( - block.toCheckpoint(), + checkpoint, new CommitteeAttestationsAndSigners(attestations), signature, ); @@ -568,14 +609,15 @@ describe('L1Publisher integration', () => { }; it('publishes a block with attestations', async () => { - const block = await buildSingleBlock(); + const { checkpoint } = await buildSingleCheckpoint(); + const block = checkpoint.blocks[0]; - const blockAttestations = validators.map(v => makeCheckpointAttestationFromBlock(block, v)); - const attestations = orderAttestations(blockAttestations, committee!); + const checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); + const attestations = orderAttestations(checkpointAttestations, committee!); const canPropose = await publisher.canProposeAtNextEthBlock(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); expect(canPropose?.slot).toEqual(block.header.getSlot()); - await publisher.validateBlockHeader(block.getCheckpointHeader()); + await publisher.validateBlockHeader(checkpoint.header); const proposerSigner = validators.find(v => v.address.equals(proposer!)); @@ -585,34 +627,36 @@ describe('L1Publisher integration', () => { proposerSigner!, ); - await expectPublishBlock(block, attestations, attestationsAndSignersSignature); + await expectPublishCheckpoint(checkpoint, attestations, attestationsAndSignersSignature); }); it('fails to publish a block without the proposer attestation', async () => { - const block = await 
buildSingleBlock(); - const blockAttestations = validators.map(v => makeCheckpointAttestationFromBlock(block, v)); + const { checkpoint } = await buildSingleCheckpoint(); + const block = checkpoint.blocks[0]; + const checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); // Reverse attestations to break proposer attestation - const attestations = orderAttestations(blockAttestations, committee!).reverse(); + const attestations = orderAttestations(checkpointAttestations, committee!).reverse(); const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations); const canPropose = await publisher.canProposeAtNextEthBlock(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); expect(canPropose?.slot).toEqual(block.header.getSlot()); - await publisher.validateBlockHeader(block.getCheckpointHeader()); + await publisher.validateBlockHeader(checkpoint.header); await expect( - publisher.enqueueProposeCheckpoint(block.toCheckpoint(), attestationsAndSigners, Signature.empty()), + publisher.enqueueProposeCheckpoint(checkpoint, attestationsAndSigners, Signature.empty()), ).rejects.toThrow(/ValidatorSelection__InvalidCommitteeCommitment/); }); it('rejects flipped proposer signature', async () => { - const block = await buildSingleBlock(); - const blockAttestations = validators.map(v => makeCheckpointAttestationFromBlock(block, v)); - const attestations = orderAttestations(blockAttestations, committee!); + const { checkpoint } = await buildSingleCheckpoint(); + const block = checkpoint.blocks[0]; + const checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); + const attestations = orderAttestations(checkpointAttestations, committee!); const canPropose = await publisher.canProposeAtNextEthBlock(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); expect(canPropose?.slot).toEqual(block.header.getSlot()); - await publisher.validateBlockHeader(block.getCheckpointHeader()); + await 
publisher.validateBlockHeader(checkpoint.header); const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations); const attestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( @@ -622,7 +666,7 @@ describe('L1Publisher integration', () => { await expect( publisher.enqueueProposeCheckpoint( - block.toCheckpoint(), + checkpoint, attestationsAndSigners, flipSignature(attestationsAndSignersSignature), ), @@ -630,13 +674,14 @@ describe('L1Publisher integration', () => { }); it('rejects signature with invalid recovery value', async () => { - const block = await buildSingleBlock(); - const blockAttestations = validators.map(v => makeCheckpointAttestationFromBlock(block, v)); - const attestations = orderAttestations(blockAttestations, committee!); + const { checkpoint } = await buildSingleCheckpoint(); + const block = checkpoint.blocks[0]; + const checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); + const attestations = orderAttestations(checkpointAttestations, committee!); const canPropose = await publisher.canProposeAtNextEthBlock(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); expect(canPropose?.slot).toEqual(block.header.getSlot()); - await publisher.validateBlockHeader(block.getCheckpointHeader()); + await publisher.validateBlockHeader(checkpoint.header); const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations); const attestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( @@ -650,19 +695,20 @@ describe('L1Publisher integration', () => { const wrongV = attestationsAndSignersSignature.v - 27; const wrongSig = new Signature(attestationsAndSignersSignature.r, attestationsAndSignersSignature.s, wrongV); - await expect( - publisher.enqueueProposeCheckpoint(block.toCheckpoint(), attestationsAndSigners, wrongSig), - ).rejects.toThrow(/ECDSAInvalidSignature/); + await expect(publisher.enqueueProposeCheckpoint(checkpoint, attestationsAndSigners, 
wrongSig)).rejects.toThrow( + /ECDSAInvalidSignature/, + ); }); it('publishes a block invalidating the previous one', async () => { - const badBlock = await buildSingleBlock(); + const { checkpoint: badCheckpoint } = await buildSingleCheckpoint(); + const badBlock = badCheckpoint.blocks[0]; // Publish the first invalid block - const badBlockAttestations = validators + const badCheckpointAttestations = validators .filter(v => v.address.equals(proposer!)) - .map(v => makeCheckpointAttestationFromBlock(badBlock, v)); - const badAttestations = orderAttestations(badBlockAttestations, committee!); + .map(v => makeCheckpointAttestationFromCheckpoint(badCheckpoint, v)); + const badAttestations = orderAttestations(badCheckpointAttestations, committee!); const badAttestationsAndSigners = new CommitteeAttestationsAndSigners(badAttestations); const badAttestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( @@ -670,7 +716,7 @@ describe('L1Publisher integration', () => { validators.find(v => v.address.equals(proposer!))!, ); - await expectPublishBlock(badBlock, badAttestations, badAttestationsAndSignersSignature); + await expectPublishCheckpoint(badCheckpoint, badAttestations, badAttestationsAndSignersSignature); await progressTimeBySlot(); logger.warn(`Published bad block ${badBlock.number} with archive root ${badBlock.archive.root}`); @@ -679,17 +725,18 @@ describe('L1Publisher integration', () => { ({ currentProposer: proposer } = await epochCache.getProposerAttesterAddressInCurrentOrNextSlot()); // Prepare for invalidating the previous one and publish the same block with proper attestations - const block = await buildSingleBlock({ blockNumber: BlockNumber(1) }); + const { checkpoint } = await buildSingleCheckpoint({ blockNumber: BlockNumber(1) }); + const block = checkpoint.blocks[0]; expect(block.number).toEqual(badBlock.number); - const blockAttestations = validators.map(v => makeCheckpointAttestationFromBlock(block, v)); - const attestations = 
orderAttestations(blockAttestations, committee!); + const checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); + const attestations = orderAttestations(checkpointAttestations, committee!); // Check we can invalidate the checkpoint logger.warn('Checking simulate invalidate checkpoint'); const invalidateRequest = await publisher.simulateInvalidateCheckpoint({ valid: false, committee: committee!, - checkpoint: block.toCheckpoint().toCheckpointInfo(), + checkpoint: checkpoint.toCheckpointInfo(), attestors: [], attestations: badAttestations, epoch: EpochNumber(1), @@ -709,10 +756,8 @@ describe('L1Publisher integration', () => { // Same for validation logger.warn('Checking validate block header'); - await expect(publisher.validateBlockHeader(block.getCheckpointHeader())).rejects.toThrow( - /Rollup__InvalidArchive/, - ); - await publisher.validateBlockHeader(block.getCheckpointHeader(), { + await expect(publisher.validateBlockHeader(checkpoint.header)).rejects.toThrow(/Rollup__InvalidArchive/); + await publisher.validateBlockHeader(checkpoint.header, { forcePendingCheckpointNumber: forcePendingCheckpointNumber ?? CheckpointNumber.ZERO, }); @@ -726,14 +771,9 @@ describe('L1Publisher integration', () => { // Invalidate and propose logger.warn('Enqueuing requests to invalidate and propose the checkpoint'); publisher.enqueueInvalidateCheckpoint(invalidateRequest); - await publisher.enqueueProposeCheckpoint( - block.toCheckpoint(), - attestationsAndSigners, - attestationsAndSignersSignature, - { - forcePendingCheckpointNumber: forcePendingCheckpointNumber ?? CheckpointNumber.ZERO, - }, - ); + await publisher.enqueueProposeCheckpoint(checkpoint, attestationsAndSigners, attestationsAndSignersSignature, { + forcePendingCheckpointNumber: forcePendingCheckpointNumber ?? 
CheckpointNumber.ZERO, + }); const result = await publisher.sendRequests(); expect(result!.successfulActions).toEqual(['invalidate-by-insufficient-attestations', 'propose']); expect(result!.failedActions).toEqual([]); @@ -746,13 +786,10 @@ describe('L1Publisher integration', () => { }); it(`succeeds proposing new block when vote fails`, async () => { - const block = await buildSingleBlock(); + const { checkpoint } = await buildSingleCheckpoint(); + const block = checkpoint.blocks[0]; - await publisher.enqueueProposeCheckpoint( - block.toCheckpoint(), - CommitteeAttestationsAndSigners.empty(), - Signature.empty(), - ); + await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()); await publisher.enqueueGovernanceCastSignal( l1ContractAddresses.rollupAddress, block.slot, @@ -772,16 +809,12 @@ describe('L1Publisher integration', () => { // INBOX.consume does not match the header.inHash and we get a Rollup__BlobHash that is not caught by // validateHeader before. 
const l1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(1n)); - const block = await buildSingleBlock({ l1ToL2Messages }); + const { checkpoint } = await buildSingleCheckpoint({ l1ToL2Messages }); // Expect the simulation to fail const loggerErrorSpy = jest.spyOn((publisher as any).log, 'error'); await expect( - publisher.enqueueProposeCheckpoint( - block.toCheckpoint(), - CommitteeAttestationsAndSigners.empty(), - Signature.empty(), - ), + publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()), ).rejects.toThrow(/Rollup__InvalidInHash/); expect(loggerErrorSpy).toHaveBeenNthCalledWith( 2, @@ -806,10 +839,10 @@ describe('L1Publisher integration', () => { initialL2Slot = BigInt(await rollup.getSlotNumber()); }); - const getProposeTxTimeoutAt = (block: L2Block) => { + const getProposeTxTimeoutAt = (checkpoint: Checkpoint) => { const { slotDuration: aztecSlotDuration } = l1Constants; const txTimeoutAt = new Date( - (Number(getSlotStartBuildTimestamp(block.slot, l1Constants)) + Number(aztecSlotDuration)) * 1000, + (Number(getSlotStartBuildTimestamp(checkpoint.slot, l1Constants)) + Number(aztecSlotDuration)) * 1000, ); logger.warn(`Setting tx timeout at ${txTimeoutAt.toISOString()} (${txTimeoutAt.getTime()})`); return txTimeoutAt; @@ -828,20 +861,15 @@ describe('L1Publisher integration', () => { await retryUntil(() => ethCheatCodes.getTxPoolStatus().then(s => s.pending > 0), 'tx sent', 20, 0.1); }; - const enqueueProposeL2Block = async (block: L2Block) => { - await publisher.enqueueProposeCheckpoint( - block.toCheckpoint(), - CommitteeAttestationsAndSigners.empty(), - Signature.empty(), - { - txTimeoutAt: getProposeTxTimeoutAt(block), - }, - ); + const enqueueProposeL2Checkpoint = async (checkpoint: Checkpoint) => { + await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty(), { + txTimeoutAt: getProposeTxTimeoutAt(checkpoint), + }); }; 
it(`cancels block proposal when the L2 slot ends`, async () => { - const block = await buildSingleBlock(); - await enqueueProposeL2Block(block); + const { checkpoint } = await buildSingleCheckpoint(); + await enqueueProposeL2Checkpoint(checkpoint); await sendRequests(); // Advance one L1 block at a time without mining the publish tx. @@ -878,8 +906,9 @@ describe('L1Publisher integration', () => { }); it(`speeds up block proposal if not mined`, async () => { - const block = await buildSingleBlock(); - await enqueueProposeL2Block(block); + const { checkpoint } = await buildSingleCheckpoint(); + const block = checkpoint.blocks[0]; + await enqueueProposeL2Checkpoint(checkpoint); await sendRequests(); const [initialTx] = await ethCheatCodes.getTxPoolContents(); @@ -912,8 +941,8 @@ describe('L1Publisher integration', () => { }); it(`can send two consecutive proposals if the first one times out`, async () => { - const block1 = await buildSingleBlock(); - await enqueueProposeL2Block(block1); + const { checkpoint: checkpoint1 } = await buildSingleCheckpoint(); + await enqueueProposeL2Checkpoint(checkpoint1); await sendRequests(); const [initialTx] = await ethCheatCodes.getTxPoolContents(); @@ -940,10 +969,11 @@ describe('L1Publisher integration', () => { expect(await ethCheatCodes.getTxPoolStatus()).toEqual({ pending: 1, queued: 0 }); // Now we should be able to send a second proposal - const block2 = await buildSingleBlock({ blockNumber: BlockNumber(1) }); + const { checkpoint: checkpoint2 } = await buildSingleCheckpoint({ blockNumber: BlockNumber(1) }); + const block2 = checkpoint2.blocks[0]; expect(BigInt(block2.slot)).toEqual(initialL2Slot + 1n); sendRequestsResult = undefined; - await enqueueProposeL2Block(block2); + await enqueueProposeL2Checkpoint(checkpoint2); await sendRequests(); // Wait for the new proposal to be sent to the pool diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/write_json.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/write_json.ts 
index 8ed26133eabb..f0534b911eb1 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/write_json.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/write_json.ts @@ -1,8 +1,9 @@ import { AztecAddress } from '@aztec/aztec.js/addresses'; -import type { L2Block } from '@aztec/aztec.js/block'; import { Fr } from '@aztec/aztec.js/fields'; import { BatchedBlob, Blob, getEthBlobEvaluationInputs, getPrefixedEthBlobCommitments } from '@aztec/blob-lib'; import { EthAddress } from '@aztec/foundation/eth-address'; +import { L2BlockNew } from '@aztec/stdlib/block'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { writeFile } from 'fs/promises'; @@ -14,7 +15,7 @@ const AZTEC_GENERATE_TEST_DATA = !!process.env.AZTEC_GENERATE_TEST_DATA; */ export async function writeJson( fileName: string, - block: L2Block, + block: L2BlockNew, l1ToL2Content: Fr[], blobs: Blob[], batchedBlob: BatchedBlob, @@ -32,6 +33,12 @@ export async function writeJson( return `0x${buffer.toString('hex').padStart(size, '0')}`; }; + // Create a checkpoint header for this block + const checkpointHeader = CheckpointHeader.random({ + slotNumber: block.slot, + timestamp: block.timestamp, + }); + const jsonObject = { populate: { l1ToL2Content: l1ToL2Content.map(value => asHex(value)), @@ -50,22 +57,22 @@ export async function writeJson( checkpointNumber: block.number, body: `0x${block.body.toBuffer().toString('hex')}`, header: { - lastArchiveRoot: asHex(block.header.lastArchive.root), - blockHeadersHash: asHex(block.header.blockHeadersHash), - blobsHash: asHex(block.header.blobsHash), - inHash: asHex(block.header.inHash), - outHash: asHex(block.header.epochOutHash), - slotNumber: Number(block.header.globalVariables.slotNumber), - timestamp: Number(block.header.globalVariables.timestamp), - coinbase: asHex(block.header.globalVariables.coinbase, 40), - feeRecipient: asHex(block.header.globalVariables.feeRecipient), + lastArchiveRoot: asHex(checkpointHeader.lastArchiveRoot), + blockHeadersHash: 
asHex(checkpointHeader.blockHeadersHash), + blobsHash: asHex(checkpointHeader.blobsHash), + inHash: asHex(checkpointHeader.inHash), + outHash: asHex(checkpointHeader.epochOutHash), + slotNumber: Number(checkpointHeader.slotNumber), + timestamp: Number(checkpointHeader.timestamp), + coinbase: asHex(checkpointHeader.coinbase, 40), + feeRecipient: asHex(checkpointHeader.feeRecipient), gasFees: { - feePerDaGas: Number(block.header.globalVariables.gasFees.feePerDaGas), - feePerL2Gas: Number(block.header.globalVariables.gasFees.feePerL2Gas), + feePerDaGas: Number(checkpointHeader.gasFees.feePerDaGas), + feePerL2Gas: Number(checkpointHeader.gasFees.feePerL2Gas), }, - totalManaUsed: block.header.totalManaUsed.toNumber(), + totalManaUsed: checkpointHeader.totalManaUsed.toNumber(), }, - headerHash: asHex(block.getCheckpointHeader().hash()), + headerHash: asHex(checkpointHeader.hash()), numTxs: block.body.txEffects.length, }, }; diff --git a/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts b/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts index cced9de0c773..a4ff7784b50d 100644 --- a/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts +++ b/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts @@ -12,7 +12,7 @@ import { createExtendedL1Client } from '@aztec/ethereum/client'; import { getL1ContractsConfigEnvVars } from '@aztec/ethereum/config'; import { RollupContract } from '@aztec/ethereum/contracts'; import type { DeployAztecL1ContractsReturnType } from '@aztec/ethereum/deploy-aztec-l1-contracts'; -import { EpochNumber } from '@aztec/foundation/branded-types'; +import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types'; import { SecretValue } from '@aztec/foundation/config'; import { Signature } from '@aztec/foundation/eth-signature'; import { retryUntil } from '@aztec/foundation/retry'; @@ -129,9 +129,9 @@ 
describe('e2e_multi_validator_node', () => { expect(tx.blockNumber).toBeDefined(); const dataStore = (aztecNode as AztecNodeService).getBlockSource() as Archiver; - const [block] = await dataStore.getPublishedBlocks(tx.blockNumber!, tx.blockNumber!); - const payload = new ConsensusPayload(block.block.header.toCheckpointHeader(), block.block.archive.root); - const attestations = block.attestations + const [publishedCheckpoint] = await dataStore.getPublishedCheckpoints(CheckpointNumber(tx.blockNumber!), 1); + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const attestations = publishedCheckpoint.attestations .filter(a => !a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); @@ -192,9 +192,9 @@ describe('e2e_multi_validator_node', () => { expect(tx.blockNumber).toBeDefined(); const dataStore = (aztecNode as AztecNodeService).getBlockSource() as Archiver; - const [block] = await dataStore.getPublishedBlocks(tx.blockNumber!, tx.blockNumber!); - const payload = new ConsensusPayload(block.block.header.toCheckpointHeader(), block.block.archive.root); - const attestations = block.attestations + const [publishedCheckpoint] = await dataStore.getPublishedCheckpoints(CheckpointNumber(tx.blockNumber!), 1); + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const attestations = publishedCheckpoint.attestations .filter(a => !a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); diff --git a/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts b/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts index 9bd91976c1d4..967c886b596c 100644 --- a/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts +++ b/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts @@ -5,6 +5,7 @@ import { Fr } from '@aztec/aztec.js/fields'; import type { Logger } from '@aztec/aztec.js/log'; import type { AztecNode } from 
'@aztec/aztec.js/node'; import type { Wallet } from '@aztec/aztec.js/wallet'; +import { encodeCheckpointBlobDataFromBlocks } from '@aztec/blob-lib/encoding'; import { FIELDS_PER_BLOB } from '@aztec/constants'; import { AvmTestContract } from '@aztec/noir-test-contracts.js/AvmTest'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; @@ -91,7 +92,7 @@ describe('e2e_multiple_blobs', () => { const block = (await aztecNode.getBlock(blockNumber))!; - const numBlobFields = block.getCheckpointBlobFields().length; + const numBlobFields = encodeCheckpointBlobDataFromBlocks([block.toBlockBlobData()]).length; const numBlobs = Math.ceil(numBlobFields / FIELDS_PER_BLOB); logger.info( `Block ${blockNumber} has ${provenTxs.length} txs, which produce ${numBlobFields} blob fields in ${numBlobs} blobs.`, diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index 38db05ea6071..f707b781809d 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -1,6 +1,7 @@ import type { Archiver } from '@aztec/archiver'; import type { AztecNodeConfig, AztecNodeService } from '@aztec/aztec-node'; import { SentTx } from '@aztec/aztec.js/contracts'; +import { CheckpointNumber } from '@aztec/foundation/branded-types'; import { Signature } from '@aztec/foundation/eth-signature'; import { retryUntil } from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; @@ -187,9 +188,9 @@ describe('e2e_p2p_network', () => { // Gather signers from attestations downloaded from L1 const blockNumber = await txsSentViaDifferentNodes[0][0].getReceipt().then(r => r.blockNumber!); const dataStore = (nodes[0] as AztecNodeService).getBlockSource() as Archiver; - const [block] = await dataStore.getPublishedBlocks(blockNumber, blockNumber); - const payload = new ConsensusPayload(block.block.header.toCheckpointHeader(), 
block.block.archive.root); - const attestations = block.attestations + const [publishedCheckpoint] = await dataStore.getPublishedCheckpoints(CheckpointNumber(blockNumber), 1); + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const attestations = publishedCheckpoint.attestations .filter(a => !a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); const signers = await Promise.all(attestations.map(att => att.getSender()!.toString())); diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts index daae5f3dfcd6..75c3e5678512 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts @@ -5,7 +5,7 @@ import { SentTx } from '@aztec/aztec.js/contracts'; import { Fr } from '@aztec/aztec.js/fields'; import { addL1Validator } from '@aztec/cli/l1/validators'; import { RollupContract } from '@aztec/ethereum/contracts'; -import { EpochNumber } from '@aztec/foundation/branded-types'; +import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types'; import { Signature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; import { MockZKPassportVerifierAbi } from '@aztec/l1-artifacts/MockZKPassportVerifierAbi'; @@ -223,9 +223,9 @@ describe('e2e_p2p_network', () => { // Gather signers from attestations downloaded from L1 const blockNumber = await txsSentViaDifferentNodes[0][0].getReceipt().then(r => r.blockNumber!); const dataStore = (nodes[0] as AztecNodeService).getBlockSource() as Archiver; - const [block] = await dataStore.getPublishedBlocks(blockNumber, blockNumber); - const payload = new ConsensusPayload(block.block.header.toCheckpointHeader(), block.block.archive.root); - const attestations = block.attestations + const [publishedCheckpoint] = await 
dataStore.getPublishedCheckpoints(CheckpointNumber(blockNumber), 1); + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const attestations = publishedCheckpoint.attestations .filter(a => !a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); const signers = await Promise.all(attestations.map(att => att.getSender()!.toString())); diff --git a/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts index 3bc198460c8d..7d68f9e2343b 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts @@ -1,6 +1,7 @@ import type { Archiver } from '@aztec/archiver'; import type { AztecNodeConfig, AztecNodeService } from '@aztec/aztec-node'; import { SentTx } from '@aztec/aztec.js/contracts'; +import { CheckpointNumber } from '@aztec/foundation/branded-types'; import { Signature } from '@aztec/foundation/eth-signature'; import { retryUntil } from '@aztec/foundation/retry'; import { ENR, type P2PClient, type P2PService, type PeerId } from '@aztec/p2p'; @@ -357,9 +358,9 @@ describe('e2e_p2p_preferred_network', () => { // Gather signers from attestations downloaded from L1 const blockNumber = await txsSentViaDifferentNodes[0][0].getReceipt().then(r => r.blockNumber!); const dataStore = (nodes[0] as AztecNodeService).getBlockSource() as Archiver; - const [block] = await dataStore.getPublishedBlocks(blockNumber, blockNumber); - const payload = new ConsensusPayload(block.block.header.toCheckpointHeader(), block.block.archive.root); - const attestations = block.attestations + const [publishedCheckpoint] = await dataStore.getPublishedCheckpoints(CheckpointNumber(blockNumber), 1); + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const attestations = publishedCheckpoint.attestations .filter(a => 
!a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); const signers = await Promise.all(attestations.map(att => att.getSender()!.toString())); diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 2afda2cc510e..6dc4301150d7 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -6,7 +6,6 @@ import { times } from '@aztec/foundation/collection'; import { sleep } from '@aztec/foundation/sleep'; import { unfreeze } from '@aztec/foundation/types'; import type { LibP2PService, P2PClient } from '@aztec/p2p'; -import type { BlockBuilder } from '@aztec/sequencer-client'; import type { CppPublicTxSimulator, PublicTxResult } from '@aztec/simulator/server'; import { BlockProposal } from '@aztec/stdlib/p2p'; import { ReExFailedTxsError, ReExStateMismatchError, ReExTimeoutError } from '@aztec/stdlib/validators'; @@ -170,30 +169,28 @@ describe('e2e_p2p_reex', () => { node: AztecNodeService, stub: (tx: Tx, originalSimulate: (tx: Tx) => Promise) => Promise, ) => { - const blockBuilder: BlockBuilder = (node as any).sequencer.sequencer.blockBuilder; + const blockBuilder: any = (node as any).sequencer.sequencer.blockBuilder; const originalCreateDeps = blockBuilder.makeBlockBuilderDeps.bind(blockBuilder); - jest - .spyOn(blockBuilder, 'makeBlockBuilderDeps') - .mockImplementation(async (...args: Parameters) => { - const deps = await originalCreateDeps(...args); - t.logger.warn('Creating mocked processor factory'); - const simulator: CppPublicTxSimulator = (deps.processor as any).publicTxSimulator; - const originalSimulate = simulator.simulate.bind(simulator); - // We only stub the simulate method if it's NOT the first time we see the tx - // so the proposer works fine, but we cause the failure in the validators. 
- jest.spyOn(simulator, 'simulate').mockImplementation((tx: Tx) => { - const txHash = tx.getTxHash().toString(); - if (seenTxs.has(txHash)) { - t.logger.warn('Calling stubbed simulate for tx', { txHash }); - return stub(tx, originalSimulate); - } else { - seenTxs.add(txHash); - t.logger.warn('Calling original simulate for tx', { txHash }); - return originalSimulate(tx); - } - }); - return deps; + jest.spyOn(blockBuilder, 'makeBlockBuilderDeps').mockImplementation(async (...args: any[]) => { + const deps = await originalCreateDeps(...args); + t.logger.warn('Creating mocked processor factory'); + const simulator: CppPublicTxSimulator = (deps.processor as any).publicTxSimulator; + const originalSimulate = simulator.simulate.bind(simulator); + // We only stub the simulate method if it's NOT the first time we see the tx + // so the proposer works fine, but we cause the failure in the validators. + jest.spyOn(simulator, 'simulate').mockImplementation((tx: Tx) => { + const txHash = tx.getTxHash().toString(); + if (seenTxs.has(txHash)) { + t.logger.warn('Calling stubbed simulate for tx', { txHash }); + return stub(tx, originalSimulate); + } else { + seenTxs.add(txHash); + t.logger.warn('Calling original simulate for tx', { txHash }); + return originalSimulate(tx); + } }); + return deps; + }); }; // Have the public tx processor take an extra long time to process the tx, so the validator times out diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 25c5802aef56..0a560e263700 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -43,10 +43,11 @@ import { EpochCache } from '@aztec/epoch-cache'; import { getL1ContractsConfigEnvVars } from '@aztec/ethereum/config'; import { EmpireSlashingProposerContract, GovernanceProposerContract, RollupContract } from '@aztec/ethereum/contracts'; import { createL1TxUtilsWithBlobsFromViemWallet } from 
'@aztec/ethereum/l1-tx-utils-with-blobs'; -import { BlockNumber } from '@aztec/foundation/branded-types'; +import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; import { SecretValue } from '@aztec/foundation/config'; import { Signature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; +import { bufferToHex, hexToBuffer } from '@aztec/foundation/string'; import { Timer } from '@aztec/foundation/timer'; import { RollupAbi } from '@aztec/l1-artifacts'; import { SchnorrHardcodedAccountContract } from '@aztec/noir-contracts.js/SchnorrHardcodedAccount'; @@ -54,7 +55,8 @@ import { TokenContract } from '@aztec/noir-contracts.js/Token'; import { SpamContract } from '@aztec/noir-test-contracts.js/Spam'; import { SequencerPublisher, SequencerPublisherMetrics } from '@aztec/sequencer-client'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { CommitteeAttestationsAndSigners, L2Block } from '@aztec/stdlib/block'; +import { CommitteeAttestationsAndSigners } from '@aztec/stdlib/block'; +import { Checkpoint } from '@aztec/stdlib/checkpoint'; import { tryStop } from '@aztec/stdlib/interfaces/server'; import { TestWallet } from '@aztec/test-wallet/server'; import { createWorldStateSynchronizer } from '@aztec/world-state'; @@ -250,13 +252,13 @@ class TestVariant { } } - async writeBlocks(blocks: L2Block[]) { - await this.writeJson(`blocks`, { blocks: blocks.map(block => block.toString()) }); + async writeCheckpoints(checkpoints: Checkpoint[]) { + await this.writeJson(`checkpoints`, { checkpoints: checkpoints.map(cp => bufferToHex(cp.toBuffer())) }); } - loadBlocks() { - const json = this.loadJson(`blocks`); - return (json['blocks'] as string[]).map(b => L2Block.fromString(b)); + loadCheckpoints(): Checkpoint[] { + const json = this.loadJson(`checkpoints`); + return (json['checkpoints'] as string[]).map(cp => Checkpoint.fromBuffer(hexToBuffer(cp))); } numberOfBlocksStored() { @@ -359,9 +361,11 @@ 
describe('e2e_synching', () => { await cheatCodes.rollup.markAsProven(); } - const blocks = await aztecNode.getBlocks(BlockNumber(1), await aztecNode.getBlockNumber()); + const blockNumber = await aztecNode.getBlockNumber(); + const publishedCheckpoints = await aztecNode.getPublishedCheckpoints(CheckpointNumber(1), blockNumber); + const checkpoints = publishedCheckpoints.map(pc => pc.checkpoint); - await variant.writeBlocks(blocks); + await variant.writeCheckpoints(checkpoints); await teardown(); }, 240_400_000, @@ -448,22 +452,19 @@ describe('e2e_synching', () => { }, ); - const blocks = variant.loadBlocks(); + const checkpoints = variant.loadCheckpoints(); - // For each of the blocks we progress time such that it land at the correct time + // For each of the checkpoints we progress time such that they land at the correct time // We create blocks for every ethereum slot simply to make sure that the test is "closer" to // a real world. - for (const block of blocks) { - const targetTime = Number(block.header.globalVariables.timestamp) - ETHEREUM_SLOT_DURATION; + for (const checkpoint of checkpoints) { + const lastBlock = checkpoint.blocks.at(-1)!; + const targetTime = Number(lastBlock.header.globalVariables.timestamp) - ETHEREUM_SLOT_DURATION; while ((await cheatCodes.eth.timestamp()) < targetTime) { await cheatCodes.eth.mine(); } // If it breaks here, first place you should look is the pruning.
- await publisher.enqueueProposeCheckpoint( - block.toCheckpoint(), - CommitteeAttestationsAndSigners.empty(), - Signature.empty(), - ); + await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()); await cheatCodes.rollup.markAsProven(provenThrough); } diff --git a/yarn-project/end-to-end/src/spartan/tx_metrics.ts b/yarn-project/end-to-end/src/spartan/tx_metrics.ts index c28c076fa69c..2d7a01dc6307 100644 --- a/yarn-project/end-to-end/src/spartan/tx_metrics.ts +++ b/yarn-project/end-to-end/src/spartan/tx_metrics.ts @@ -1,5 +1,5 @@ import type { AztecNode } from '@aztec/aztec.js/node'; -import type { L2Block } from '@aztec/stdlib/block'; +import type { L2BlockNew } from '@aztec/stdlib/block'; import type { TopicType } from '@aztec/stdlib/p2p'; import { Tx, type TxReceipt, TxStatus } from '@aztec/stdlib/tx'; @@ -20,7 +20,7 @@ export type TxInclusionData = { export class TxInclusionMetrics { private data = new Map(); private groups = new Set(); - private blocks = new Map>(); + private blocks = new Map>(); private p2pGossipLatencyByTopic: Partial> = {}; @@ -51,10 +51,13 @@ export class TxInclusionMetrics { } if (!this.blocks.has(blockNumber)) { - this.blocks.set(blockNumber, this.aztecNode.getBlock(blockNumber) as Promise); + this.blocks.set(blockNumber, this.aztecNode.getBlock(blockNumber)); } const block = await this.blocks.get(blockNumber)!; + if (!block) { + return; + } const data = this.data.get(txHash.toString())!; data.blocknumber = blockNumber; data.minedAt = Number(block.header.globalVariables.timestamp); diff --git a/yarn-project/p2p/src/client/p2p_client.test.ts b/yarn-project/p2p/src/client/p2p_client.test.ts index 74184d5056ae..34ea6ed84f39 100644 --- a/yarn-project/p2p/src/client/p2p_client.test.ts +++ b/yarn-project/p2p/src/client/p2p_client.test.ts @@ -5,7 +5,7 @@ import { Fr } from '@aztec/foundation/curves/bn254'; import { retryFastUntil } from '@aztec/foundation/retry'; import type { 
AztecAsyncKVStore } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; -import { L2Block } from '@aztec/stdlib/block'; +import { L2BlockNew } from '@aztec/stdlib/block'; import { EmptyL1RollupConstants, type L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { GasFees } from '@aztec/stdlib/gas'; import type { MerkleTreeReadOperations, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; @@ -347,7 +347,7 @@ describe('P2P Client', () => { finalized: { block: { number: BlockNumber(50), hash: expect.any(String) }, checkpoint: anyCheckpoint }, }); - blockSource.addBlocks([await L2Block.random(BlockNumber(91)), await L2Block.random(BlockNumber(92))]); + blockSource.addBlocks([await L2BlockNew.random(BlockNumber(91)), await L2BlockNew.random(BlockNumber(92))]); blockSource.setCheckpointedBlockNumber(92); await client.sync(); @@ -445,7 +445,7 @@ describe('P2P Client', () => { it('syncs new blocks', async () => { await client.start(); - blockSource.addBlocks([await L2Block.random(BlockNumber(101)), await L2Block.random(BlockNumber(102))]); + blockSource.addBlocks([await L2BlockNew.random(BlockNumber(101)), await L2BlockNew.random(BlockNumber(102))]); await client.sync(); expect(await client.getSyncedLatestBlockNum()).toEqual(102); }); @@ -471,7 +471,7 @@ describe('P2P Client', () => { it('stops tx collection for pruned blocks', async () => { await client.start(); - blockSource.addBlocks([await L2Block.random(BlockNumber(101)), await L2Block.random(BlockNumber(102))]); + blockSource.addBlocks([await L2BlockNew.random(BlockNumber(101)), await L2BlockNew.random(BlockNumber(102))]); await client.sync(); blockSource.removeBlocks(1); @@ -481,7 +481,7 @@ describe('P2P Client', () => { it('stops tx collection for proven blocks', async () => { await client.start(); - blockSource.addBlocks([await L2Block.random(BlockNumber(101)), await L2Block.random(BlockNumber(102))]); + blockSource.addBlocks([await 
L2BlockNew.random(BlockNumber(101)), await L2BlockNew.random(BlockNumber(102))]); await client.sync(); await advanceToProvenBlock(BlockNumber(101)); @@ -490,25 +490,24 @@ describe('P2P Client', () => { it('triggers tx collection for missing txs from mined blocks', async () => { await client.start(); - const block = await L2Block.random(BlockNumber(101), 3); - const newBlock = block.toL2Block(); + const block = await L2BlockNew.random(BlockNumber(101), { txsPerBlock: 3 }); // Compute the block hash since it gets cached when the p2p client logs it - await newBlock.hash(); + await block.hash(); txPool.hasTxs.mockResolvedValue([true, false, true]); blockSource.addBlocks([block]); await client.sync(); - expect(txCollection.startCollecting).toHaveBeenCalledTimes(2); - const [actualBlock, actualTxHashes] = txCollection.startCollecting.mock.calls[1]; - expect(actualBlock.number).toEqual(newBlock.number); - expect(await actualBlock.hash()).toEqual(await newBlock.hash()); + expect(txCollection.startCollecting).toHaveBeenCalledTimes(1); + const [actualBlock, actualTxHashes] = txCollection.startCollecting.mock.calls[0]; + expect(actualBlock.number).toEqual(block.number); + expect(await actualBlock.hash()).toEqual(await block.hash()); expect(actualTxHashes).toEqual([block.body.txEffects[1].txHash]); }); it('clears non-evictable txs when new blocks are synced', async () => { await client.start(); - blockSource.addBlocks([await L2Block.random(BlockNumber(101))]); + blockSource.addBlocks([await L2BlockNew.random(BlockNumber(101))]); await client.sync(); expect(txPool.clearNonEvictableTxs).toHaveBeenCalled(); @@ -576,7 +575,7 @@ describe('P2P Client', () => { ]); await realClient.start(); - blockSource.addBlocks([await L2Block.random(BlockNumber(101))]); + blockSource.addBlocks([await L2BlockNew.random(BlockNumber(101))]); await realClient.sync(); const tx6 = await mockTx(nextTxSeed++, { maxPriorityFeesPerGas: new GasFees(6, 6) }); diff --git 
a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 706bb072f139..7b804c5dcd15 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -182,7 +182,7 @@ export class P2PClient const limit = event.block.number - from + 1; if (limit > 0) { const oldBlocks = await this.l2BlockSource.getBlocks(from, limit); - await this.handleFinalizedL2Blocks(oldBlocks.map(b => b.toL2Block())); + await this.handleFinalizedL2Blocks(oldBlocks); } break; } diff --git a/yarn-project/p2p/src/client/test/p2p_client.integration_block_txs.test.ts b/yarn-project/p2p/src/client/test/p2p_client.integration_block_txs.test.ts index 4876ebd67d6d..83088a51db35 100644 --- a/yarn-project/p2p/src/client/test/p2p_client.integration_block_txs.test.ts +++ b/yarn-project/p2p/src/client/test/p2p_client.integration_block_txs.test.ts @@ -7,7 +7,7 @@ import { type Logger, createLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; import { emptyChainConfig } from '@aztec/stdlib/config'; import type { WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; -import { makeBlockProposal, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { makeBlockHeader, makeBlockProposal } from '@aztec/stdlib/testing'; import { Tx, TxHash } from '@aztec/stdlib/tx'; import { describe, expect, it, jest } from '@jest/globals'; @@ -124,7 +124,7 @@ describe('p2p client integration block txs protocol ', () => { const createBlockProposal = (blockNumber: BlockNumber, blockHash: any, txHashes: any[]) => { return makeBlockProposal({ signer: Secp256k1Signer.random(), - blockHeader: makeL2BlockHeader(1, blockNumber), + blockHeader: makeBlockHeader(1, { blockNumber }), archiveRoot: blockHash, txHashes, }); diff --git a/yarn-project/p2p/src/client/test/p2p_client.integration_message_propagation.test.ts b/yarn-project/p2p/src/client/test/p2p_client.integration_message_propagation.test.ts index 
d8a6a234951c..d1a46b5dc807 100644 --- a/yarn-project/p2p/src/client/test/p2p_client.integration_message_propagation.test.ts +++ b/yarn-project/p2p/src/client/test/p2p_client.integration_message_propagation.test.ts @@ -9,7 +9,8 @@ import { sleep } from '@aztec/foundation/sleep'; import { emptyChainConfig } from '@aztec/stdlib/config'; import type { WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import { BlockProposal, CheckpointAttestation } from '@aztec/stdlib/p2p'; -import { type MakeConsensusPayloadOptions, makeBlockProposal, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; +import { type MakeConsensusPayloadOptions, makeBlockProposal } from '@aztec/stdlib/testing'; import { Tx, TxHash } from '@aztec/stdlib/tx'; import { describe, expect, it, jest } from '@jest/globals'; @@ -183,7 +184,7 @@ describe('p2p client integration message propagation', () => { // Client 1 sends a block proposal const dummyPayload: MakeConsensusPayloadOptions = { signer: Secp256k1Signer.random(), - header: makeL2BlockHeader(), + header: CheckpointHeader.random(), archive: Fr.random(), txHashes: [TxHash.random()], }; @@ -193,7 +194,7 @@ describe('p2p client integration message propagation', () => { // client 1 sends a checkpoint attestation const attestation = mockCheckpointAttestation( Secp256k1Signer.random(), - Number(dummyPayload.header!.getSlot()), + Number(dummyPayload.header!.slotNumber), dummyPayload.archive, ); await client1.broadcastCheckpointAttestations([attestation]); @@ -335,7 +336,7 @@ describe('p2p client integration message propagation', () => { // Client 1 sends a block proposal const dummyPayload: MakeConsensusPayloadOptions = { signer: Secp256k1Signer.random(), - header: makeL2BlockHeader(), + header: CheckpointHeader.random(), archive: Fr.random(), txHashes: [TxHash.random()], }; @@ -345,7 +346,7 @@ describe('p2p client integration message propagation', () => { // client 1 sends a checkpoint 
attestation const attestation = mockCheckpointAttestation( Secp256k1Signer.random(), - Number(dummyPayload.header!.getSlot()), + Number(dummyPayload.header!.slotNumber), dummyPayload.archive, ); await client1.client.broadcastCheckpointAttestations([attestation]); diff --git a/yarn-project/p2p/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts b/yarn-project/p2p/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts index 6baa565e4082..4e6b59c606e8 100644 --- a/yarn-project/p2p/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts +++ b/yarn-project/p2p/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts @@ -2,7 +2,13 @@ import { SlotNumber } from '@aztec/foundation/branded-types'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; import type { BlockProposal, CheckpointAttestation, CheckpointProposal } from '@aztec/stdlib/p2p'; -import { makeBlockProposal, makeCheckpointProposal, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; +import { + makeBlockHeader, + makeBlockProposal, + makeCheckpointHeader, + makeCheckpointProposal, +} from '@aztec/stdlib/testing'; import type { AttestationPool } from './attestation_pool.js'; import { MAX_PROPOSALS_PER_SLOT } from './kv_attestation_pool.js'; @@ -29,7 +35,7 @@ export function describeAttestationPool(getAttestationPool: () => AttestationPoo slotNumber: number, archive: Fr = Fr.random(), ): Promise => { - const header = makeL2BlockHeader(1, 2, slotNumber); + const header = makeBlockHeader(1, { slotNumber: SlotNumber(slotNumber) }); return makeBlockProposal({ signer, blockHeader: header, @@ -102,12 +108,13 @@ export function describeAttestationPool(getAttestationPool: () => AttestationPoo it('should handle duplicate proposals in a slot', async () => { const slotNumber = 420; const archive = Fr.random(); + const header = CheckpointHeader.random({ 
slotNumber: SlotNumber(slotNumber) }); - // Use the same signer for all attestations + // Use the same signer and header for all attestations const attestations: CheckpointAttestation[] = []; const signer = signers[0]; for (let i = 0; i < NUMBER_OF_SIGNERS_PER_TEST; i++) { - attestations.push(mockCheckpointAttestation(signer, slotNumber, archive)); + attestations.push(mockCheckpointAttestation(signer, slotNumber, archive, header)); } // Add them to store and check we end up with only one @@ -256,12 +263,13 @@ export function describeAttestationPool(getAttestationPool: () => AttestationPoo slotNumber: number, archive: Fr = Fr.random(), ): Promise => { - const header = makeL2BlockHeader(1, 2, slotNumber); + const checkpointHeader = makeCheckpointHeader(1, { slotNumber: SlotNumber(slotNumber) }); + const blockHeader = makeBlockHeader(1); return makeCheckpointProposal({ signer, - checkpointHeader: header.toCheckpointHeader(), + checkpointHeader, archiveRoot: archive, - lastBlock: { blockHeader: header }, + lastBlock: { blockHeader }, }); }; @@ -306,11 +314,11 @@ export function describeAttestationPool(getAttestationPool: () => AttestationPoo it('should not store block proposal when checkpoint proposal has no lastBlock', async () => { const slotNumber = 420; const archive = Fr.random(); - const header = makeL2BlockHeader(1, 2, slotNumber); + const checkpointHeader = makeCheckpointHeader(1, { slotNumber: SlotNumber(slotNumber) }); // Create a checkpoint proposal WITHOUT lastBlock const proposal = await makeCheckpointProposal({ signer: signers[0], - checkpointHeader: header.toCheckpointHeader(), + checkpointHeader, archiveRoot: archive, // No lastBlock }); diff --git a/yarn-project/p2p/src/mem_pools/attestation_pool/kv_attestation_pool.test.ts b/yarn-project/p2p/src/mem_pools/attestation_pool/kv_attestation_pool.test.ts index 0ccee6e5a9f0..28fb25e7f987 100644 --- a/yarn-project/p2p/src/mem_pools/attestation_pool/kv_attestation_pool.test.ts +++ 
b/yarn-project/p2p/src/mem_pools/attestation_pool/kv_attestation_pool.test.ts @@ -3,7 +3,7 @@ import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; -import { makeBlockProposal, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { makeBlockHeader, makeBlockProposal } from '@aztec/stdlib/testing'; import { describeAttestationPool } from './attestation_pool_test_suite.js'; import { ATTESTATION_CAP_BUFFER, KvAttestationPool, MAX_PROPOSALS_PER_SLOT } from './kv_attestation_pool.js'; @@ -25,7 +25,7 @@ describe('KV Attestation Pool', () => { describe('BlockProposal behavior', () => { it('should allow adding multiple block proposals for the same slot without cap', async () => { const slotNumber = 100; - const header = makeL2BlockHeader(1, 2, slotNumber); + const header = makeBlockHeader(1, { slotNumber: SlotNumber(slotNumber) }); // Add 1 proposal and re-add it (duplicate) → should be idempotent const p0 = await makeBlockProposal({ blockHeader: header, archiveRoot: Fr.random() }); diff --git a/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts b/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts index b10d487fcbb4..a63087feaa2f 100644 --- a/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts +++ b/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts @@ -1,3 +1,4 @@ +import { SlotNumber } from '@aztec/foundation/branded-types'; import type { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; import { @@ -6,7 +7,7 @@ import { SignatureDomainSeparator, getHashedSignaturePayloadEthSignedMessage, } from '@aztec/stdlib/p2p'; -import { makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { type LocalAccount, generatePrivateKey, 
privateKeyToAccount } from 'viem/accounts'; @@ -25,16 +26,17 @@ export const generateAccount = (): LocalAccount => { * @param signer A Secp256k1Signer to create a signature * @param slot The slot number the attestation is for * @param archive The archive root (defaults to random) + * @param header The checkpoint header (defaults to random with given slot) * @returns A Checkpoint Attestation */ export const mockCheckpointAttestation = ( signer: Secp256k1Signer, slot: number = 0, archive: Fr = Fr.random(), + header?: CheckpointHeader, ): CheckpointAttestation => { - // Use arbitrary numbers for all other than slot - const header = makeL2BlockHeader(1, 2, slot); - const payload = new ConsensusPayload(header.toCheckpointHeader(), archive); + header = header ?? CheckpointHeader.random({ slotNumber: SlotNumber(slot) }); + const payload = new ConsensusPayload(header, archive); const attestationHash = getHashedSignaturePayloadEthSignedMessage( payload, diff --git a/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.test.ts b/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.test.ts index 925c50e52e8f..4da6531b7977 100644 --- a/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.test.ts @@ -3,7 +3,8 @@ import { NoCommitteeError } from '@aztec/ethereum/contracts'; import { SlotNumber } from '@aztec/foundation/branded-types'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { PeerErrorSeverity } from '@aztec/stdlib/p2p'; -import { makeCheckpointAttestation, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; +import { makeCheckpointAttestation } from '@aztec/stdlib/testing'; import { mock } from 'jest-mock-extended'; @@ -24,7 +25,7 @@ describe('CheckpointAttestationValidator', () => { it('returns high 
tolerance error if slot number is not current or next slot', async () => { // Create an attestation for slot 97 - const header = makeL2BlockHeader(1, 97, 97).toCheckpointHeader(); + const header = CheckpointHeader.random({ slotNumber: SlotNumber(97) }); const mockAttestation = makeCheckpointAttestation({ header, attesterSigner: attester, @@ -46,7 +47,7 @@ describe('CheckpointAttestationValidator', () => { it('returns high tolerance error if attester is not in committee', async () => { // The slot is correct, but the attester is not in the committee - const header = makeL2BlockHeader(1, 100, 100).toCheckpointHeader(); + const header = CheckpointHeader.random({ slotNumber: SlotNumber(100) }); const mockAttestation = makeCheckpointAttestation({ header, attesterSigner: attester, @@ -68,7 +69,7 @@ describe('CheckpointAttestationValidator', () => { it('returns undefined if checkpoint attestation is valid (current slot)', async () => { // Create an attestation for slot 100 - const header = makeL2BlockHeader(1, 100, 100).toCheckpointHeader(); + const header = CheckpointHeader.random({ slotNumber: SlotNumber(100) }); const mockAttestation = makeCheckpointAttestation({ header, attesterSigner: attester, @@ -91,7 +92,7 @@ describe('CheckpointAttestationValidator', () => { it('returns undefined if checkpoint attestation is valid (next slot)', async () => { // Setup attestation for next slot - const header = makeL2BlockHeader(1, 101, 101).toCheckpointHeader(); + const header = CheckpointHeader.random({ slotNumber: SlotNumber(101) }); const mockAttestation = makeCheckpointAttestation({ header, attesterSigner: attester, @@ -114,7 +115,7 @@ describe('CheckpointAttestationValidator', () => { it('returns high tolerance error if proposer signature is invalid', async () => { const wrongProposer = Secp256k1Signer.random(); - const header = makeL2BlockHeader(1, 100, 100).toCheckpointHeader(); + const header = CheckpointHeader.random({ slotNumber: SlotNumber(100) }); const mockAttestation 
= makeCheckpointAttestation({ header, attesterSigner: attester, @@ -136,7 +137,7 @@ describe('CheckpointAttestationValidator', () => { it('returns low tolerance error if no committee exists', async () => { // Create an attestation - const header = makeL2BlockHeader(1, 100, 100).toCheckpointHeader(); + const header = CheckpointHeader.random({ slotNumber: SlotNumber(100) }); const mockAttestation = makeCheckpointAttestation({ header, attesterSigner: attester, diff --git a/yarn-project/p2p/src/msg_validators/attestation_validator/fisherman_attestation_validator.test.ts b/yarn-project/p2p/src/msg_validators/attestation_validator/fisherman_attestation_validator.test.ts index 7d722928627d..b684c8f96792 100644 --- a/yarn-project/p2p/src/msg_validators/attestation_validator/fisherman_attestation_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/attestation_validator/fisherman_attestation_validator.test.ts @@ -3,7 +3,13 @@ import { SlotNumber } from '@aztec/foundation/branded-types'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; import { PeerErrorSeverity } from '@aztec/stdlib/p2p'; -import { makeCheckpointAttestation, makeCheckpointProposal, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; +import { + makeBlockHeader, + makeCheckpointAttestation, + makeCheckpointHeader, + makeCheckpointProposal, +} from '@aztec/stdlib/testing'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -29,7 +35,7 @@ describe('FishermanAttestationValidator', () => { describe('base validation', () => { it('returns high tolerance error if slot number is not current or next slot', async () => { // Create an attestation for slot 97 - const header = makeL2BlockHeader(1, 97, 97).toCheckpointHeader(); + const header = CheckpointHeader.random({ slotNumber: SlotNumber(97) }); const 
mockAttestation = makeCheckpointAttestation({ header, attesterSigner: attester, @@ -54,7 +60,7 @@ describe('FishermanAttestationValidator', () => { it('returns high tolerance error if attester is not in committee', async () => { const mockAttestation = makeCheckpointAttestation({ - header: makeL2BlockHeader(1, 100, 100).toCheckpointHeader(), + header: CheckpointHeader.random({ slotNumber: SlotNumber(100) }), attesterSigner: attester, proposerSigner: proposer, }); @@ -77,7 +83,7 @@ describe('FishermanAttestationValidator', () => { it('returns high tolerance error if proposer signature is invalid', async () => { const wrongProposer = Secp256k1Signer.random(); const mockAttestation = makeCheckpointAttestation({ - header: makeL2BlockHeader(1, 100, 100).toCheckpointHeader(), + header: CheckpointHeader.random({ slotNumber: SlotNumber(100) }), attesterSigner: attester, proposerSigner: wrongProposer, }); @@ -112,8 +118,8 @@ describe('FishermanAttestationValidator', () => { }); it('returns undefined if attestation payload matches proposal payload', async () => { - const header = makeL2BlockHeader(1, 100, 100); - const checkpointHeader = header.toCheckpointHeader(); + const checkpointHeader = makeCheckpointHeader(1, { slotNumber: SlotNumber(100) }); + const blockHeader = makeBlockHeader(1); const archive = Fr.random(); const mockAttestation = makeCheckpointAttestation({ header: checkpointHeader, @@ -124,10 +130,10 @@ describe('FishermanAttestationValidator', () => { // Create a matching checkpoint proposal with the same payload const mockProposal = await makeCheckpointProposal({ - checkpointHeader: header.toCheckpointHeader(), + checkpointHeader, signer: proposer, archiveRoot: archive, - lastBlock: { blockHeader: header }, + lastBlock: { blockHeader }, }); attestationPool.getCheckpointProposal.mockResolvedValue(mockProposal); @@ -140,20 +146,21 @@ describe('FishermanAttestationValidator', () => { }); it('returns low tolerance error if attestation payload does not match 
proposal payload', async () => { - const header1 = makeL2BlockHeader(1, 100, 100); - const header2 = makeL2BlockHeader(2, 100, 100); // Different block number + const checkpointHeader1 = makeCheckpointHeader(1, { slotNumber: SlotNumber(100) }); + const checkpointHeader2 = makeCheckpointHeader(2, { slotNumber: SlotNumber(100) }); // Different seed = different header + const blockHeader2 = makeBlockHeader(2); const mockAttestation = makeCheckpointAttestation({ - header: header1.toCheckpointHeader(), + header: checkpointHeader1, attesterSigner: attester, proposerSigner: proposer, }); // Create a proposal with a different payload const mockProposal = await makeCheckpointProposal({ - checkpointHeader: header2.toCheckpointHeader(), + checkpointHeader: checkpointHeader2, signer: proposer, - lastBlock: { blockHeader: header2 }, + lastBlock: { blockHeader: blockHeader2 }, }); attestationPool.getCheckpointProposal.mockResolvedValue(mockProposal); @@ -166,9 +173,9 @@ describe('FishermanAttestationValidator', () => { }); it('returns undefined if proposal is not found yet (attestation arrived before proposal)', async () => { - const header = makeL2BlockHeader(1, 100, 100); + const checkpointHeader = makeCheckpointHeader(1, { slotNumber: SlotNumber(100) }); const mockAttestation = makeCheckpointAttestation({ - header: header.toCheckpointHeader(), + header: checkpointHeader, attesterSigner: attester, proposerSigner: proposer, }); @@ -184,19 +191,20 @@ describe('FishermanAttestationValidator', () => { }); it('detects payload mismatch with different archive roots', async () => { - const header = makeL2BlockHeader(1, 100, 100); + const checkpointHeader = makeCheckpointHeader(1, { slotNumber: SlotNumber(100) }); + const blockHeader = makeBlockHeader(1); const mockAttestation = makeCheckpointAttestation({ - header: header.toCheckpointHeader(), + header: checkpointHeader, attesterSigner: attester, proposerSigner: proposer, }); // Create a proposal with the same header but different 
archive const mockProposal = await makeCheckpointProposal({ - checkpointHeader: header.toCheckpointHeader(), + checkpointHeader, signer: proposer, archiveRoot: Fr.random(), // Different archive - lastBlock: { blockHeader: header }, + lastBlock: { blockHeader }, }); attestationPool.getCheckpointProposal.mockResolvedValue(mockProposal); @@ -206,20 +214,21 @@ describe('FishermanAttestationValidator', () => { }); it('detects payload mismatch with different header hash', async () => { - const header1 = makeL2BlockHeader(1, 100, 100); - const header2 = makeL2BlockHeader(1, 100, 100); // Same slot but different random content + const checkpointHeader1 = makeCheckpointHeader(1, { slotNumber: SlotNumber(100) }); + const checkpointHeader2 = makeCheckpointHeader(2, { slotNumber: SlotNumber(100) }); // Same slot but different content + const blockHeader2 = makeBlockHeader(2); const mockAttestation = makeCheckpointAttestation({ - header: header1.toCheckpointHeader(), + header: checkpointHeader1, attesterSigner: attester, proposerSigner: proposer, }); // Create a proposal with a different header (different hash) const mockProposal = await makeCheckpointProposal({ - checkpointHeader: header2.toCheckpointHeader(), + checkpointHeader: checkpointHeader2, signer: proposer, - lastBlock: { blockHeader: header2 }, + lastBlock: { blockHeader: blockHeader2 }, }); attestationPool.getCheckpointProposal.mockResolvedValue(mockProposal); @@ -244,9 +253,9 @@ describe('FishermanAttestationValidator', () => { }); it('handles attestation pool errors gracefully', async () => { - const header = makeL2BlockHeader(1, 100, 100); + const checkpointHeader = makeCheckpointHeader(1, { slotNumber: SlotNumber(100) }); const mockAttestation = makeCheckpointAttestation({ - header: header.toCheckpointHeader(), + header: checkpointHeader, attesterSigner: attester, proposerSigner: proposer, }); diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.test.ts 
b/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.test.ts index dbd2fc0e3f5e..873d387afd70 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.test.ts @@ -1,8 +1,8 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import { SlotNumber } from '@aztec/foundation/branded-types'; +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { makeBlockProposal, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { makeBlockHeader, makeBlockProposal } from '@aztec/stdlib/testing'; import { TxHash } from '@aztec/stdlib/tx'; import { mock } from 'jest-mock-extended'; @@ -15,7 +15,7 @@ describe('BlockProposalValidator', () => { validatorFactory: (epochCache, opts) => new BlockProposalValidator(epochCache, opts), makeProposal: makeBlockProposal, makeHeader: (epochNumber: number | bigint, slotNumber: number | bigint, blockNumber: number | bigint) => - makeL2BlockHeader(0, Number(blockNumber), Number(slotNumber)), + makeBlockHeader(0, { blockNumber: BlockNumber(Number(blockNumber)), slotNumber: SlotNumber(Number(slotNumber)) }), getSigner: () => Secp256k1Signer.random(), getAddress: (signer?: Secp256k1Signer) => (signer ? 
signer.address : EthAddress.random()), getSlot: (slot: number | bigint) => SlotNumber(Number(slot)), diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts index a0c3a892b055..9bc2e2888864 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts @@ -2,9 +2,9 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; import { SlotNumber } from '@aztec/foundation/branded-types'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { EthAddress } from '@aztec/foundation/eth-address'; -import type { L2BlockHeader } from '@aztec/stdlib/block'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; import type { MakeCheckpointProposalOptions } from '@aztec/stdlib/testing'; -import { makeCheckpointProposal, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { makeBlockHeader, makeCheckpointHeader, makeCheckpointProposal } from '@aztec/stdlib/testing'; import { TxHash } from '@aztec/stdlib/tx'; import { mock } from 'jest-mock-extended'; @@ -16,26 +16,30 @@ describe('CheckpointProposalValidator', () => { /** * Adapter function to convert shared test options to CheckpointProposal options. * The shared test uses blockHeader/lastBlockHeader, but CheckpointProposal uses - * checkpointHeader (derived from L2BlockHeader) and lastBlock.blockHeader. + * checkpointHeader and lastBlock.blockHeader. 
*/ const makeCheckpointProposalAdapter = (options?: { - blockHeader?: L2BlockHeader; - lastBlockHeader?: L2BlockHeader; + blockHeader?: CheckpointHeader; + lastBlockHeader?: CheckpointHeader; signer?: Secp256k1Signer; txHashes?: TxHash[]; txs?: any[]; }) => { - // Use the blockHeader to derive the checkpointHeader (for slotNumber matching) - const l2BlockHeader = options?.blockHeader ?? makeL2BlockHeader(1); - const checkpointHeader = l2BlockHeader.toCheckpointHeader(); + // Use the blockHeader directly as the checkpointHeader + const checkpointHeader = options?.blockHeader ?? makeCheckpointHeader(1); + + // Create a BlockHeader for the lastBlock using the slot from the checkpointHeader + const lastBlockBlockHeader = options?.lastBlockHeader + ? makeBlockHeader(0, { slotNumber: checkpointHeader.slotNumber }) + : undefined; const adaptedOptions: MakeCheckpointProposalOptions = { signer: options?.signer, checkpointHeader, - // Use lastBlockHeader for the lastBlock if provided - lastBlock: options?.lastBlockHeader + // Create lastBlock with a proper BlockHeader + lastBlock: lastBlockBlockHeader ? { - blockHeader: options.lastBlockHeader, + blockHeader: lastBlockBlockHeader, txHashes: options?.txHashes, txs: options?.txs, } @@ -48,8 +52,8 @@ describe('CheckpointProposalValidator', () => { sharedProposalValidatorTests({ validatorFactory: (epochCache, opts) => new CheckpointProposalValidator(epochCache, opts), makeProposal: makeCheckpointProposalAdapter, - makeHeader: (epochNumber: number | bigint, slotNumber: number | bigint, blockNumber: number | bigint) => - makeL2BlockHeader(0, Number(blockNumber), Number(slotNumber)), + makeHeader: (_epochNumber: number | bigint, slotNumber: number | bigint, _blockNumber: number | bigint) => + makeCheckpointHeader(0, { slotNumber: SlotNumber(Number(slotNumber)) }), getSigner: () => Secp256k1Signer.random(), getAddress: (signer?: Secp256k1Signer) => (signer ? 
signer.address : EthAddress.random()), getSlot: (slot: number | bigint) => SlotNumber(Number(slot)), diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.test.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.test.ts index f01ff934d32d..df0dab6fbeff 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.test.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.test.ts @@ -2,7 +2,7 @@ import { BlockNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; -import { L2Block } from '@aztec/stdlib/block'; +import { L2BlockNew } from '@aztec/stdlib/block'; import { PeerErrorSeverity } from '@aztec/stdlib/p2p'; import type { TxValidator } from '@aztec/stdlib/tx'; import { getTelemetryClient } from '@aztec/telemetry-client'; @@ -107,7 +107,7 @@ describe('LibP2PService', () => { describe('validateRequestedBlock', () => { let service: any; - type GetBlockFn = (n: number) => Promise; + type GetBlockFn = (n: number) => Promise; let archiver: { getBlock: jest.MockedFunction }; let peerManager: MockProxy; let peerId: PeerId; @@ -128,7 +128,7 @@ describe('LibP2PService', () => { it('should return false and penalize on number mismatch', async () => { const requested = new Fr(10); - const resp = await L2Block.random(BlockNumber(9)); + const resp = await L2BlockNew.random(BlockNumber(9)); const ok = await service.validateRequestedBlock(requested, resp, peerId); @@ -139,7 +139,7 @@ describe('LibP2PService', () => { it('should return false (no penalty) when numbers match and no local block', async () => { archiver.getBlock.mockResolvedValue(undefined); const requested = new Fr(10); - const resp = await L2Block.random(BlockNumber(10)); + const resp = await L2BlockNew.random(BlockNumber(10)); const ok = await service.validateRequestedBlock(requested, resp, peerId); @@ -149,9 +149,9 @@ 
describe('LibP2PService', () => { it('should return true when numbers match and hashes match', async () => { const requested = new Fr(10); - const local = await L2Block.random(BlockNumber(10)); + const local = await L2BlockNew.random(BlockNumber(10)); - const resp = L2Block.fromBuffer(local.toBuffer()); + const resp = L2BlockNew.fromBuffer(local.toBuffer()); archiver.getBlock.mockResolvedValue(local); const ok = await service.validateRequestedBlock(requested, resp, peerId); @@ -162,9 +162,9 @@ describe('LibP2PService', () => { it('should return false and penalize when hashes mismatch', async () => { const requested = new Fr(10); - const local = await L2Block.random(BlockNumber(10)); + const local = await L2BlockNew.random(BlockNumber(10)); - const resp = L2Block.fromBuffer(local.toBuffer()); + const resp = L2BlockNew.fromBuffer(local.toBuffer()); resp.header.globalVariables.coinbase = EthAddress.random(); archiver.getBlock.mockResolvedValue(local); @@ -177,7 +177,7 @@ describe('LibP2PService', () => { it('should return false on archiver error', async () => { archiver.getBlock.mockRejectedValue(new Error('boom')); const requested = new Fr(10); - const resp = await L2Block.random(BlockNumber(10)); + const resp = await L2BlockNew.random(BlockNumber(10)); const ok = await service.validateRequestedBlock(requested, resp, peerId); diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index b76afde0438a..61e75fcf2400 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -8,7 +8,7 @@ import { Timer } from '@aztec/foundation/timer'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { protocolContractsHash } from '@aztec/protocol-contracts'; -import type { EthAddress, L2Block, L2BlockSource } from '@aztec/stdlib/block'; +import type { 
EthAddress, L2BlockNew, L2BlockSource } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; import { GasFees } from '@aztec/stdlib/gas'; import type { ClientProtocolCircuitVerifier, PeerInfo, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; @@ -1336,7 +1336,7 @@ export class LibP2PService extends })) private async validateRequestedBlock( requestedBlockNumber: Fr, - responseBlock: L2Block, + responseBlock: L2BlockNew, peerId: PeerId, ): Promise { try { diff --git a/yarn-project/p2p/src/services/reqresp/interface.ts b/yarn-project/p2p/src/services/reqresp/interface.ts index 2669ea33d405..88b87fed935f 100644 --- a/yarn-project/p2p/src/services/reqresp/interface.ts +++ b/yarn-project/p2p/src/services/reqresp/interface.ts @@ -1,5 +1,5 @@ import { Fr } from '@aztec/foundation/curves/bn254'; -import { L2Block } from '@aztec/stdlib/block'; +import { L2BlockNew } from '@aztec/stdlib/block'; import { TxArray, TxHashArray } from '@aztec/stdlib/tx'; import type { PeerId } from '@libp2p/interface'; @@ -198,7 +198,7 @@ export const subProtocolMap = { }, [ReqRespSubProtocol.BLOCK]: { request: Fr, // block number - response: L2Block, + response: L2BlockNew, }, [ReqRespSubProtocol.AUTH]: { request: AuthRequest, diff --git a/yarn-project/p2p/src/services/reqresp/reqresp.test.ts b/yarn-project/p2p/src/services/reqresp/reqresp.test.ts index 73149c7caf5b..9bc1d6b1efc9 100644 --- a/yarn-project/p2p/src/services/reqresp/reqresp.test.ts +++ b/yarn-project/p2p/src/services/reqresp/reqresp.test.ts @@ -2,7 +2,7 @@ import { BlockNumber } from '@aztec/foundation/branded-types'; import { times } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { sleep } from '@aztec/foundation/sleep'; -import { L2Block, type L2BlockSource } from '@aztec/stdlib/block'; +import { L2BlockNew, type L2BlockSource } from '@aztec/stdlib/block'; import { PeerErrorSeverity } from '@aztec/stdlib/p2p'; import { mockTx 
} from '@aztec/stdlib/testing'; import { Tx, TxArray, TxHash, TxHashArray } from '@aztec/stdlib/tx'; @@ -374,7 +374,7 @@ describe('ReqResp', () => { it('should handle block requests', async () => { const blockNumber = 1; const blockNumberFr = Fr.ONE; - const block = await L2Block.random(BlockNumber(blockNumber)); + const block = await L2BlockNew.random(BlockNumber(blockNumber)); const l2BlockSource: MockProxy = mock(); l2BlockSource.getBlock.mockImplementation((_blockNumber: number) => { @@ -398,7 +398,7 @@ describe('ReqResp', () => { ); expectSuccess(resp); - const res = L2Block.fromBuffer(resp.data); + const res = L2BlockNew.fromBuffer(resp.data); expect(res).toEqual(block); }); }); diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 0174d06c9f41..7f1691fbbaaf 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -4,7 +4,6 @@ "type": "module", "exports": { ".": "./dest/index.js", - "./block-factory": "./dest/block-factory/index.js", "./broker": "./dest/proving_broker/index.js", "./broker/config": "./dest/proving_broker/config.js", "./orchestrator": "./dest/orchestrator/index.js", diff --git a/yarn-project/prover-client/src/block-factory/index.ts b/yarn-project/prover-client/src/block-factory/index.ts deleted file mode 100644 index f0f05ac3081b..000000000000 --- a/yarn-project/prover-client/src/block-factory/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './light.js'; diff --git a/yarn-project/prover-client/src/block-factory/light.test.ts b/yarn-project/prover-client/src/block-factory/light.test.ts deleted file mode 100644 index 2b880fb05cd4..000000000000 --- a/yarn-project/prover-client/src/block-factory/light.test.ts +++ /dev/null @@ -1,424 +0,0 @@ -import { TestCircuitProver } from '@aztec/bb-prover'; -import { SpongeBlob, encodeBlockEndBlobData } from '@aztec/blob-lib'; -import { - ARCHIVE_HEIGHT, - CHONK_PROOF_LENGTH, - L1_TO_L2_MSG_SUBTREE_HEIGHT, - 
L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, - NESTED_RECURSIVE_PROOF_LENGTH, - NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH, - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, - NUM_BASE_PARITY_PER_ROOT_PARITY, -} from '@aztec/constants'; -import { BlockNumber } from '@aztec/foundation/branded-types'; -import { padArrayEnd, times, timesParallel } from '@aztec/foundation/collection'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { type Tuple, assertLength } from '@aztec/foundation/serialize'; -import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks'; -import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; -import { ProtocolContractsList, protocolContractsHash } from '@aztec/protocol-contracts'; -import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice'; -import { PublicDataWrite } from '@aztec/stdlib/avm'; -import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { GasFees } from '@aztec/stdlib/gas'; -import type { MerkleTreeWriteOperations, ServerCircuitProver } from '@aztec/stdlib/interfaces/server'; -import { - ParityBasePrivateInputs, - type ParityBaseProofData, - ParityPublicInputs, - ParityRootPrivateInputs, -} from '@aztec/stdlib/parity'; -import { ProofData, type RecursiveProof, makeEmptyRecursiveProof } from '@aztec/stdlib/proofs'; -import { - BlockRootEmptyTxFirstRollupPrivateInputs, - BlockRootFirstRollupPrivateInputs, - BlockRootSingleTxFirstRollupPrivateInputs, - CheckpointConstantData, - type PrivateBaseRollupHints, - PrivateTxBaseRollupPrivateInputs, - TxMergeRollupPrivateInputs, - type TxRollupPublicInputs, -} from '@aztec/stdlib/rollup'; -import { mockProcessedTx } from '@aztec/stdlib/testing'; -import { type AppendOnlyTreeSnapshot, MerkleTreeId, PublicDataTreeLeaf } from '@aztec/stdlib/trees'; -import { GlobalVariables, type ProcessedTx } from '@aztec/stdlib/tx'; -import { type MerkleTreeAdminDatabase, NativeWorldStateService } from '@aztec/world-state'; - -import { 
jest } from '@jest/globals'; - -import { - buildHeaderFromCircuitOutputs, - getRootTreeSiblingPath, - getSubtreeSiblingPath, - getTreeSnapshot, - insertSideEffectsAndBuildBaseRollupHints, -} from '../orchestrator/block-building-helpers.js'; -import { buildBlockWithCleanDB } from './light.js'; - -jest.setTimeout(50_000); - -describe('LightBlockBuilder', () => { - let simulator: ServerCircuitProver; - let globalVariables: GlobalVariables; - let l1ToL2Messages: Fr[]; - let vkTreeRoot: Fr; - - let db: MerkleTreeAdminDatabase; - let fork: MerkleTreeWriteOperations; - let expectsFork: MerkleTreeWriteOperations; - - let emptyProof: RecursiveProof; - let emptyRollupProof: RecursiveProof; - let emptyChonkProof: RecursiveProof; - - let feePayer: AztecAddress; - let feePayerSlot: Fr; - let feePayerBalance: Fr; - const gasFees = new GasFees(8, 9); - const expectedTxFee = new Fr(0x2200); - const proverId = new Fr(112233); - - beforeAll(() => { - simulator = new TestCircuitProver(); - vkTreeRoot = getVKTreeRoot(); - emptyProof = makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH); - emptyRollupProof = makeEmptyRecursiveProof(NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH); - emptyChonkProof = makeEmptyRecursiveProof(CHONK_PROOF_LENGTH); - }); - - beforeEach(async () => { - feePayer = await AztecAddress.random(); - feePayerBalance = new Fr(10n ** 20n); - feePayerSlot = await computeFeePayerBalanceLeafSlot(feePayer); - const prefilledPublicData = [new PublicDataTreeLeaf(feePayerSlot, feePayerBalance)]; - - db = await NativeWorldStateService.tmp( - undefined /* rollupAddress */, - true /* cleanupTmpDir */, - prefilledPublicData, - ); - - l1ToL2Messages = times(7, i => new Fr(i + 1)); - fork = await db.fork(); - expectsFork = await db.fork(); - const initialHeader = fork.getInitialHeader(); - globalVariables = GlobalVariables.from({ - ...initialHeader.globalVariables, - gasFees, - blockNumber: BlockNumber(initialHeader.globalVariables.blockNumber + 1), - timestamp: 
initialHeader.globalVariables.timestamp + 1n, - }); - }); - - afterEach(async () => { - await fork.close(); - await expectsFork.close(); - }); - - afterAll(async () => { - await db.close(); - }); - - it('builds a 2 tx header', async () => { - const txs = await timesParallel(2, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); - - expect(header.equals(expectedHeader)).toBe(true); - }); - - it('builds a 3 tx header', async () => { - const txs = await timesParallel(3, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { - const merge = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); - return Promise.resolve([merge, rollupOutputs[2]]); - }); - - expect(header.equals(expectedHeader)).toBe(true); - }); - - it('builds a 4 tx header', async () => { - const txs = await timesParallel(4, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { - const mergeLeft = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); - const mergeRight = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); - return [mergeLeft, mergeRight]; - }); - - expect(header.equals(expectedHeader)).toBe(true); - }); - - it('builds a 4 tx header with no l1 to l2 messages', async () => { - const l1ToL2Messages: Fr[] = []; - const txs = await timesParallel(4, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { - const mergeLeft = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); - const mergeRight = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); - return [mergeLeft, mergeRight]; - }); - - expect(header.equals(expectedHeader)).toBe(true); - }); - - 
it('builds a 5 tx header', async () => { - const txs = await timesParallel(5, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { - const merge10 = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); - const merge11 = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); - const merge20 = await getMergeOutput(merge10, merge11); - return [merge20, rollupOutputs[4]]; - }); - - expect(header.equals(expectedHeader)).toBe(true); - }); - - it('builds a single tx header', async () => { - const txs = await timesParallel(1, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); - - expect(header.equals(expectedHeader)).toBe(true); - }); - - it('builds an empty header', async () => { - const txs: ProcessedTx[] = []; - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); - - expect(header.equals(expectedHeader)).toBe(true); - }); - - const makeTx = (i: number) => { - feePayerBalance = new Fr(feePayerBalance.toBigInt() - expectedTxFee.toBigInt()); - const feePaymentPublicDataWrite = new PublicDataWrite(feePayerSlot, feePayerBalance); - - return mockProcessedTx({ - anchorBlockHeader: fork.getInitialHeader(), - globalVariables, - vkTreeRoot, - protocolContracts: ProtocolContractsList, - seed: i + 1, - feePayer, - feePaymentPublicDataWrite, - privateOnly: true, - }); - }; - - // Builds the block header using the ts block builder - const buildHeader = async (txs: ProcessedTx[], l1ToL2Messages: Fr[]) => { - const block = await buildBlockWithCleanDB(txs, globalVariables, l1ToL2Messages, fork); - - return block.getBlockHeader(); - }; - - // Builds the block header using circuit outputs - // Requires a callback for manually assembling the merge rollup tree - const buildExpectedHeader = async ( - txs: 
ProcessedTx[], - l1ToL2Messages: Fr[], - getTopMerges?: (rollupOutputs: TxRollupPublicInputs[]) => Promise, - ) => { - if (txs.length <= 2) { - // No need to run a merge if there's 0-2 txs - getTopMerges = rollupOutputs => Promise.resolve(rollupOutputs); - } - - // Get the states before inserting new leaves. - const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, expectsFork); - const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, expectsFork); - const lastL1ToL2MessageSubtreeRootSiblingPath = padArrayEnd( - await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, expectsFork), - Fr.ZERO, - L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, - ); - const lastL1ToL2Snapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, expectsFork); - - const parityOutput = await getParityOutput(l1ToL2Messages); - const newL1ToL2Snapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, expectsFork); - - const spongeBlobState = SpongeBlob.init(); - const rollupOutputs = await getPrivateBaseRollupOutputs(txs, lastArchive, newL1ToL2Snapshot, spongeBlobState); - - const previousRollups = await getTopMerges!(rollupOutputs); - const rootOutput = await getBlockRootOutput( - previousRollups, - parityOutput, - lastArchive, - lastArchiveSiblingPath, - lastL1ToL2Snapshot, - lastL1ToL2MessageSubtreeRootSiblingPath, - ); - - // Absorb blob end states into the sponge blob. 
- const noteHashSnapshot = await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, expectsFork); - const nullifierSnapshot = await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, expectsFork); - const publicDataSnapshot = await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, expectsFork); - const blockEndStates = encodeBlockEndBlobData({ - blockEndMarker: { - blockNumber: globalVariables.blockNumber, - timestamp: globalVariables.timestamp, - numTxs: txs.length, - }, - blockEndStateField: { - l1ToL2MessageNextAvailableLeafIndex: newL1ToL2Snapshot.nextAvailableLeafIndex, - noteHashNextAvailableLeafIndex: noteHashSnapshot.nextAvailableLeafIndex, - nullifierNextAvailableLeafIndex: nullifierSnapshot.nextAvailableLeafIndex, - publicDataNextAvailableLeafIndex: publicDataSnapshot.nextAvailableLeafIndex, - totalManaUsed: txs.reduce((acc, tx) => acc + BigInt(tx.gasUsed.totalGas.l2Gas), 0n), - }, - lastArchiveRoot: lastArchive.root, - noteHashRoot: noteHashSnapshot.root, - nullifierRoot: nullifierSnapshot.root, - publicDataRoot: publicDataSnapshot.root, - l1ToL2MessageRoot: newL1ToL2Snapshot.root, - }); - await spongeBlobState.absorb(blockEndStates); - - const expectedHeader = await buildHeaderFromCircuitOutputs(rootOutput); - expect(expectedHeader.spongeBlobHash).toEqual(await spongeBlobState.squeeze()); - - // Ensure that the expected mana used is the sum of the txs' gas used - const expectedManaUsed = txs.reduce((acc, tx) => acc + tx.gasUsed.totalGas.l2Gas, 0); - expect(expectedHeader.totalManaUsed.toNumber()).toBe(expectedManaUsed); - - await expectsFork.updateArchive(expectedHeader); - const newArchiveRoot = (await expectsFork.getTreeInfo(MerkleTreeId.ARCHIVE)).root; - expect(newArchiveRoot).toEqual(rootOutput.newArchive.root.toBuffer()); - - return expectedHeader; - }; - - const getPrivateBaseRollupOutputs = async ( - txs: ProcessedTx[], - lastArchive: AppendOnlyTreeSnapshot, - newL1ToL2Snapshot: AppendOnlyTreeSnapshot, - // Mutable state. 
- spongeBlobState: SpongeBlob, - ) => { - const rollupOutputs = []; - for (const tx of txs) { - const vkData = getVkData('HidingKernelToRollup'); - const hidingKernelProofData = new ProofData( - tx.data.toPrivateToRollupKernelCircuitPublicInputs(), - emptyChonkProof, - vkData, - ); - const hints = await insertSideEffectsAndBuildBaseRollupHints( - tx, - lastArchive, - newL1ToL2Snapshot, - spongeBlobState.clone(), - proverId, - expectsFork, - ); - await spongeBlobState.absorb(tx.txEffect.toBlobFields()); - const inputs = new PrivateTxBaseRollupPrivateInputs(hidingKernelProofData, hints as PrivateBaseRollupHints); - const result = await simulator.getPrivateTxBaseRollupProof(inputs); - // Update `expectedTxFee` if the fee changes. - expect(result.inputs.accumulatedFees).toEqual(expectedTxFee); - rollupOutputs.push(result.inputs); - } - return rollupOutputs; - }; - - const getMergeOutput = async (left: TxRollupPublicInputs, right: TxRollupPublicInputs) => { - const baseRollupVk = getVkData('PrivateTxBaseRollupArtifact'); - const leftInput = new ProofData(left, emptyRollupProof, baseRollupVk); - const rightInput = new ProofData(right, emptyRollupProof, baseRollupVk); - const inputs = new TxMergeRollupPrivateInputs([leftInput, rightInput]); - const result = await simulator.getTxMergeRollupProof(inputs); - return result.inputs; - }; - - const getParityOutput = async (msgs: Fr[]) => { - const l1ToL2Messages = padArrayEnd(msgs, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - await expectsFork.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2Messages); - - const parityBases: ParityBaseProofData[] = []; - const baseParityVk = getVkData('ParityBaseArtifact'); - for (let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++) { - const input = ParityBasePrivateInputs.fromSlice(l1ToL2Messages, i, vkTreeRoot); - const { inputs } = await simulator.getBaseParityProof(input); - parityBases.push(new ProofData(inputs, emptyProof, baseParityVk)); - } - - const rootParityInput = new 
ParityRootPrivateInputs(assertLength(parityBases, NUM_BASE_PARITY_PER_ROOT_PARITY)); - const result = await simulator.getRootParityProof(rootParityInput); - return result.inputs; - }; - - const getBlockRootOutput = async ( - previousRollups: TxRollupPublicInputs[], - parityOutput: ParityPublicInputs, - lastArchive: AppendOnlyTreeSnapshot, - lastArchiveSiblingPath: Tuple, - lastL1ToL2Snapshot: AppendOnlyTreeSnapshot, - lastL1ToL2MessageSubtreeRootSiblingPath: Tuple, - ) => { - const mergeRollupVk = getVkData( - previousRollups.length === 1 ? 'PrivateTxBaseRollupArtifact' : 'TxMergeRollupArtifact', - ); - const previousRollupsProofs = previousRollups.map(r => new ProofData(r, emptyRollupProof, mergeRollupVk)); - - const rootParityVk = getVkData('ParityRootArtifact'); - const l1ToL2Roots = new ProofData(parityOutput, emptyProof, rootParityVk); - - // The sibling paths to insert the new leaf are the last sibling paths. - const newArchiveSiblingPath = lastArchiveSiblingPath; - const newL1ToL2MessageSubtreeRootSiblingPath = lastL1ToL2MessageSubtreeRootSiblingPath; - - if (previousRollups.length === 0) { - const previousBlockHeader = expectsFork.getInitialHeader(); - const constants = CheckpointConstantData.from({ - chainId: globalVariables.chainId, - version: globalVariables.version, - vkTreeRoot, - protocolContractsHash, - proverId, - slotNumber: globalVariables.slotNumber, - coinbase: globalVariables.coinbase, - feeRecipient: globalVariables.feeRecipient, - gasFees: globalVariables.gasFees, - }); - const inputs = BlockRootEmptyTxFirstRollupPrivateInputs.from({ - l1ToL2Roots, - previousState: previousBlockHeader.state, - previousArchive: lastArchive, - constants, - timestamp: globalVariables.timestamp, - newArchiveSiblingPath, - newL1ToL2MessageSubtreeRootSiblingPath, - }); - return (await simulator.getBlockRootEmptyTxFirstRollupProof(inputs)).inputs; - } else if (previousRollups.length === 1) { - const inputs = BlockRootSingleTxFirstRollupPrivateInputs.from({ - 
l1ToL2Roots, - previousRollup: previousRollupsProofs[0], - previousL1ToL2: lastL1ToL2Snapshot, - newArchiveSiblingPath, - newL1ToL2MessageSubtreeRootSiblingPath, - }); - return (await simulator.getBlockRootSingleTxFirstRollupProof(inputs)).inputs; - } else { - const inputs = BlockRootFirstRollupPrivateInputs.from({ - l1ToL2Roots, - previousRollups: [previousRollupsProofs[0], previousRollupsProofs[1]], - previousL1ToL2: lastL1ToL2Snapshot, - newArchiveSiblingPath, - newL1ToL2MessageSubtreeRootSiblingPath, - }); - return (await simulator.getBlockRootFirstRollupProof(inputs)).inputs; - } - }; -}); diff --git a/yarn-project/prover-client/src/block-factory/light.ts b/yarn-project/prover-client/src/block-factory/light.ts deleted file mode 100644 index 34437e700199..000000000000 --- a/yarn-project/prover-client/src/block-factory/light.ts +++ /dev/null @@ -1,146 +0,0 @@ -import { SpongeBlob, computeBlobsHashFromBlobs, encodeCheckpointEndMarker, getBlobsPerL1Block } from '@aztec/blob-lib'; -import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants'; -import { padArrayEnd } from '@aztec/foundation/collection'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { createLogger } from '@aztec/foundation/log'; -import { L2Block, L2BlockHeader } from '@aztec/stdlib/block'; -import type { IBlockFactory, MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server'; -import { - accumulateCheckpointOutHashes, - computeBlockOutHash, - computeInHashFromL1ToL2Messages, -} from '@aztec/stdlib/messaging'; -import { MerkleTreeId } from '@aztec/stdlib/trees'; -import type { GlobalVariables, ProcessedTx } from '@aztec/stdlib/tx'; -import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; - -import { - buildHeaderAndBodyFromTxs, - getTreeSnapshot, - insertSideEffects, -} from '../orchestrator/block-building-helpers.js'; - -/** - * Builds a block and its header from a set of processed tx without running any circuits. 
- * - * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc - * PRIOR to calling `buildBlock`. - * - * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the - * PublicProcessor which will do this for you as it processes transactions. - * - * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper - * function `buildBlockWithCleanDB`, which calls `insertSideEffects` for you. - * - * @deprecated Use LightweightCheckpointBuilder instead. This only works for one block per checkpoint. - */ -export class LightweightBlockFactory implements IBlockFactory { - private globalVariables?: GlobalVariables; - private l1ToL2Messages?: Fr[]; - private txs: ProcessedTx[] | undefined; - - private readonly logger = createLogger('lightweight-block-factory'); - - constructor( - private previousCheckpointOutHashes: Fr[], - private db: MerkleTreeWriteOperations, - private telemetry: TelemetryClient = getTelemetryClient(), - ) {} - - async startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { - this.logger.debug('Starting new block', { globalVariables: globalVariables.toInspect(), l1ToL2Messages }); - this.globalVariables = globalVariables; - this.l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - this.txs = undefined; - // Update L1 to L2 tree - await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages!); - } - - addTxs(txs: ProcessedTx[]): Promise { - // Most times, `addTxs` is only called once per block. - // So avoid copies. 
- if (this.txs === undefined) { - this.txs = txs; - } else { - this.txs.push(...txs); - } - return Promise.resolve(); - } - - setBlockCompleted(): Promise { - return this.buildBlock(); - } - - private async buildBlock(): Promise { - const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db); - const state = await this.db.getStateReference(); - - const txs = this.txs ?? []; - const startSpongeBlob = SpongeBlob.init(); - - const { header, body, blockBlobFields } = await buildHeaderAndBodyFromTxs( - txs, - lastArchive, - state, - this.globalVariables!, - startSpongeBlob, - true, - ); - - header.state.validate(); - - await this.db.updateArchive(header); - const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db); - - const blockOutHash = computeBlockOutHash(txs.map(tx => tx.txEffect.l2ToL1Msgs)); - // There's only one block per checkpoint, so the checkpoint out hash equals the block out hash. - const checkpointOutHash = blockOutHash; - const epochOutHash = accumulateCheckpointOutHashes([...this.previousCheckpointOutHashes, checkpointOutHash]); - const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages!); - const numBlobFields = blockBlobFields.length + 1; - const blobFields = blockBlobFields.concat([encodeCheckpointEndMarker({ numBlobFields })]); - const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields)); - const blockHeaderHash = await header.hash(); - const l2BlockHeader = L2BlockHeader.from({ - ...header, - blockHeadersHash: blockHeaderHash, - blobsHash, - inHash, - epochOutHash, - }); - - const block = new L2Block(newArchive, l2BlockHeader, body); - - this.logger.debug(`Built block ${block.number}`, { - globalVariables: this.globalVariables?.toInspect(), - archiveRoot: newArchive.root.toString(), - stateReference: header.state.toInspect(), - blockHash: (await block.hash()).toString(), - txs: block.body.txEffects.map(tx => tx.txHash.toString()), - }); - - return block; - } -} - -/** - * Inserts the processed 
transactions into the DB, then creates a block. - * @param db - A db fork to use for block building which WILL BE MODIFIED. - */ -export async function buildBlockWithCleanDB( - txs: ProcessedTx[], - globalVariables: GlobalVariables, - l1ToL2Messages: Fr[], - db: MerkleTreeWriteOperations, - telemetry: TelemetryClient = getTelemetryClient(), -) { - const builder = new LightweightBlockFactory([], db, telemetry); - await builder.startNewBlock(globalVariables, l1ToL2Messages); - - for (const tx of txs) { - await insertSideEffects(tx, db); - } - await builder.addTxs(txs); - - return await builder.setBlockCompleted(); -} diff --git a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts index 85dea855a1d1..fdb04729e3e3 100644 --- a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts +++ b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts @@ -1,14 +1,9 @@ import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; import { timesParallel } from '@aztec/foundation/collection'; +import { Fr } from '@aztec/foundation/curves/bn254'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { L2TipsKVStore } from '@aztec/kv-store/stores'; -import { - GENESIS_CHECKPOINT_HEADER_HASH, - L2Block, - L2BlockHash, - L2BlockNew, - type L2BlockStream, -} from '@aztec/stdlib/block'; +import { GENESIS_CHECKPOINT_HEADER_HASH, L2BlockHash, L2BlockNew, type L2BlockStream } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/client'; import { jest } from '@jest/globals'; @@ -55,11 +50,11 @@ describe('BlockSynchronizer', () => { it('removes notes from db on a reorg', async () => { const rollback = jest.spyOn(noteStore, 'rollback').mockImplementation(() => Promise.resolve()); - const block3Hash = L2BlockHash.fromNumber(3); + const block3Hash = Fr.fromString('0x3'); aztecNode.getBlockHeader.mockImplementation(async block => { 
// For the test, when block hash matches block 3, return block header for block 3 - if (block instanceof L2BlockHash && block.equals(block3Hash)) { - return (await L2Block.random(BlockNumber(3))).getBlockHeader(); + if (block instanceof L2BlockHash && Fr.fromBuffer(block.toBuffer()).equals(block3Hash)) { + return (await L2BlockNew.random(BlockNumber(3))).header; } return undefined; }); @@ -79,11 +74,11 @@ describe('BlockSynchronizer', () => { it('removes private events from db on a reorg', async () => { const rollback = jest.spyOn(privateEventStore, 'rollback').mockImplementation(() => Promise.resolve()); - const block3Hash = L2BlockHash.fromNumber(3); + const block3Hash = Fr.fromString('0x3'); aztecNode.getBlockHeader.mockImplementation(async block => { // For the test, when block hash matches block 3, return block header for block 3 - if (block instanceof L2BlockHash && block.equals(block3Hash)) { - return (await L2Block.random(BlockNumber(3))).getBlockHeader(); + if (block instanceof L2BlockHash && Fr.fromBuffer(block.toBuffer()).equals(block3Hash)) { + return (await L2BlockNew.random(BlockNumber(3))).header; } return undefined; }); diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts index bf4dbe152e4e..333e88344a0e 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts @@ -161,7 +161,7 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra } const block = await this.aztecNode.getBlock(blockNumber); - return block?.getBlockHeader() || undefined; + return block?.header; } /** diff --git a/yarn-project/sequencer-client/src/index.ts b/yarn-project/sequencer-client/src/index.ts index a0ef3ae401b6..2375b9013a90 100644 --- a/yarn-project/sequencer-client/src/index.ts +++ 
b/yarn-project/sequencer-client/src/index.ts @@ -1,12 +1,7 @@ export * from './client/index.js'; export * from './config.js'; export * from './publisher/index.js'; -export { - FullNodeBlockBuilder as BlockBuilder, - Sequencer, - SequencerState, - type SequencerEvents, -} from './sequencer/index.js'; +export { Sequencer, SequencerState, type SequencerEvents } from './sequencer/index.js'; // Used by the node to simulate public parts of transactions. Should these be moved to a shared library? // ISSUE(#9832) diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts index 056bda4b739d..c3c9e08c8aa6 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts @@ -16,9 +16,10 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { sleep } from '@aztec/foundation/sleep'; import { TestDateProvider } from '@aztec/foundation/timer'; import { EmpireBaseAbi, RollupAbi } from '@aztec/l1-artifacts'; -import { CommitteeAttestationsAndSigners, L2Block, Signature } from '@aztec/stdlib/block'; +import { CommitteeAttestationsAndSigners, L2BlockNew, Signature } from '@aztec/stdlib/block'; +import { Checkpoint } from '@aztec/stdlib/checkpoint'; import type { SlashFactoryContract } from '@aztec/stdlib/l1-contracts'; -import type { CheckpointHeader } from '@aztec/stdlib/rollup'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -63,7 +64,7 @@ describe('SequencerPublisher', () => { let proposeTxHash: `0x${string}`; let proposeTxReceipt: GetTransactionReceiptReturnType; - let l2Block: L2Block; + let l2Block: L2BlockNew; let header: CheckpointHeader; let archive: Buffer; @@ -83,9 +84,9 @@ describe('SequencerPublisher', () => { blobClient = mock(); 
blobClient.sendBlobsToFilestore.mockResolvedValue(true); - l2Block = await L2Block.random(BlockNumber(42)); + l2Block = await L2BlockNew.random(BlockNumber(42)); - header = l2Block.getCheckpointHeader(); + header = CheckpointHeader.random(); archive = l2Block.archive.root.toBuffer(); proposeTxHash = `0x${Buffer.from('txHashPropose').toString('hex')}`; // random tx hash @@ -170,9 +171,9 @@ describe('SequencerPublisher', () => { const currentL2Slot = publisher.getCurrentL2Slot(); - l2Block = await L2Block.random(BlockNumber(42), undefined, undefined, undefined, undefined, Number(currentL2Slot)); + l2Block = await L2BlockNew.random(BlockNumber(42), { slotNumber: SlotNumber(Number(currentL2Slot)) }); - header = l2Block.getCheckpointHeader(); + header = CheckpointHeader.random({ slotNumber: SlotNumber(Number(currentL2Slot)) }); archive = l2Block.archive.root.toBuffer(); }); @@ -201,13 +202,9 @@ describe('SequencerPublisher', () => { }; it('bundles propose and vote tx to l1', async () => { - const expectedBlobs = getBlobsPerL1Block(l2Block.getCheckpointBlobFields()); - - await publisher.enqueueProposeCheckpoint( - l2Block.toCheckpoint(), - CommitteeAttestationsAndSigners.empty(), - Signature.empty(), - ); + const checkpoint = new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber); + const expectedBlobs = getBlobsPerL1Block(checkpoint.toBlobFields()); + await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()); const { govPayload, voteSig } = mockGovernancePayload(); @@ -290,7 +287,7 @@ describe('SequencerPublisher', () => { }); await publisher.enqueueProposeCheckpoint( - l2Block.toCheckpoint(), + new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), CommitteeAttestationsAndSigners.empty(), Signature.empty(), ); @@ -303,7 +300,7 @@ describe('SequencerPublisher', () => { await expect( publisher.enqueueProposeCheckpoint( - l2Block.toCheckpoint(), + new 
Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), CommitteeAttestationsAndSigners.empty(), Signature.empty(), ), @@ -323,7 +320,7 @@ describe('SequencerPublisher', () => { }); await publisher.enqueueProposeCheckpoint( - l2Block.toCheckpoint(), + new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), CommitteeAttestationsAndSigners.empty(), Signature.empty(), ); @@ -347,7 +344,7 @@ describe('SequencerPublisher', () => { }>, ); await publisher.enqueueProposeCheckpoint( - l2Block.toCheckpoint(), + new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), CommitteeAttestationsAndSigners.empty(), Signature.empty(), ); diff --git a/yarn-project/sequencer-client/src/sequencer/block_builder.test.ts b/yarn-project/sequencer-client/src/sequencer/block_builder.test.ts deleted file mode 100644 index 0985357642c0..000000000000 --- a/yarn-project/sequencer-client/src/sequencer/block_builder.test.ts +++ /dev/null @@ -1,247 +0,0 @@ -import { DefaultL1ContractsConfig } from '@aztec/ethereum/config'; -import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { timesParallel } from '@aztec/foundation/collection'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { EthAddress } from '@aztec/foundation/eth-address'; -import { createLogger } from '@aztec/foundation/log'; -import { TestDateProvider } from '@aztec/foundation/timer'; -import type { PublicProcessor } from '@aztec/simulator/server'; -import { PublicDataWrite } from '@aztec/stdlib/avm'; -import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { ContractDataSource } from '@aztec/stdlib/contract'; -import { GasFees } from '@aztec/stdlib/gas'; -import { - type PublicProcessorValidator, - WorldStateRunningState, - type WorldStateSynchronizer, - type WorldStateSynchronizerStatus, -} from '@aztec/stdlib/interfaces/server'; -import { makeStateReference, mockTxForRollup } from '@aztec/stdlib/testing'; -import { 
MerkleTreeId, type MerkleTreeWriteOperations } from '@aztec/stdlib/trees'; -import { - BlockHeader, - type FailedTx, - GlobalVariables, - NestedProcessReturnValues, - type ProcessedTx, - Tx, - makeProcessedTxFromPrivateOnlyTx, -} from '@aztec/stdlib/tx'; - -import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; - -import { FullNodeBlockBuilder } from './block_builder.js'; - -const logger = createLogger('BlockBuilderTest'); - -describe('BlockBuilder', () => { - let blockBuilder: FullNodeBlockBuilder; - let newSlotNumber: number; - let initialBlockHeader: BlockHeader; - const chainId: number = 12345; - const version: number = 1; - let hash: string; - let lastBlockNumber: BlockNumber; - let newBlockNumber: BlockNumber; - let globalVariables: GlobalVariables; - let worldState: MockProxy; - let fork: MockProxy; - let contractDataSource: MockProxy; - let publicProcessor: MockProxy; - let validator: MockProxy; - - const { aztecSlotDuration: slotDuration, ethereumSlotDuration } = DefaultL1ContractsConfig; - - const coinbase = EthAddress.random(); - let feeRecipient: AztecAddress; - const gasFees = GasFees.empty(); - - const mockTxIterator = async function* (txs: Tx[]): AsyncIterableIterator { - for (const tx of txs) { - yield tx; - } - }; - - const makeTx = async (seed?: number) => { - const tx = await mockTxForRollup(seed); - tx.data.constants.txContext.chainId = new Fr(chainId); - return tx; - }; - - class TestBlockBuilder extends FullNodeBlockBuilder { - public override makeBlockBuilderDeps(_globalVariables: GlobalVariables) { - return Promise.resolve({ - publicProcessorDBFork: fork, - processor: publicProcessor, - validator, - }); - } - } - - beforeEach(async () => { - feeRecipient = await AztecAddress.random(); - hash = Fr.ZERO.toString(); - initialBlockHeader = BlockHeader.empty(); - lastBlockNumber = BlockNumber.ZERO; - newBlockNumber = BlockNumber(lastBlockNumber + 1); - newSlotNumber = newBlockNumber + 1; - globalVariables = new GlobalVariables( - new 
Fr(chainId), - new Fr(version), - newBlockNumber, - SlotNumber(newSlotNumber), - /*timestamp=*/ 0n, - coinbase, - feeRecipient, - gasFees, - ); - - const l1GenesisTime = BigInt(Math.floor(Date.now() / 1000)); - const l1Constants = { - l1GenesisTime, - slotDuration, - ethereumSlotDuration, - l1ChainId: chainId, - rollupVersion: version, - }; - - fork = mock({ - getInitialHeader: () => initialBlockHeader, - getTreeInfo: (treeId: MerkleTreeId) => - Promise.resolve({ treeId, root: Fr.random().toBuffer(), size: 1024n, depth: 10 }), - findLeafIndices: (_treeId: MerkleTreeId, _values: any[]) => Promise.resolve([undefined]), - getStateReference: () => Promise.resolve(makeStateReference()), - }); - - worldState = mock({ - fork: () => Promise.resolve(fork), - syncImmediate: () => Promise.resolve(lastBlockNumber), - getCommitted: () => fork, - status: mockFn().mockResolvedValue({ - state: WorldStateRunningState.IDLE, - syncSummary: { - latestBlockNumber: lastBlockNumber, - latestBlockHash: hash, - finalizedBlockNumber: BlockNumber.ZERO, - oldestHistoricBlockNumber: BlockNumber.ZERO, - treesAreSynched: true, - }, - } satisfies WorldStateSynchronizerStatus), - }); - - contractDataSource = mock(); - const dateProvider = new TestDateProvider(); - publicProcessor = mock(); - validator = mock(); - - publicProcessor.process.mockImplementation( - async ( - pendingTxsIterator: AsyncIterable | Iterable, - ): Promise<[ProcessedTx[], FailedTx[], Tx[], NestedProcessReturnValues[], number]> => { - const processedTxs: ProcessedTx[] = []; - const allTxs: Tx[] = []; - let totalBlobFields = 0; - - for await (const tx of pendingTxsIterator) { - allTxs.push(tx); - const processedTx = makeProcessedTxFromPrivateOnlyTx( - tx, - Fr.ZERO, - new PublicDataWrite(Fr.random(), Fr.random()), - globalVariables, - ); - processedTxs.push(processedTx); - totalBlobFields += processedTx.txEffect.getNumBlobFields(); - } - // Assuming all txs are processed successfully and none failed for this mock - return 
[processedTxs, [], allTxs, [], totalBlobFields]; - }, - ); - blockBuilder = new TestBlockBuilder(l1Constants, worldState, contractDataSource, dateProvider); - }); - - it('builds a block out of a single tx', async () => { - const tx = await makeTx(); - const iterator = mockTxIterator([tx]); - - const blockResult = await blockBuilder.buildBlock(iterator, [], [], globalVariables, {}); - expect(publicProcessor.process).toHaveBeenCalledTimes(1); - expect(publicProcessor.process).toHaveBeenCalledWith(iterator, {}, validator); - logger.info('Built Block', blockResult.block); - expect(blockResult.block.header.globalVariables.blockNumber).toBe(newBlockNumber); - expect(blockResult.block.header.globalVariables.slotNumber).toBe(newSlotNumber); - expect(blockResult.block.header.globalVariables.coinbase.toString()).toBe(coinbase.toString()); - expect(blockResult.block.header.globalVariables.feeRecipient.toString()).toBe(feeRecipient.toString()); - expect(blockResult.block.header.globalVariables.gasFees).toEqual(GasFees.empty()); - expect(blockResult.block.header.globalVariables.chainId.toNumber()).toBe(chainId); - expect(blockResult.block.header.globalVariables.version.toNumber()).toBe(version); - expect(blockResult.block.body.txEffects.length).toBe(1); - expect(blockResult.block.body.txEffects[0].txHash).toBe(tx.getTxHash()); - }); - - it('builds a block with the correct options', async () => { - const txs = await timesParallel(5, i => makeTx(i * 0x10000)); - const deadline = new Date(Date.now() + 1000); - await blockBuilder.buildBlock(txs, [], [], globalVariables, { - maxTransactions: 4, - deadline, - }); - - expect(publicProcessor.process).toHaveBeenCalledWith( - txs, - { - maxTransactions: 4, - deadline, - }, - validator, - ); - }); - - it('builds a block for validation ignoring limits', async () => { - const txs = await timesParallel(5, i => makeTx(i * 0x10000)); - await blockBuilder.buildBlock(txs, [], [], globalVariables, {}); - - 
expect(publicProcessor.process).toHaveBeenCalledWith(txs, {}, validator); - }); - - it('builds a block out of several txs rejecting invalid txs', async () => { - const txs = await Promise.all([makeTx(0x10000), makeTx(0x20000), makeTx(0x30000)]); - const validTxs = [txs[0], txs[2]]; - const invalidTx = txs[1]; - const validTxHashes = await Promise.all(validTxs.map(tx => tx.getTxHash())); - - publicProcessor.process.mockImplementation( - async ( - pendingTxsIterator: AsyncIterable | Iterable, - ): Promise<[ProcessedTx[], FailedTx[], Tx[], NestedProcessReturnValues[], number]> => { - const processedTxs: ProcessedTx[] = []; - const usedTxs: Tx[] = []; - const failedTxs: FailedTx[] = []; - let totalBlobFields = 0; - - for await (const tx of pendingTxsIterator) { - if (validTxHashes.includes(tx.getTxHash())) { - usedTxs.push(tx); - const processedTx = makeProcessedTxFromPrivateOnlyTx( - tx, - Fr.ZERO, - new PublicDataWrite(Fr.random(), Fr.random()), - globalVariables, - ); - - processedTxs.push(processedTx); - totalBlobFields += processedTx.txEffect.getNumBlobFields(); - } else { - failedTxs.push({ tx, error: new Error() }); - } - } - // Assuming all txs are processed successfully and none failed for this mock - return [processedTxs, failedTxs, usedTxs, [], totalBlobFields]; - }, - ); - - const blockResult = await blockBuilder.buildBlock(txs, [], [], globalVariables, {}); - expect(blockResult.failedTxs).toEqual([{ tx: invalidTx, error: new Error() }]); - expect(blockResult.usedTxs).toEqual(validTxs); - }); -}); diff --git a/yarn-project/sequencer-client/src/sequencer/block_builder.ts b/yarn-project/sequencer-client/src/sequencer/block_builder.ts deleted file mode 100644 index 8a850947bfb1..000000000000 --- a/yarn-project/sequencer-client/src/sequencer/block_builder.ts +++ /dev/null @@ -1,220 +0,0 @@ -import { MerkleTreeId } from '@aztec/aztec.js/trees'; -import { BlockNumber } from '@aztec/foundation/branded-types'; -import { merge, pick } from 
'@aztec/foundation/collection'; -import type { Fr } from '@aztec/foundation/curves/bn254'; -import { createLogger } from '@aztec/foundation/log'; -import { retryUntil } from '@aztec/foundation/retry'; -import { bufferToHex } from '@aztec/foundation/string'; -import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer'; -import { getDefaultAllowedSetupFunctions } from '@aztec/p2p/msg_validators'; -import { LightweightBlockFactory } from '@aztec/prover-client/block-factory'; -import { - GuardedMerkleTreeOperations, - PublicContractsDB, - PublicProcessor, - createPublicTxSimulatorForBlockBuilding, -} from '@aztec/simulator/server'; -import type { ContractDataSource } from '@aztec/stdlib/contract'; -import { type L1RollupConstants, getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; -import { Gas } from '@aztec/stdlib/gas'; -import type { - BuildBlockResult, - FullNodeBlockBuilderConfig, - IFullNodeBlockBuilder, - MerkleTreeWriteOperations, - PublicProcessorLimits, - PublicProcessorValidator, - WorldStateSynchronizer, -} from '@aztec/stdlib/interfaces/server'; -import { GlobalVariables, Tx } from '@aztec/stdlib/tx'; -import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; -import { createValidatorForBlockBuilding } from '@aztec/validator-client'; - -const log = createLogger('block-builder'); - -/** Builds a block out of pending txs */ -async function buildBlock( - pendingTxs: Iterable | AsyncIterable, - l1ToL2Messages: Fr[], - previousCheckpointOutHashes: Fr[], - newGlobalVariables: GlobalVariables, - opts: PublicProcessorLimits = {}, - worldStateFork: MerkleTreeWriteOperations, - processor: PublicProcessor, - validator: PublicProcessorValidator, - l1Constants: Pick, - dateProvider: DateProvider, - telemetryClient: TelemetryClient = getTelemetryClient(), -): Promise { - const blockBuildingTimer = new Timer(); - const blockNumber = newGlobalVariables.blockNumber; - const slot = newGlobalVariables.slotNumber; - const msgCount = 
l1ToL2Messages.length; - const stateReference = await worldStateFork.getStateReference(); - const archiveTree = await worldStateFork.getTreeInfo(MerkleTreeId.ARCHIVE); - - log.verbose(`Building block ${blockNumber} for slot ${slot}`, { - slot, - slotStart: new Date(Number(getTimestampForSlot(slot, l1Constants)) * 1000), - now: new Date(dateProvider.now()), - blockNumber, - msgCount, - initialStateReference: stateReference.toInspect(), - initialArchiveRoot: bufferToHex(archiveTree.root), - opts, - }); - const blockFactory = new LightweightBlockFactory(previousCheckpointOutHashes, worldStateFork, telemetryClient); - await blockFactory.startNewBlock(newGlobalVariables, l1ToL2Messages); - - const [publicProcessorDuration, [processedTxs, failedTxs, usedTxs, _, usedTxBlobFields]] = await elapsed(() => - processor.process(pendingTxs, opts, validator), - ); - - // All real transactions have been added, set the block as full and pad if needed - await blockFactory.addTxs(processedTxs); - const block = await blockFactory.setBlockCompleted(); - - // How much public gas was processed - const publicGas = processedTxs.reduce((acc, tx) => acc.add(tx.gasUsed.publicGas), Gas.empty()); - - const res = { - block, - publicGas, - publicProcessorDuration, - numMsgs: l1ToL2Messages.length, - numTxs: processedTxs.length, - failedTxs: failedTxs, - blockBuildingTimer, - usedTxs, - usedTxBlobFields, - }; - log.trace('Built block', res.block.header); - return res; -} - -const FullNodeBlockBuilderConfigKeys = [ - 'l1GenesisTime', - 'slotDuration', - 'l1ChainId', - 'rollupVersion', - 'txPublicSetupAllowList', - 'fakeProcessingDelayPerTxMs', - 'fakeThrowAfterProcessingTxCount', -] as const; - -// TODO(palla/mbps): Try killing this in favor of the CheckpointsBuilder -export class FullNodeBlockBuilder implements IFullNodeBlockBuilder { - constructor( - private config: FullNodeBlockBuilderConfig, - private worldState: WorldStateSynchronizer, - private contractDataSource: ContractDataSource, - 
private dateProvider: DateProvider, - private telemetryClient: TelemetryClient = getTelemetryClient(), - ) {} - - public getConfig(): FullNodeBlockBuilderConfig { - return pick(this.config, ...FullNodeBlockBuilderConfigKeys); - } - - public updateConfig(config: Partial) { - this.config = merge(this.config, pick(config, ...FullNodeBlockBuilderConfigKeys)); - } - - public async makeBlockBuilderDeps(globalVariables: GlobalVariables, fork: MerkleTreeWriteOperations) { - const txPublicSetupAllowList = this.config.txPublicSetupAllowList ?? (await getDefaultAllowedSetupFunctions()); - const contractsDB = new PublicContractsDB(this.contractDataSource); - const guardedFork = new GuardedMerkleTreeOperations(fork); - - const publicTxSimulator = createPublicTxSimulatorForBlockBuilding( - guardedFork, - contractsDB, - globalVariables, - this.telemetryClient, - ); - - const processor = new PublicProcessor( - globalVariables, - guardedFork, - contractsDB, - publicTxSimulator, - this.dateProvider, - this.telemetryClient, - undefined, - this.config, - ); - - const validator = createValidatorForBlockBuilding( - fork, - this.contractDataSource, - globalVariables, - txPublicSetupAllowList, - ); - - return { - processor, - validator, - }; - } - - private async syncToPreviousBlock(parentBlockNumber: BlockNumber, timeout: number | undefined) { - await retryUntil( - () => this.worldState.syncImmediate(parentBlockNumber, true).then(syncedTo => syncedTo >= parentBlockNumber), - 'sync to previous block', - timeout, - 0.1, - ); - log.debug(`Synced to previous block ${parentBlockNumber}`); - } - - async buildBlock( - pendingTxs: Iterable | AsyncIterable, - l1ToL2Messages: Fr[], - previousCheckpointOutHashes: Fr[], - globalVariables: GlobalVariables, - opts: PublicProcessorLimits, - suppliedFork?: MerkleTreeWriteOperations, - ): Promise { - const parentBlockNumber = BlockNumber(globalVariables.blockNumber - 1); - const syncTimeout = opts.deadline ? 
(opts.deadline.getTime() - this.dateProvider.now()) / 1000 : undefined; - await this.syncToPreviousBlock(parentBlockNumber, syncTimeout); - const fork = suppliedFork ?? (await this.worldState.fork(parentBlockNumber)); - - try { - const { processor, validator } = await this.makeBlockBuilderDeps(globalVariables, fork); - const res = await buildBlock( - pendingTxs, - l1ToL2Messages, - previousCheckpointOutHashes, - globalVariables, - opts, - fork, - processor, - validator, - this.config, - this.dateProvider, - this.telemetryClient, - ); - return res; - } finally { - // If the fork was supplied, we don't close it. - // Otherwise, we wait a bit to close the fork we just created, - // since the processor may still be working on a dangling tx - // which was interrupted due to the processingDeadline being hit. - if (!suppliedFork) { - // eslint-disable-next-line @typescript-eslint/no-misused-promises - setTimeout(async () => { - try { - await fork.close(); - } catch (err) { - // This can happen if the sequencer is stopped before we hit this timeout. 
- log.warn(`Error closing forks for block processing`, err); - } - }, 5000); - } - } - } - - getFork(blockNumber: BlockNumber): Promise { - return this.worldState.fork(blockNumber); - } -} diff --git a/yarn-project/sequencer-client/src/sequencer/index.ts b/yarn-project/sequencer-client/src/sequencer/index.ts index af17a233c568..9f9721707764 100644 --- a/yarn-project/sequencer-client/src/sequencer/index.ts +++ b/yarn-project/sequencer-client/src/sequencer/index.ts @@ -1,4 +1,3 @@ -export * from './block_builder.js'; export * from './checkpoint_proposal_job.js'; export * from './checkpoint_voter.js'; export * from './config.js'; diff --git a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts index b017fde8fb9a..32cc74a88e34 100644 --- a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts +++ b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts @@ -1,25 +1,26 @@ import { type BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { Timer } from '@aztec/foundation/timer'; -import type { FunctionsOf } from '@aztec/foundation/types'; import { L2BlockNew } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; import { Gas } from '@aztec/stdlib/gas'; -import type { FullNodeBlockBuilderConfig, PublicProcessorLimits } from '@aztec/stdlib/interfaces/server'; +import type { + BuildBlockInCheckpointResult, + FullNodeBlockBuilderConfig, + ICheckpointBlockBuilder, + ICheckpointsBuilder, + MerkleTreeWriteOperations, + PublicProcessorLimits, +} from '@aztec/stdlib/interfaces/server'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { makeAppendOnlyTreeSnapshot } from '@aztec/stdlib/testing'; import type { CheckpointGlobalVariables, Tx } from '@aztec/stdlib/tx'; -import type { - BuildBlockInCheckpointResult, - CheckpointBuilder, - 
FullNodeCheckpointsBuilder, -} from '@aztec/validator-client'; /** * A fake CheckpointBuilder for testing that implements the same interface as the real one. * Can be seeded with blocks to return sequentially on each `buildBlock` call. */ -export class MockCheckpointBuilder implements FunctionsOf { +export class MockCheckpointBuilder implements ICheckpointBlockBuilder { private blocks: L2BlockNew[] = []; private builtBlocks: L2BlockNew[] = []; private usedTxsPerBlock: Tx[][] = []; @@ -181,7 +182,7 @@ export class MockCheckpointBuilder implements FunctionsOf { * as FullNodeCheckpointsBuilder. Returns MockCheckpointBuilder instances. * Does NOT use jest mocks - this is a proper test double. */ -export class MockCheckpointsBuilder implements FunctionsOf { +export class MockCheckpointsBuilder implements ICheckpointsBuilder { private checkpointBuilder: MockCheckpointBuilder | undefined; /** Track calls for assertions */ @@ -244,8 +245,8 @@ export class MockCheckpointsBuilder implements FunctionsOf { + _fork: MerkleTreeWriteOperations, + ): Promise { this.startCheckpointCalls.push({ checkpointNumber, constants, l1ToL2Messages, previousCheckpointOutHashes }); if (!this.checkpointBuilder) { @@ -253,7 +254,7 @@ export class MockCheckpointsBuilder implements FunctionsOf { + ): Promise { this.openCheckpointCalls.push({ checkpointNumber, constants, @@ -277,7 +278,11 @@ export class MockCheckpointsBuilder implements FunctionsOf { + throw new Error('MockCheckpointsBuilder.getFork not implemented'); } /** Reset for reuse in another test */ diff --git a/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts b/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts index 5c0643ca622d..3c16e6927430 100644 --- a/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts +++ b/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts @@ -5,8 +5,8 @@ import { sleep } from '@aztec/foundation/sleep'; import { L2BlockNew, type L2BlockSourceEventEmitter, 
L2BlockSourceEvents } from '@aztec/stdlib/block'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import type { - BuildBlockResult, - IFullNodeBlockBuilder, + ICheckpointBlockBuilder, + ICheckpointsBuilder, ITxProvider, MerkleTreeWriteOperations, } from '@aztec/stdlib/interfaces/server'; @@ -28,7 +28,8 @@ describe('EpochPruneWatcher', () => { let l1ToL2MessageSource: MockProxy; let epochCache: MockProxy; let txProvider: MockProxy>; - let blockBuilder: MockProxy; + let checkpointsBuilder: MockProxy; + let checkpointBuilder: MockProxy; let fork: MockProxy; let ts: bigint; @@ -43,9 +44,11 @@ describe('EpochPruneWatcher', () => { l1ToL2MessageSource.getL1ToL2Messages.mockResolvedValue([]); epochCache = mock(); txProvider = mock>(); - blockBuilder = mock(); + checkpointsBuilder = mock(); + checkpointBuilder = mock(); fork = mock(); - blockBuilder.getFork.mockResolvedValue(fork); + checkpointsBuilder.getFork.mockResolvedValue(fork); + checkpointsBuilder.startCheckpoint.mockResolvedValue(checkpointBuilder); ts = BigInt(Math.ceil(Date.now() / 1000)); l1Constants = { @@ -59,7 +62,7 @@ describe('EpochPruneWatcher', () => { epochCache.getL1Constants.mockReturnValue(l1Constants); - watcher = new EpochPruneWatcher(l2BlockSource, l1ToL2MessageSource, epochCache, txProvider, blockBuilder, { + watcher = new EpochPruneWatcher(l2BlockSource, l1ToL2MessageSource, epochCache, txProvider, checkpointsBuilder, { slashPrunePenalty: validEpochPrunedPenalty, slashDataWithholdingPenalty: dataWithholdingPenalty, }); @@ -131,11 +134,11 @@ describe('EpochPruneWatcher', () => { ); const tx = Tx.random(); txProvider.getAvailableTxs.mockResolvedValue({ txs: [tx], missingTxs: [] }); - blockBuilder.buildBlock.mockResolvedValue({ + checkpointBuilder.buildBlock.mockResolvedValue({ block: block, failedTxs: [], numTxs: 1, - } as unknown as BuildBlockResult); + } as any); const committee: Hex[] = [ '0x0000000000000000000000000000000000000abc', @@ -172,7 +175,13 @@ 
describe('EpochPruneWatcher', () => { }, ] satisfies WantToSlashArgs[]); - expect(blockBuilder.buildBlock).toHaveBeenCalledWith([tx], [], [], block.header.globalVariables, {}, fork); + expect(checkpointsBuilder.startCheckpoint).toHaveBeenCalled(); + expect(checkpointBuilder.buildBlock).toHaveBeenCalledWith( + [tx], + block.header.globalVariables.blockNumber, + block.header.globalVariables.timestamp, + {}, + ); }); it('should not slash if the data is available but the epoch could not have been proven', async () => { @@ -195,11 +204,11 @@ describe('EpochPruneWatcher', () => { ); const tx = Tx.random(); txProvider.getAvailableTxs.mockResolvedValue({ txs: [tx], missingTxs: [] }); - blockBuilder.buildBlock.mockResolvedValue({ + checkpointBuilder.buildBlock.mockResolvedValue({ block: blockFromBuilder, failedTxs: [], numTxs: 1, - } as unknown as BuildBlockResult); + } as any); const committee: Hex[] = [ '0x0000000000000000000000000000000000000abc', @@ -223,7 +232,13 @@ describe('EpochPruneWatcher', () => { expect(emitSpy).not.toHaveBeenCalled(); - expect(blockBuilder.buildBlock).toHaveBeenCalledWith([tx], [], [], blockFromL1.header.globalVariables, {}, fork); + expect(checkpointsBuilder.startCheckpoint).toHaveBeenCalled(); + expect(checkpointBuilder.buildBlock).toHaveBeenCalledWith( + [tx], + blockFromL1.header.globalVariables.blockNumber, + blockFromL1.header.globalVariables.timestamp, + {}, + ); }); }); diff --git a/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts b/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts index 9c170c85d071..c980d49240c4 100644 --- a/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts +++ b/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts @@ -12,13 +12,14 @@ import { } from '@aztec/stdlib/block'; import { getEpochAtSlot } from '@aztec/stdlib/epoch-helpers'; import type { - IFullNodeBlockBuilder, + ICheckpointsBuilder, ITxProvider, MerkleTreeWriteOperations, SlasherConfig, } from '@aztec/stdlib/interfaces/server'; 
import { type L1ToL2MessageSource, computeCheckpointOutHash } from '@aztec/stdlib/messaging'; import { OffenseType, getOffenseTypeName } from '@aztec/stdlib/slashing'; +import type { CheckpointGlobalVariables } from '@aztec/stdlib/tx'; import { ReExFailedTxsError, ReExStateMismatchError, @@ -53,7 +54,7 @@ export class EpochPruneWatcher extends (EventEmitter as new () => WatcherEmitter private l1ToL2MessageSource: L1ToL2MessageSource, private epochCache: EpochCache, private txProvider: Pick, - private blockBuilder: IFullNodeBlockBuilder, + private checkpointsBuilder: ICheckpointsBuilder, penalties: EpochPruneWatcherPenalties, ) { super(); @@ -126,7 +127,7 @@ export class EpochPruneWatcher extends (EventEmitter as new () => WatcherEmitter } let previousCheckpointOutHashes: Fr[] = []; - const fork = await this.blockBuilder.getFork(BlockNumber(blocks[0].header.globalVariables.blockNumber - 1)); + const fork = await this.checkpointsBuilder.getFork(BlockNumber(blocks[0].header.globalVariables.blockNumber - 1)); try { for (const block of blocks) { await this.validateBlock(block, previousCheckpointOutHashes, fork); @@ -158,14 +159,27 @@ export class EpochPruneWatcher extends (EventEmitter as new () => WatcherEmitter const checkpointNumber = CheckpointNumber.fromBlockNumber(blockFromL1.number); const l1ToL2Messages = await this.l1ToL2MessageSource.getL1ToL2Messages(checkpointNumber); - const { block, failedTxs, numTxs } = await this.blockBuilder.buildBlock( - txs, + const gv = blockFromL1.header.globalVariables; + const constants: CheckpointGlobalVariables = { + chainId: gv.chainId, + version: gv.version, + slotNumber: gv.slotNumber, + coinbase: gv.coinbase, + feeRecipient: gv.feeRecipient, + gasFees: gv.gasFees, + }; + + // Use checkpoint builder to validate the block + const checkpointBuilder = await this.checkpointsBuilder.startCheckpoint( + checkpointNumber, + constants, l1ToL2Messages, previousCheckpointOutHashes, - blockFromL1.header.globalVariables, - {}, fork, ); + 
+ const { block, failedTxs, numTxs } = await checkpointBuilder.buildBlock(txs, gv.blockNumber, gv.timestamp, {}); + if (numTxs !== txs.length) { // This should be detected by state mismatch, but this makes it easier to debug. throw new ValidatorError(`Built block with ${numTxs} txs, expected ${txs.length}`); diff --git a/yarn-project/stdlib/src/block/checkpointed_l2_block.ts b/yarn-project/stdlib/src/block/checkpointed_l2_block.ts index 5fc308295c6d..42a9fa202de6 100644 --- a/yarn-project/stdlib/src/block/checkpointed_l2_block.ts +++ b/yarn-project/stdlib/src/block/checkpointed_l2_block.ts @@ -5,9 +5,8 @@ import type { FieldsOf } from '@aztec/foundation/types'; import { z } from 'zod'; -import { L1PublishedData, PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; +import { L1PublishedData } from '../checkpoint/published_checkpoint.js'; import { MAX_BLOCK_HASH_STRING_LENGTH, MAX_COMMITTEE_SIZE } from '../deserialization/index.js'; -import { L2Block } from './l2_block.js'; import { L2BlockNew } from './l2_block_new.js'; import { CommitteeAttestation } from './proposal/committee_attestation.js'; @@ -59,6 +58,7 @@ export class CheckpointedL2Block { public toBuffer(): Buffer { return serializeToBuffer( + this.checkpointNumber, this.block, this.l1.blockNumber, this.l1.blockHash, @@ -68,54 +68,3 @@ export class CheckpointedL2Block { ); } } - -export class PublishedL2Block { - constructor( - public block: L2Block, - public l1: L1PublishedData, - public attestations: CommitteeAttestation[], - ) {} - - static get schema() { - return z - .object({ - block: L2Block.schema, - l1: L1PublishedData.schema, - attestations: z.array(CommitteeAttestation.schema), - }) - .transform(obj => PublishedL2Block.fromFields(obj)); - } - - static fromBuffer(bufferOrReader: Buffer | BufferReader): PublishedL2Block { - const reader = BufferReader.asReader(bufferOrReader); - const block = reader.readObject(L2Block); - const l1BlockNumber = reader.readBigInt(); - const l1BlockHash = 
reader.readString(MAX_BLOCK_HASH_STRING_LENGTH); - const l1Timestamp = reader.readBigInt(); - const attestations = reader.readVector(CommitteeAttestation, MAX_COMMITTEE_SIZE); - return new PublishedL2Block(block, new L1PublishedData(l1BlockNumber, l1Timestamp, l1BlockHash), attestations); - } - - static fromFields(fields: FieldsOf) { - return new PublishedL2Block(fields.block, fields.l1, fields.attestations); - } - - public toBuffer(): Buffer { - return serializeToBuffer( - this.block, - this.l1.blockNumber, - this.l1.blockHash, - this.l1.timestamp, - this.attestations.length, - this.attestations, - ); - } - - public toPublishedCheckpoint() { - return new PublishedCheckpoint(this.block.toCheckpoint(), this.l1, this.attestations); - } - - static fromPublishedCheckpoint(checkpoint: PublishedCheckpoint) { - return new PublishedL2Block(L2Block.fromCheckpoint(checkpoint.checkpoint), checkpoint.l1, checkpoint.attestations); - } -} diff --git a/yarn-project/stdlib/src/block/in_block.ts b/yarn-project/stdlib/src/block/in_block.ts index c836d540a337..01f5f5ec9dfe 100644 --- a/yarn-project/stdlib/src/block/in_block.ts +++ b/yarn-project/stdlib/src/block/in_block.ts @@ -3,7 +3,7 @@ import { BlockNumber, BlockNumberSchema } from '@aztec/foundation/branded-types' import { type ZodTypeAny, z } from 'zod'; import { L2BlockHash } from './block_hash.js'; -import type { L2Block } from './l2_block.js'; +import type { L2BlockNew } from './l2_block_new.js'; export type InBlock = { l2BlockNumber: BlockNumber; @@ -29,7 +29,7 @@ export function randomDataInBlock(data: T): DataInBlock { }; } -export async function wrapDataInBlock(data: T, block: L2Block): Promise> { +export async function wrapDataInBlock(data: T, block: L2BlockNew): Promise> { return { data, l2BlockNumber: block.number, diff --git a/yarn-project/stdlib/src/block/index.ts b/yarn-project/stdlib/src/block/index.ts index 095dec788fb3..fb213277e7c2 100644 --- a/yarn-project/stdlib/src/block/index.ts +++ 
b/yarn-project/stdlib/src/block/index.ts @@ -1,6 +1,4 @@ -export * from './l2_block.js'; export * from './l2_block_new.js'; -export * from './l2_block_header.js'; export * from './l2_block_stream/index.js'; export * from './in_block.js'; export * from './body.js'; diff --git a/yarn-project/stdlib/src/block/l2_block.test.ts b/yarn-project/stdlib/src/block/l2_block.test.ts deleted file mode 100644 index c881813f26ec..000000000000 --- a/yarn-project/stdlib/src/block/l2_block.test.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { GENESIS_BLOCK_HEADER_HASH } from '@aztec/constants'; -import { BlockNumber } from '@aztec/foundation/branded-types'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { jsonStringify } from '@aztec/foundation/json-rpc'; - -import { L2Block } from './l2_block.js'; -import { L2BlockHeader } from './l2_block_header.js'; - -describe('L2Block', () => { - it('can serialize an L2 block with logs to a buffer and back', async () => { - const block = await L2Block.random(BlockNumber(42)); - - const buffer = block.toBuffer(); - const recovered = L2Block.fromBuffer(buffer); - - expect(recovered).toEqual(block); - }); - - it('convert to and from json', async () => { - const block = await L2Block.random(BlockNumber(42)); - const parsed = L2Block.schema.parse(JSON.parse(jsonStringify(block))); - expect(parsed).toEqual(block); - }); - it('can create an initial block', async () => { - // Values taken from world_state.test.cpp WorldStateTest.GetInitialTreeInfoForAllTrees - const emptyBlockHeader = L2BlockHeader.empty(); - emptyBlockHeader.state.l1ToL2MessageTree.root = Fr.fromString( - '0x0d582c10ff8115413aa5b70564fdd2f3cefe1f33a1e43a47bc495081e91e73e5', - ); - emptyBlockHeader.state.partial.noteHashTree.root = Fr.fromString( - '0x2ac5dda169f6bb3b9ca09bbac34e14c94d1654597db740153a1288d859a8a30a', - ); - emptyBlockHeader.state.partial.nullifierTree.root = Fr.fromString( - '0x1ec3788cd1c32e54d889d67fe29e481114f9d4afe9b44b229aa29d8ad528dd31', - ); - 
emptyBlockHeader.state.partial.nullifierTree.nextAvailableLeafIndex = 128; - emptyBlockHeader.state.partial.publicDataTree.root = Fr.fromString( - '0x23c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9', - ); - emptyBlockHeader.state.partial.publicDataTree.nextAvailableLeafIndex = 128; - const emptyBlock = L2Block.empty(emptyBlockHeader); - const emptyBlockHash = await emptyBlock.hash(); - expect(emptyBlockHash.equals(GENESIS_BLOCK_HEADER_HASH)).toBeTruthy(); - }); -}); diff --git a/yarn-project/stdlib/src/block/l2_block.ts b/yarn-project/stdlib/src/block/l2_block.ts deleted file mode 100644 index d97476904de5..000000000000 --- a/yarn-project/stdlib/src/block/l2_block.ts +++ /dev/null @@ -1,274 +0,0 @@ -import { type BlockBlobData, encodeBlockBlobData, encodeCheckpointBlobDataFromBlocks } from '@aztec/blob-lib/encoding'; -import { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { bufferToHex, hexToBuffer } from '@aztec/foundation/string'; - -import { z } from 'zod'; - -import { Checkpoint } from '../checkpoint/checkpoint.js'; -import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; -import type { BlockHeader } from '../tx/block_header.js'; -import { Body } from './body.js'; -import { makeAppendOnlyTreeSnapshot, makeL2BlockHeader } from './l2_block_code_to_purge.js'; -import { L2BlockHeader } from './l2_block_header.js'; -import type { L2BlockInfo } from './l2_block_info.js'; -import { L2BlockNew } from './l2_block_new.js'; - -/** - * The data that makes up the rollup proof, with encoder decoder functions. - * - * @deprecated Use `L2BlockNew` instead. - */ -export class L2Block { - constructor( - /** Snapshot of archive tree after the block is applied. */ - public archive: AppendOnlyTreeSnapshot, - /** L2 block header. 
*/ - public header: L2BlockHeader, - /** L2 block body. */ - public body: Body, - private blockHash: Fr | undefined = undefined, - ) {} - - static get schema() { - return z - .object({ - archive: AppendOnlyTreeSnapshot.schema, - header: L2BlockHeader.schema, - body: Body.schema, - }) - .transform(({ archive, header, body }) => new L2Block(archive, header, body)); - } - - /** - * Deserializes a block from a buffer - * @returns A deserialized L2 block. - */ - static fromBuffer(buf: Buffer | BufferReader) { - const reader = BufferReader.asReader(buf); - const header = reader.readObject(L2BlockHeader); - const archive = reader.readObject(AppendOnlyTreeSnapshot); - const body = reader.readObject(Body); - - return new L2Block(archive, header, body); - } - - /** - * Serializes a block - * @returns A serialized L2 block as a Buffer. - */ - toBuffer() { - return serializeToBuffer(this.header, this.archive, this.body); - } - - /** - * Deserializes L2 block from a buffer. - * @param str - A serialized L2 block. - * @returns Deserialized L2 block. - */ - static fromString(str: string): L2Block { - return L2Block.fromBuffer(hexToBuffer(str)); - } - - /** - * Serializes a block to a string. - * @returns A serialized L2 block as a string. - */ - toString(): string { - return bufferToHex(this.toBuffer()); - } - - /** - * Creates an L2 block containing random data. - * @param l2BlockNum - The number of the L2 block. - * @param txsPerBlock - The number of transactions to include in the block. - * @param numPublicCallsPerTx - The number of public function calls to include in each transaction. - * @param numPublicLogsPerCall - The number of public logs per 1 public function invocation. - * @param inHash - The hash of the L1 to L2 messages subtree which got inserted in this block. - * @returns The L2 block. 
- */ - static async random( - l2BlockNum: BlockNumber, - txsPerBlock = 4, - numPublicCallsPerTx = 3, - numPublicLogsPerCall = 1, - inHash: Fr | undefined = undefined, - slotNumber: number | undefined = undefined, - maxEffects: number | undefined = undefined, - ): Promise { - const body = await Body.random({ txsPerBlock, numPublicCallsPerTx, numPublicLogsPerCall, maxEffects }); - - return new L2Block( - makeAppendOnlyTreeSnapshot(l2BlockNum + 1), - makeL2BlockHeader(0, l2BlockNum, slotNumber ?? l2BlockNum, { inHash }), - body, - ); - } - - /** - * Creates an L2 block containing empty data. - * @param header - An optional header to assign to the block - * @returns The L2 block. - */ - static empty(header?: L2BlockHeader): L2Block { - return new L2Block(AppendOnlyTreeSnapshot.empty(), header ?? L2BlockHeader.empty(), Body.empty()); - } - - get number(): BlockNumber { - return this.header.getBlockNumber(); - } - - get slot(): SlotNumber { - return this.header.getSlot(); - } - - get timestamp(): bigint { - return this.header.globalVariables.timestamp; - } - - /** - * Returns the block's hash (hash of block header). - * @returns The block's hash. - */ - public async hash(): Promise { - if (this.blockHash === undefined) { - this.blockHash = await this.getBlockHeader().hash(); - } - return this.blockHash; - } - - /** - * @deprecated - * This only works when there's one block per checkpoint. - * TODO(#17027): Remove this method from L2Block and create a dedicated Checkpoint class. - */ - public getCheckpointHeader() { - return this.header.toCheckpointHeader(); - } - - // Temporary helper to get the actual block header. - public getBlockHeader(): BlockHeader { - return this.header.toBlockHeader(); - } - - public toL2Block(args: { checkpointNumber?: CheckpointNumber; indexWithinCheckpoint?: number } = {}): L2BlockNew { - return new L2BlockNew( - this.archive, - this.getBlockHeader(), - this.body, - args?.checkpointNumber ?? 
CheckpointNumber.fromBlockNumber(this.number), - args?.indexWithinCheckpoint ?? 0, - ); - } - - public toCheckpoint() { - return new Checkpoint( - this.archive, - this.getCheckpointHeader(), - [this.toL2Block()], - CheckpointNumber.fromBlockNumber(this.number), - ); - } - - static fromCheckpoint(checkpoint: Checkpoint) { - const checkpointHeader = checkpoint.header; - const block = checkpoint.blocks.at(-1)!; - const header = new L2BlockHeader( - new AppendOnlyTreeSnapshot(checkpointHeader.lastArchiveRoot, block.number), - checkpointHeader.blobsHash, - checkpointHeader.inHash, - checkpointHeader.epochOutHash, - block.header.state, - block.header.globalVariables, - block.header.totalFees, - checkpointHeader.totalManaUsed, - block.header.spongeBlobHash, - checkpointHeader.blockHeadersHash, - ); - return new L2Block(checkpoint.archive, header, block.body); - } - - /** - * @deprecated - * This only works when there's one block per checkpoint. - * TODO(#17027): Remove this method from L2Block and create a dedicated Checkpoint class. - */ - public getCheckpointBlobFields() { - const blockBlobData = this.toBlockBlobData(); - return encodeCheckpointBlobDataFromBlocks([blockBlobData]); - } - - public toBlobFields(): Fr[] { - const blockBlobData = this.toBlockBlobData(); - return encodeBlockBlobData(blockBlobData); - } - - public toBlockBlobData(): BlockBlobData { - // There's only one L2Block per checkpoint, so it's always the first block in the checkpoint. 
- const isFirstBlock = true; - return { - blockEndMarker: { - numTxs: this.body.txEffects.length, - timestamp: this.header.globalVariables.timestamp, - blockNumber: this.number, - }, - blockEndStateField: { - l1ToL2MessageNextAvailableLeafIndex: this.header.state.l1ToL2MessageTree.nextAvailableLeafIndex, - noteHashNextAvailableLeafIndex: this.header.state.partial.noteHashTree.nextAvailableLeafIndex, - nullifierNextAvailableLeafIndex: this.header.state.partial.nullifierTree.nextAvailableLeafIndex, - publicDataNextAvailableLeafIndex: this.header.state.partial.publicDataTree.nextAvailableLeafIndex, - totalManaUsed: this.header.totalManaUsed.toBigInt(), - }, - lastArchiveRoot: this.header.lastArchive.root, - noteHashRoot: this.header.state.partial.noteHashTree.root, - nullifierRoot: this.header.state.partial.nullifierTree.root, - publicDataRoot: this.header.state.partial.publicDataTree.root, - l1ToL2MessageRoot: isFirstBlock ? this.header.state.l1ToL2MessageTree.root : undefined, - txs: this.body.toTxBlobData(), - }; - } - - /** - * Returns stats used for logging. - * @returns Stats on tx count, number, and log size and count. 
- */ - getStats() { - const logsStats = { - privateLogCount: this.body.txEffects.reduce((logCount, txEffect) => logCount + txEffect.privateLogs.length, 0), - publicLogCount: this.body.txEffects.reduce((logCount, txEffect) => logCount + txEffect.publicLogs.length, 0), - contractClassLogCount: this.body.txEffects.reduce( - (logCount, txEffect) => logCount + txEffect.contractClassLogs.length, - 0, - ), - contractClassLogSize: this.body.txEffects.reduce( - (totalLogSize, txEffect) => - totalLogSize + txEffect.contractClassLogs.reduce((acc, log) => acc + log.emittedLength, 0), - 0, - ), - }; - - return { - txCount: this.body.txEffects.length, - blockNumber: this.number, - blockTimestamp: Number(this.header.globalVariables.timestamp), - ...logsStats, - }; - } - - toBlockInfo(): L2BlockInfo { - return { - blockHash: this.blockHash, - archive: this.archive.root, - lastArchive: this.header.lastArchive.root, - blockNumber: this.number, - slotNumber: this.header.getSlot(), - txCount: this.body.txEffects.length, - timestamp: this.header.globalVariables.timestamp, - }; - } - - equals(other: L2Block) { - return this.archive.equals(other.archive) && this.header.equals(other.header) && this.body.equals(other.body); - } -} diff --git a/yarn-project/stdlib/src/block/l2_block_code_to_purge.ts b/yarn-project/stdlib/src/block/l2_block_code_to_purge.ts deleted file mode 100644 index 3f6f359aeff3..000000000000 --- a/yarn-project/stdlib/src/block/l2_block_code_to_purge.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { compact } from '@aztec/foundation/collection'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { EthAddress } from '@aztec/foundation/eth-address'; -import type { FieldsOf } from '@aztec/foundation/types'; - -import { AztecAddress } from '../aztec-address/index.js'; -import { GasFees } from '../gas/gas_fees.js'; -import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; 
-import { GlobalVariables } from '../tx/global_variables.js'; -import { PartialStateReference } from '../tx/partial_state_reference.js'; -import { StateReference } from '../tx/state_reference.js'; -import { L2BlockHeader } from './l2_block_header.js'; - -export function makeL2BlockHeader( - seed = 0, - blockNumber?: number, - slotNumber?: number, - overrides: Partial> = {}, -) { - return new L2BlockHeader( - makeAppendOnlyTreeSnapshot(seed + 0x100), - overrides.blobsHash ?? new Fr(seed + 0x200), - overrides.inHash ?? new Fr(seed + 0x300), - overrides.epochOutHash ?? new Fr(seed + 0x400), - overrides.state ?? makeStateReference(seed + 0x600), - makeGlobalVariables((seed += 0x700), { - ...(blockNumber ? { blockNumber: BlockNumber(blockNumber) } : {}), - ...(slotNumber ? { slotNumber: SlotNumber(slotNumber) } : {}), - }), - new Fr(seed + 0x300), - new Fr(seed + 0x800), - new Fr(seed + 0x900), - new Fr(seed + 0xa00), - ); -} - -/** - * Makes arbitrary append only tree snapshot. - * @param seed - The seed to use for generating the append only tree snapshot. - * @returns An append only tree snapshot. - */ -export function makeAppendOnlyTreeSnapshot(seed = 1): AppendOnlyTreeSnapshot { - return new AppendOnlyTreeSnapshot(new Fr(seed), seed); -} - -/** - * Makes arbitrary state reference. - * @param seed - The seed to use for generating the state reference. - * @returns A state reference. - */ -function makeStateReference(seed = 0): StateReference { - return new StateReference(makeAppendOnlyTreeSnapshot(seed), makePartialStateReference(seed + 1)); -} - -/** - * Makes arbitrary partial state reference. - * @param seed - The seed to use for generating the partial state reference. - * @returns A partial state reference. 
- */ -function makePartialStateReference(seed = 0): PartialStateReference { - return new PartialStateReference( - makeAppendOnlyTreeSnapshot(seed), - makeAppendOnlyTreeSnapshot(seed + 1), - makeAppendOnlyTreeSnapshot(seed + 2), - ); -} - -function makeGlobalVariables(seed = 1, overrides: Partial> = {}): GlobalVariables { - return GlobalVariables.from({ - chainId: new Fr(seed), - version: new Fr(seed + 1), - blockNumber: BlockNumber(seed + 2), - slotNumber: SlotNumber(seed + 3), - timestamp: BigInt(seed + 4), - coinbase: EthAddress.fromField(new Fr(seed + 5)), - feeRecipient: AztecAddress.fromField(new Fr(seed + 6)), - gasFees: new GasFees(seed + 7, seed + 8), - ...compact(overrides), - }); -} diff --git a/yarn-project/stdlib/src/block/l2_block_header.ts b/yarn-project/stdlib/src/block/l2_block_header.ts deleted file mode 100644 index 405ec7ac85a7..000000000000 --- a/yarn-project/stdlib/src/block/l2_block_header.ts +++ /dev/null @@ -1,268 +0,0 @@ -import { SlotNumber } from '@aztec/foundation/branded-types'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { type ZodFor, schemas } from '@aztec/foundation/schemas'; -import { BufferReader, FieldReader, serializeToBuffer, serializeToFields } from '@aztec/foundation/serialize'; -import { bufferToHex, hexToBuffer } from '@aztec/foundation/string'; -import type { FieldsOf } from '@aztec/foundation/types'; - -import { inspect } from 'util'; -import { z } from 'zod'; - -import { CheckpointHeader } from '../rollup/checkpoint_header.js'; -import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; -import { BlockHeader, GlobalVariables, StateReference } from '../tx/index.js'; - -/** - * TO BE DELETED - * - * A header of an L2 block combining the block header and the checkpoint header. - * This is a temporary workaround to avoid changing too many things before building in chunks is properly implemented. - * This works for now because we only have one block per checkpoint. 
- * - * @deprecated Use BlockHeader or CheckpointHeader instead. - */ -export class L2BlockHeader { - constructor( - /** Snapshot of archive before the block is applied. */ - public lastArchive: AppendOnlyTreeSnapshot, - public blobsHash: Fr, - public inHash: Fr, - /** Root of the epoch out hash tree. Leaves are the out hashes of this block and all previous blocks in the epoch. */ - public epochOutHash: Fr, - /** State reference. */ - public state: StateReference, - /** Global variables of an L2 block. */ - public globalVariables: GlobalVariables, - /** Total fees in the block, computed by the root rollup circuit */ - public totalFees: Fr, - /** Total mana used in the block, computed by the root rollup circuit */ - public totalManaUsed: Fr, - /** Hash of the sponge blob of the block. */ - public spongeBlobHash: Fr, - /** Hash of the block headers in the checkpoint. */ - public blockHeadersHash: Fr, - ) {} - - static get schema(): ZodFor { - return z - .object({ - lastArchive: AppendOnlyTreeSnapshot.schema, - blobsHash: schemas.Fr, - inHash: schemas.Fr, - epochOutHash: schemas.Fr, - state: StateReference.schema, - globalVariables: GlobalVariables.schema, - totalFees: schemas.Fr, - totalManaUsed: schemas.Fr, - spongeBlobHash: schemas.Fr, - blockHeadersHash: schemas.Fr, - }) - .transform(L2BlockHeader.from); - } - - static getFields(fields: FieldsOf) { - return [ - fields.lastArchive, - fields.blobsHash, - fields.inHash, - fields.epochOutHash, - fields.state, - fields.globalVariables, - fields.totalFees, - fields.totalManaUsed, - fields.spongeBlobHash, - fields.blockHeadersHash, - ] as const; - } - - static from(fields: FieldsOf) { - return new L2BlockHeader(...L2BlockHeader.getFields(fields)); - } - - getSlot(): SlotNumber { - return this.globalVariables.slotNumber; - } - - getBlockNumber() { - return this.globalVariables.blockNumber; - } - - getSize() { - return ( - this.lastArchive.getSize() + - this.blobsHash.size + - this.inHash.size + - this.epochOutHash.size + 
- this.state.getSize() + - this.globalVariables.getSize() + - this.totalFees.size + - this.totalManaUsed.size + - this.spongeBlobHash.size + - this.blockHeadersHash.size - ); - } - - toBuffer() { - return serializeToBuffer(...L2BlockHeader.getFields(this)); - } - - toFields(): Fr[] { - return serializeToFields(...L2BlockHeader.getFields(this)); - } - - clone() { - return L2BlockHeader.fromBuffer(this.toBuffer()); - } - - static fromBuffer(buffer: Buffer | BufferReader) { - const reader = BufferReader.asReader(buffer); - - return new L2BlockHeader( - reader.readObject(AppendOnlyTreeSnapshot), - reader.readObject(Fr), - reader.readObject(Fr), - reader.readObject(Fr), - reader.readObject(StateReference), - reader.readObject(GlobalVariables), - reader.readObject(Fr), - reader.readObject(Fr), - reader.readObject(Fr), - reader.readObject(Fr), - ); - } - - static fromFields(fields: Fr[] | FieldReader) { - const reader = FieldReader.asReader(fields); - - return new L2BlockHeader( - AppendOnlyTreeSnapshot.fromFields(reader), - reader.readField(), - reader.readField(), - reader.readField(), - StateReference.fromFields(reader), - GlobalVariables.fromFields(reader), - reader.readField(), - reader.readField(), - reader.readField(), - reader.readField(), - ); - } - - static empty(fields: Partial> = {}) { - return L2BlockHeader.from({ - lastArchive: AppendOnlyTreeSnapshot.empty(), - blobsHash: Fr.ZERO, - inHash: Fr.ZERO, - epochOutHash: Fr.ZERO, - state: StateReference.empty(), - globalVariables: GlobalVariables.empty(), - totalFees: Fr.ZERO, - totalManaUsed: Fr.ZERO, - spongeBlobHash: Fr.ZERO, - blockHeadersHash: Fr.ZERO, - ...fields, - }); - } - - isEmpty(): boolean { - return ( - this.lastArchive.isEmpty() && - this.blobsHash.isZero() && - this.inHash.isZero() && - this.epochOutHash.isZero() && - this.state.isEmpty() && - this.globalVariables.isEmpty() && - this.totalFees.isZero() && - this.totalManaUsed.isZero() && - this.spongeBlobHash.isZero() && - 
this.blockHeadersHash.isZero() - ); - } - - /** - * Serializes this instance into a string. - * @returns Encoded string. - */ - public toString() { - return bufferToHex(this.toBuffer()); - } - - static fromString(str: string) { - return L2BlockHeader.fromBuffer(hexToBuffer(str)); - } - - toCheckpointHeader() { - return new CheckpointHeader( - this.lastArchive.root, - this.blockHeadersHash, - this.blobsHash, - this.inHash, - this.epochOutHash, - this.globalVariables.slotNumber, - this.globalVariables.timestamp, - this.globalVariables.coinbase, - this.globalVariables.feeRecipient, - this.globalVariables.gasFees, - this.totalManaUsed, - ); - } - - toBlockHeader() { - return new BlockHeader( - this.lastArchive, - this.state, - this.spongeBlobHash, - this.globalVariables, - this.totalFees, - this.totalManaUsed, - ); - } - - toInspect() { - return { - lastArchive: this.lastArchive.root.toString(), - blobsHash: this.blobsHash.toString(), - inHash: this.inHash.toString(), - epochOutHash: this.epochOutHash.toString(), - state: this.state.toInspect(), - globalVariables: this.globalVariables.toInspect(), - totalFees: this.totalFees.toBigInt(), - totalManaUsed: this.totalManaUsed.toBigInt(), - spongeBlobHash: this.spongeBlobHash.toString(), - blockHeadersHash: this.blockHeadersHash.toString(), - }; - } - - [inspect.custom]() { - return `L2BlockHeader { - lastArchive: ${inspect(this.lastArchive)}, - blobsHash: ${inspect(this.blobsHash)}, - inHash: ${inspect(this.inHash)}, - epochOutHash: ${inspect(this.epochOutHash)}, - state.l1ToL2MessageTree: ${inspect(this.state.l1ToL2MessageTree)}, - state.noteHashTree: ${inspect(this.state.partial.noteHashTree)}, - state.nullifierTree: ${inspect(this.state.partial.nullifierTree)}, - state.publicDataTree: ${inspect(this.state.partial.publicDataTree)}, - globalVariables: ${inspect(this.globalVariables)}, - totalFees: ${this.totalFees}, - totalManaUsed: ${this.totalManaUsed}, - spongeBlobHash: ${this.spongeBlobHash}, - blockHeadersHash: 
${this.blockHeadersHash}, -}`; - } - - public equals(other: this): boolean { - return ( - this.lastArchive.equals(other.lastArchive) && - this.blobsHash.equals(other.blobsHash) && - this.inHash.equals(other.inHash) && - this.epochOutHash.equals(other.epochOutHash) && - this.state.equals(other.state) && - this.globalVariables.equals(other.globalVariables) && - this.totalFees.equals(other.totalFees) && - this.totalManaUsed.equals(other.totalManaUsed) && - this.spongeBlobHash.equals(other.spongeBlobHash) && - this.blockHeadersHash.equals(other.blockHeadersHash) - ); - } -} diff --git a/yarn-project/stdlib/src/block/l2_block_source.ts b/yarn-project/stdlib/src/block/l2_block_source.ts index 10a6183c7483..374b382b4f2f 100644 --- a/yarn-project/stdlib/src/block/l2_block_source.ts +++ b/yarn-project/stdlib/src/block/l2_block_source.ts @@ -20,8 +20,7 @@ import type { BlockHeader } from '../tx/block_header.js'; import type { IndexedTxEffect } from '../tx/indexed_tx_effect.js'; import type { TxHash } from '../tx/tx_hash.js'; import type { TxReceipt } from '../tx/tx_receipt.js'; -import { type CheckpointedL2Block, PublishedL2Block } from './checkpointed_l2_block.js'; -import type { L2Block } from './l2_block.js'; +import type { CheckpointedL2Block } from './checkpointed_l2_block.js'; import type { L2BlockNew } from './l2_block_new.js'; import type { ValidateCheckpointNegativeResult, ValidateCheckpointResult } from './validate_block_result.js'; @@ -105,6 +104,20 @@ export interface L2BlockSource { */ getL2BlockNew(number: BlockNumber): Promise; + /** + * Gets an L2 block by its hash. + * @param blockHash - The block hash to retrieve. + * @returns The requested L2 block (or undefined if not found). + */ + getL2BlockNewByHash(blockHash: Fr): Promise; + + /** + * Gets an L2 block by its archive root. + * @param archive - The archive root to retrieve. + * @returns The requested L2 block (or undefined if not found). 
+ */ + getL2BlockNewByArchive(archive: Fr): Promise; + /** * Gets a tx effect. * @param txHash - The hash of the tx corresponding to the tx effect. @@ -179,11 +192,8 @@ export interface L2BlockSource { * Gets an l2 block. If a negative number is passed, the block returned is the most recent. * @param number - The block number to return (inclusive). * @returns The requested L2 block. - * @deprecated Use getL2BlockNew instead. */ - getBlock(number: BlockNumber): Promise; - - getL2BlockNew(number: BlockNumber): Promise; + getBlock(number: BlockNumber): Promise; getL2BlocksNew(from: BlockNumber, limit: number, proven?: boolean): Promise; @@ -192,7 +202,7 @@ export interface L2BlockSource { * @dev Use this method only with recent epochs, since it walks the block list backwards. * @param epochNumber - The epoch number to return blocks for. */ - getBlocksForEpoch(epochNumber: EpochNumber): Promise; + getBlocksForEpoch(epochNumber: EpochNumber): Promise; /** * Returns all blocks for a given slot. @@ -206,14 +216,14 @@ export interface L2BlockSource { * @param blockHash - The block hash to retrieve. * @returns The requested block (or undefined if not found). */ - getPublishedBlockByHash(blockHash: Fr): Promise; + getPublishedBlockByHash(blockHash: Fr): Promise; /** * Gets a published block by its archive root. * @param archive - The archive root to retrieve. * @returns The requested block (or undefined if not found). */ - getPublishedBlockByArchive(archive: Fr): Promise; + getPublishedBlockByArchive(archive: Fr): Promise; /** * Gets up to `limit` amount of L2 blocks starting from `from`. @@ -222,10 +232,10 @@ export interface L2BlockSource { * @param proven - If true, only return blocks that have been proven. * @returns The requested L2 blocks. */ - getBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise; + getBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise; /** Equivalent to getBlocks but includes publish data. 
*/ - getPublishedBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise; + getPublishedBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise; } /** diff --git a/yarn-project/stdlib/src/block/published_l2_block.test.ts b/yarn-project/stdlib/src/block/published_l2_block.test.ts deleted file mode 100644 index 6191190228bc..000000000000 --- a/yarn-project/stdlib/src/block/published_l2_block.test.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { jsonStringify } from '@aztec/foundation/json-rpc'; -import { randomPublishedL2Block } from '@aztec/stdlib/testing'; - -import { PublishedL2Block } from './checkpointed_l2_block.js'; - -describe('PublishedL2Block', () => { - it('convert to and from json', async () => { - const block = await randomPublishedL2Block(1); - const parsed = PublishedL2Block.schema.parse(JSON.parse(jsonStringify(block))); - expect(parsed).toEqual(block); - }); - - it('serializes and deserializes to buffer', async () => { - const block = await randomPublishedL2Block(1); - const serialized = block.toBuffer(); - const deserialized = PublishedL2Block.fromBuffer(serialized); - expect(deserialized).toEqual(block); - }); -}); diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index b4d019a3e962..6e43df8c32f7 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -9,9 +9,8 @@ import omit from 'lodash.omit'; import type { ContractArtifact } from '../abi/abi.js'; import { FunctionSelector } from '../abi/function_selector.js'; import { AztecAddress } from '../aztec-address/index.js'; -import { CheckpointedL2Block, PublishedL2Block } from '../block/checkpointed_l2_block.js'; +import { CheckpointedL2Block } from '../block/checkpointed_l2_block.js'; import { CommitteeAttestation, L2BlockHash, L2BlockNew } from '../block/index.js'; -import { L2Block } from '../block/l2_block.js'; import type { L2Tips } from 
'../block/l2_block_source.js'; import type { ValidateCheckpointResult } from '../block/validate_block_result.js'; import { Checkpoint } from '../checkpoint/checkpoint.js'; @@ -88,7 +87,7 @@ describe('ArchiverApiSchema', () => { it('getBlock', async () => { const result = await context.client.getBlock(BlockNumber(1)); - expect(result).toBeInstanceOf(L2Block); + expect(result).toBeInstanceOf(L2BlockNew); }); it('getBlockHeader', async () => { @@ -111,9 +110,19 @@ describe('ArchiverApiSchema', () => { expect(result).toBeInstanceOf(L2BlockNew); }); + it('getL2BlockNewByHash', async () => { + const result = await context.client.getL2BlockNewByHash(Fr.random()); + expect(result).toBeInstanceOf(L2BlockNew); + }); + + it('getL2BlockNewByArchive', async () => { + const result = await context.client.getL2BlockNewByArchive(Fr.random()); + expect(result).toBeInstanceOf(L2BlockNew); + }); + it('getBlocks', async () => { const result = await context.client.getBlocks(BlockNumber(1), BlockNumber(1)); - expect(result).toEqual([expect.any(L2Block)]); + expect(result).toEqual([expect.any(L2BlockNew)]); }); it('getPublishedCheckpoints', async () => { @@ -127,7 +136,7 @@ describe('ArchiverApiSchema', () => { it('getPublishedBlocks', async () => { const response = await context.client.getPublishedBlocks(BlockNumber(1), BlockNumber(1)); expect(response).toHaveLength(1); - expect(response[0].block.constructor.name).toEqual('L2Block'); + expect(response[0].block.constructor.name).toEqual('L2BlockNew'); expect(response[0].attestations[0]).toBeInstanceOf(CommitteeAttestation); expect(response[0].l1).toBeDefined(); }); @@ -135,7 +144,7 @@ describe('ArchiverApiSchema', () => { it('getPublishedBlockByArchive', async () => { const result = await context.client.getPublishedBlockByArchive(Fr.random()); expect(result).toBeDefined(); - expect(result!.block.constructor.name).toEqual('L2Block'); + expect(result!.block.constructor.name).toEqual('L2BlockNew'); 
expect(result!.attestations[0]).toBeInstanceOf(CommitteeAttestation); expect(result!.l1).toBeDefined(); }); @@ -143,7 +152,7 @@ describe('ArchiverApiSchema', () => { it('getPublishedBlockByHash', async () => { const result = await context.client.getPublishedBlockByHash(Fr.random()); expect(result).toBeDefined(); - expect(result!.block.constructor.name).toEqual('L2Block'); + expect(result!.block.constructor.name).toEqual('L2BlockNew'); expect(result!.attestations[0]).toBeInstanceOf(CommitteeAttestation); expect(result!.l1).toBeDefined(); }); @@ -191,7 +200,7 @@ describe('ArchiverApiSchema', () => { it('getBlocksForEpoch', async () => { const result = await context.client.getBlocksForEpoch(EpochNumber(1)); - expect(result).toEqual([expect.any(L2Block)]); + expect(result).toEqual([expect.any(L2BlockNew)]); }); it('getBlocksForSlot', async () => { @@ -376,8 +385,8 @@ class MockArchiver implements ArchiverApi { getProvenBlockNumber(): Promise { return Promise.resolve(BlockNumber(1)); } - getBlock(number: BlockNumber): Promise { - return Promise.resolve(L2Block.random(number)); + getBlock(number: BlockNumber): Promise { + return L2BlockNew.random(number); } getBlockHeader(_number: BlockNumber | 'latest'): Promise { return Promise.resolve(BlockHeader.empty()); @@ -402,8 +411,8 @@ class MockArchiver implements ArchiverApi { }), ]; } - async getBlocks(from: BlockNumber, _limit: number, _proven?: boolean): Promise { - return [await L2Block.random(from)]; + async getBlocks(from: BlockNumber, _limit: number, _proven?: boolean): Promise { + return [await L2BlockNew.random(from)]; } async getPublishedCheckpoints(from: CheckpointNumber, _limit: number): Promise { return [ @@ -417,10 +426,11 @@ class MockArchiver implements ArchiverApi { getCheckpointByArchive(_archive: Fr): Promise { return Promise.resolve(Checkpoint.random()); } - async getPublishedBlocks(from: BlockNumber, _limit: number, _proven?: boolean): Promise { + async getPublishedBlocks(from: BlockNumber, _limit: 
number, _proven?: boolean): Promise { return [ - PublishedL2Block.fromFields({ - block: await L2Block.random(from), + CheckpointedL2Block.fromFields({ + checkpointNumber: CheckpointNumber(1), + block: await L2BlockNew.random(from), attestations: [CommitteeAttestation.random()], l1: new L1PublishedData(1n, 0n, `0x`), }), @@ -432,16 +442,18 @@ class MockArchiver implements ArchiverApi { return [block]; } - async getPublishedBlockByHash(_blockHash: Fr): Promise { - return PublishedL2Block.fromFields({ - block: await L2Block.random(BlockNumber(1)), + async getPublishedBlockByHash(_blockHash: Fr): Promise { + return CheckpointedL2Block.fromFields({ + checkpointNumber: CheckpointNumber(1), + block: await L2BlockNew.random(BlockNumber(1)), attestations: [CommitteeAttestation.random()], l1: new L1PublishedData(1n, 0n, `0x`), }); } - async getPublishedBlockByArchive(_archive: Fr): Promise { - return PublishedL2Block.fromFields({ - block: await L2Block.random(BlockNumber(1)), + async getPublishedBlockByArchive(_archive: Fr): Promise { + return CheckpointedL2Block.fromFields({ + checkpointNumber: CheckpointNumber(1), + block: await L2BlockNew.random(BlockNumber(1)), attestations: [CommitteeAttestation.random()], l1: new L1PublishedData(1n, 0n, `0x`), }); @@ -455,6 +467,12 @@ class MockArchiver implements ArchiverApi { getL2BlockNew(number: BlockNumber): Promise { return L2BlockNew.random(number); } + getL2BlockNewByHash(_blockHash: Fr): Promise { + return L2BlockNew.random(BlockNumber(1)); + } + getL2BlockNewByArchive(_archive: Fr): Promise { + return L2BlockNew.random(BlockNumber(1)); + } async getTxEffect(_txHash: TxHash): Promise { expect(_txHash).toBeInstanceOf(TxHash); return { @@ -478,9 +496,9 @@ class MockArchiver implements ArchiverApi { expect(epochNumber).toEqual(EpochNumber(1)); return [await Checkpoint.random(CheckpointNumber(BlockNumber(1)))]; } - async getBlocksForEpoch(epochNumber: EpochNumber): Promise { + async getBlocksForEpoch(epochNumber: EpochNumber): 
Promise { expect(epochNumber).toEqual(EpochNumber(1)); - return [await L2Block.random(BlockNumber(Number(epochNumber)))]; + return [await L2BlockNew.random(BlockNumber(Number(epochNumber)))]; } async getBlocksForSlot(slotNumber: SlotNumber): Promise { expect(slotNumber).toEqual(SlotNumber(1)); @@ -488,8 +506,8 @@ class MockArchiver implements ArchiverApi { } async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise { expect(epochNumber).toEqual(EpochNumber(1)); - const block = await L2Block.random(BlockNumber(Number(epochNumber))); - return [block.getBlockHeader()]; + const block = await L2BlockNew.random(BlockNumber(Number(epochNumber))); + return [block.header]; } isEpochComplete(epochNumber: EpochNumber): Promise { expect(epochNumber).toEqual(EpochNumber(1)); diff --git a/yarn-project/stdlib/src/interfaces/archiver.ts b/yarn-project/stdlib/src/interfaces/archiver.ts index 9d5df75a28a4..7d1275b8b2c4 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.ts @@ -4,8 +4,7 @@ import type { ApiSchemaFor } from '@aztec/foundation/schemas'; import { z } from 'zod'; -import { CheckpointedL2Block, PublishedL2Block } from '../block/checkpointed_l2_block.js'; -import { L2Block } from '../block/l2_block.js'; +import { CheckpointedL2Block } from '../block/checkpointed_l2_block.js'; import { L2BlockNew } from '../block/l2_block_new.js'; import { type L2BlockSource, L2TipsSchema } from '../block/l2_block_source.js'; import { ValidateCheckpointResultSchema } from '../block/validate_block_result.js'; @@ -83,7 +82,7 @@ export const ArchiverApiSchema: ApiSchemaFor = { getRegistryAddress: z.function().args().returns(schemas.EthAddress), getBlockNumber: z.function().args().returns(BlockNumberSchema), getProvenBlockNumber: z.function().args().returns(BlockNumberSchema), - getBlock: z.function().args(BlockNumberSchema).returns(L2Block.schema.optional()), + getBlock: 
z.function().args(BlockNumberSchema).returns(L2BlockNew.schema.optional()), getBlockHeader: z .function() .args(z.union([BlockNumberSchema, z.literal('latest')])) @@ -96,7 +95,7 @@ export const ArchiverApiSchema: ApiSchemaFor = { getBlocks: z .function() .args(BlockNumberSchema, schemas.Integer, optional(z.boolean())) - .returns(z.array(L2Block.schema)), + .returns(z.array(L2BlockNew.schema)), getPublishedCheckpoints: z .function() .args(CheckpointNumberSchema, schemas.Integer) @@ -104,22 +103,24 @@ export const ArchiverApiSchema: ApiSchemaFor = { getPublishedBlocks: z .function() .args(BlockNumberSchema, schemas.Integer, optional(z.boolean())) - .returns(z.array(PublishedL2Block.schema)), + .returns(z.array(CheckpointedL2Block.schema)), getL2BlocksNew: z .function() .args(BlockNumberSchema, schemas.Integer, optional(z.boolean())) .returns(z.array(L2BlockNew.schema)), - getPublishedBlockByHash: z.function().args(schemas.Fr).returns(PublishedL2Block.schema.optional()), - getPublishedBlockByArchive: z.function().args(schemas.Fr).returns(PublishedL2Block.schema.optional()), + getPublishedBlockByHash: z.function().args(schemas.Fr).returns(CheckpointedL2Block.schema.optional()), + getPublishedBlockByArchive: z.function().args(schemas.Fr).returns(CheckpointedL2Block.schema.optional()), getBlockHeaderByHash: z.function().args(schemas.Fr).returns(BlockHeader.schema.optional()), getBlockHeaderByArchive: z.function().args(schemas.Fr).returns(BlockHeader.schema.optional()), getL2BlockNew: z.function().args(BlockNumberSchema).returns(L2BlockNew.schema.optional()), + getL2BlockNewByHash: z.function().args(schemas.Fr).returns(L2BlockNew.schema.optional()), + getL2BlockNewByArchive: z.function().args(schemas.Fr).returns(L2BlockNew.schema.optional()), getTxEffect: z.function().args(TxHash.schema).returns(indexedTxSchema().optional()), getSettledTxReceipt: z.function().args(TxHash.schema).returns(TxReceipt.schema.optional()), getL2SlotNumber: 
z.function().args().returns(schemas.SlotNumber.optional()), getL2EpochNumber: z.function().args().returns(EpochNumberSchema.optional()), getCheckpointsForEpoch: z.function().args(EpochNumberSchema).returns(z.array(Checkpoint.schema)), - getBlocksForEpoch: z.function().args(EpochNumberSchema).returns(z.array(L2Block.schema)), + getBlocksForEpoch: z.function().args(EpochNumberSchema).returns(z.array(L2BlockNew.schema)), getBlocksForSlot: z.function().args(schemas.SlotNumber).returns(z.array(L2BlockNew.schema)), getBlockHeadersForEpoch: z.function().args(EpochNumberSchema).returns(z.array(BlockHeader.schema)), isEpochComplete: z.function().args(EpochNumberSchema).returns(z.boolean()), diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts index 208884d35a0c..aeca3ed8429c 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts @@ -21,10 +21,9 @@ import times from 'lodash.times'; import type { ContractArtifact } from '../abi/abi.js'; import { AztecAddress } from '../aztec-address/index.js'; -import { PublishedL2Block } from '../block/checkpointed_l2_block.js'; +import { CheckpointedL2Block } from '../block/checkpointed_l2_block.js'; import type { DataInBlock } from '../block/in_block.js'; import { type BlockParameter, CommitteeAttestation, L2BlockHash, L2BlockNew } from '../block/index.js'; -import { L2Block } from '../block/l2_block.js'; import type { L2Tips } from '../block/l2_block_source.js'; import { Checkpoint } from '../checkpoint/checkpoint.js'; import { L1PublishedData, PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; @@ -192,12 +191,17 @@ describe('AztecNodeApiSchema', () => { it('getBlock', async () => { const response = await context.client.getBlock(BlockNumber(1)); - expect(response).toBeInstanceOf(L2Block); + expect(response).toBeInstanceOf(L2BlockNew); + }); + + it('getBlockByHash', async () => { + 
const response = await context.client.getBlockByHash(Fr.random()); + expect(response).toBeInstanceOf(L2BlockNew); }); it('getBlockByArchive', async () => { const response = await context.client.getBlockByArchive(Fr.random()); - expect(response).toBeInstanceOf(L2Block); + expect(response).toBeInstanceOf(L2BlockNew); }); it('getBlockHeader', async () => { @@ -251,7 +255,7 @@ describe('AztecNodeApiSchema', () => { it('getBlocks', async () => { const response = await context.client.getBlocks(BlockNumber(1), BlockNumber(1)); expect(response).toHaveLength(1); - expect(response[0]).toBeInstanceOf(L2Block); + expect(response[0]).toBeInstanceOf(L2BlockNew); await expect(context.client.getBlocks(-1 as BlockNumber, BlockNumber(1))).rejects.toThrow(); await expect(context.client.getBlocks(BlockNumber.ZERO, BlockNumber(1))).rejects.toThrow(); @@ -272,7 +276,7 @@ describe('AztecNodeApiSchema', () => { it('getPublishedBlocks', async () => { const response = await context.client.getPublishedBlocks(BlockNumber(1), BlockNumber(1)); expect(response).toHaveLength(1); - expect(response[0].block.constructor.name).toEqual('L2Block'); + expect(response[0].block.constructor.name).toEqual('L2BlockNew'); expect(response[0].attestations[0]).toBeInstanceOf(CommitteeAttestation); expect(response[0].l1).toBeDefined(); }); @@ -677,17 +681,15 @@ class MockAztecNode implements AztecNode { expect(leafSlot).toBeInstanceOf(Fr); return Promise.resolve(PublicDataWitness.random()); } - getBlock(block: BlockParameter): Promise { - if (block instanceof L2BlockHash) { - // For block hash, we need to get the actual block number first - // Since this is a mock, we'll use a default block number - return Promise.resolve(L2Block.random(BlockNumber(1))); - } - const blockNum = block === 'latest' ? BlockNumber(1) : block; - return Promise.resolve(L2Block.random(blockNum)); + getBlock(number: BlockParameter): Promise { + const blockNum = number === 'latest' ? 
BlockNumber(1) : (number as BlockNumber); + return L2BlockNew.random(blockNum); + } + getBlockByHash(_blockHash: Fr): Promise { + return L2BlockNew.random(BlockNumber(1)); } - getBlockByArchive(_archive: Fr): Promise { - return Promise.resolve(L2Block.random(BlockNumber(1))); + getBlockByArchive(_archive: Fr): Promise { + return L2BlockNew.random(BlockNumber(1)); } getBlockHeaderByArchive(_archive: Fr): Promise { return Promise.resolve(BlockHeader.empty()); @@ -722,17 +724,18 @@ class MockAztecNode implements AztecNode { protocolContractAddresses: Object.fromEntries(protocolContracts) as ProtocolContractAddresses, }; } - getBlocks(from: number, limit: number): Promise { + getBlocks(from: number, limit: number): Promise { return Promise.all( Array(limit) .fill(0) - .map(i => L2Block.random(BlockNumber(from + i))), + .map(i => L2BlockNew.random(BlockNumber(from + i))), ); } - getPublishedBlocks(from: number, limit: number): Promise { + getPublishedBlocks(from: number, limit: number): Promise { return timesAsync(limit, async i => - PublishedL2Block.fromFields({ - block: await L2Block.random(BlockNumber(from + i)), + CheckpointedL2Block.fromFields({ + checkpointNumber: CheckpointNumber(from + i), + block: await L2BlockNew.random(BlockNumber(from + i)), attestations: [CommitteeAttestation.random()], l1: new L1PublishedData(1n, 1n, Buffer32.random().toString()), }), diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.ts b/yarn-project/stdlib/src/interfaces/aztec-node.ts index e7ff6d13909a..f97e227a6e8c 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.ts @@ -24,9 +24,8 @@ import { z } from 'zod'; import type { AztecAddress } from '../aztec-address/index.js'; import { type BlockParameter, BlockParameterSchema } from '../block/block_parameter.js'; -import { CheckpointedL2Block, PublishedL2Block } from '../block/checkpointed_l2_block.js'; +import { CheckpointedL2Block } from 
'../block/checkpointed_l2_block.js'; import { type DataInBlock, dataInBlockSchemaFor } from '../block/in_block.js'; -import { L2Block } from '../block/l2_block.js'; import { L2BlockNew } from '../block/l2_block_new.js'; import { type L2BlockSource, type L2Tips, L2TipsSchema } from '../block/l2_block_source.js'; import { PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; @@ -229,18 +228,25 @@ export interface AztecNode getL2ToL1Messages(epoch: EpochNumber): Promise; /** - * Get a block specified by its block number, block hash, or 'latest'. - * @param block - The block parameter (block number, block hash, or 'latest'). + * Get a block specified by its block number or 'latest'. + * @param number - The block number or 'latest'. * @returns The requested block. */ - getBlock(block: BlockParameter): Promise; + getBlock(number: BlockParameter): Promise; + + /** + * Get a block specified by its hash. + * @param blockHash - The block hash being requested. + * @returns The requested block. + */ + getBlockByHash(blockHash: Fr): Promise; /** * Get a block specified by its archive root. * @param archive - The archive root being requested. * @returns The requested block. */ - getBlockByArchive(archive: Fr): Promise; + getBlockByArchive(archive: Fr): Promise; /** * Method to fetch the latest block number synchronized by the node. @@ -273,7 +279,7 @@ export interface AztecNode * @param limit - The maximum number of blocks to return. * @returns The blocks requested. */ - getBlocks(from: BlockNumber, limit: number): Promise; + getBlocks(from: BlockNumber, limit: number): Promise; /** * Method to fetch the current min fees. 
@@ -548,9 +554,11 @@ export const AztecNodeApiSchema: ApiSchemaFor = { .args(EpochNumberSchema) .returns(z.array(z.array(z.array(z.array(schemas.Fr))))), - getBlock: z.function().args(BlockParameterSchema).returns(L2Block.schema.optional()), + getBlock: z.function().args(BlockParameterSchema).returns(L2BlockNew.schema.optional()), - getBlockByArchive: z.function().args(schemas.Fr).returns(L2Block.schema.optional()), + getBlockByHash: z.function().args(schemas.Fr).returns(L2BlockNew.schema.optional()), + + getBlockByArchive: z.function().args(schemas.Fr).returns(L2BlockNew.schema.optional()), getBlockNumber: z.function().returns(BlockNumberSchema), @@ -563,12 +571,12 @@ export const AztecNodeApiSchema: ApiSchemaFor = { getBlocks: z .function() .args(BlockNumberPositiveSchema, z.number().gt(0).lte(MAX_RPC_BLOCKS_LEN)) - .returns(z.array(L2Block.schema)), + .returns(z.array(L2BlockNew.schema)), getPublishedBlocks: z .function() .args(BlockNumberPositiveSchema, z.number().gt(0).lte(MAX_RPC_BLOCKS_LEN)) - .returns(z.array(PublishedL2Block.schema)), + .returns(z.array(CheckpointedL2Block.schema)), getPublishedCheckpoints: z .function() diff --git a/yarn-project/stdlib/src/interfaces/block-builder.ts b/yarn-project/stdlib/src/interfaces/block-builder.ts index b71cd11f622a..f32475853677 100644 --- a/yarn-project/stdlib/src/interfaces/block-builder.ts +++ b/yarn-project/stdlib/src/interfaces/block-builder.ts @@ -1,17 +1,16 @@ -import type { BlockNumber } from '@aztec/foundation/branded-types'; +import type { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; import type { Fr } from '@aztec/foundation/curves/bn254'; -import type { Timer } from '@aztec/foundation/timer'; -import type { L2Block } from '../block/l2_block.js'; +import type { L2BlockNew } from '../block/l2_block_new.js'; import type { ChainConfig, SequencerConfig } from '../config/chain-config.js'; import type { L1RollupConstants } from '../epoch-helpers/index.js'; import type { Gas } from 
'../gas/gas.js'; -import type { MerkleTreeWriteOperations } from '../trees/index.js'; import type { BlockHeader } from '../tx/block_header.js'; -import type { GlobalVariables } from '../tx/global_variables.js'; +import type { CheckpointGlobalVariables, GlobalVariables } from '../tx/global_variables.js'; import type { FailedTx, ProcessedTx } from '../tx/processed_tx.js'; import { Tx } from '../tx/tx.js'; import type { TxValidator } from '../tx/validator/tx_validator.js'; +import type { MerkleTreeWriteOperations } from './merkle_tree_operations.js'; import type { ProcessedTxHandler } from './processed-tx-handler.js'; /** The interface to a block builder. Generates an L2 block out of a set of processed txs. */ @@ -32,7 +31,7 @@ export interface IBlockFactory extends ProcessedTxHandler { /** * Assembles the block and updates the archive tree. */ - setBlockCompleted(expectedBlockHeader?: BlockHeader): Promise; + setBlockCompleted(expectedBlockHeader?: BlockHeader): Promise; } export interface PublicProcessorLimits { @@ -47,17 +46,6 @@ export interface PublicProcessorValidator { preprocessValidator?: TxValidator; nullifierCache?: { addNullifiers: (nullifiers: Buffer[]) => void }; } -export interface BuildBlockResult { - block: L2Block; - publicGas: Gas; - publicProcessorDuration: number; - numMsgs: number; - numTxs: number; - failedTxs: FailedTx[]; - blockBuildingTimer: Timer; - usedTxs: Tx[]; - usedTxBlobFields: number; -} export type FullNodeBlockBuilderConfig = Pick & Pick & @@ -73,19 +61,36 @@ export const FullNodeBlockBuilderConfigKeys: (keyof FullNodeBlockBuilderConfig)[ 'fakeThrowAfterProcessingTxCount', ] as const; -export interface IFullNodeBlockBuilder { - getConfig(): FullNodeBlockBuilderConfig; - - updateConfig(config: Partial): void; +/** Result of building a block within a checkpoint. 
*/ +export interface BuildBlockInCheckpointResult { + block: L2BlockNew; + publicGas: Gas; + publicProcessorDuration: number; + numTxs: number; + failedTxs: FailedTx[]; + usedTxs: Tx[]; + usedTxBlobFields: number; +} +/** Interface for building blocks within a checkpoint context. */ +export interface ICheckpointBlockBuilder { buildBlock( - txs: Iterable | AsyncIterable, - l1ToL2Messages: Fr[], - previousCheckpointOutHashes: Fr[], - globalVariables: GlobalVariables, - options: PublicProcessorLimits, - fork?: MerkleTreeWriteOperations, - ): Promise; + pendingTxs: Iterable | AsyncIterable, + blockNumber: BlockNumber, + timestamp: bigint, + opts: PublicProcessorLimits, + ): Promise; +} +/** Interface for creating checkpoint builders. */ +export interface ICheckpointsBuilder { getFork(blockNumber: BlockNumber): Promise; + + startCheckpoint( + checkpointNumber: CheckpointNumber, + constants: CheckpointGlobalVariables, + l1ToL2Messages: Fr[], + previousCheckpointOutHashes: Fr[], + fork: MerkleTreeWriteOperations, + ): Promise; } diff --git a/yarn-project/stdlib/src/p2p/block_proposal.ts b/yarn-project/stdlib/src/p2p/block_proposal.ts index a27a18bb51e5..6e4d6bdaa626 100644 --- a/yarn-project/stdlib/src/p2p/block_proposal.ts +++ b/yarn-project/stdlib/src/p2p/block_proposal.ts @@ -8,8 +8,8 @@ import { Signature } from '@aztec/foundation/eth-signature'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types'; -import type { L2Block } from '../block/l2_block.js'; import type { L2BlockInfo } from '../block/l2_block_info.js'; +import type { L2BlockNew } from '../block/l2_block_new.js'; import { MAX_TXS_PER_BLOCK } from '../deserialization/index.js'; import { BlockHeader } from '../tx/block_header.js'; import { TxHash } from '../tx/index.js'; @@ -284,11 +284,11 @@ export class BlockProposal extends Gossipable { /** * Check if this proposal matches the given block. 
* Compares the archive root and block header. - * @param block - The L2Block to compare against + * @param block - The L2BlockNew to compare against * @returns True if the proposal matches the block */ - matchesBlock(block: L2Block): boolean { - return this.archiveRoot.equals(block.archive.root) && this.blockHeader.equals(block.getBlockHeader()); + matchesBlock(block: L2BlockNew): boolean { + return this.archiveRoot.equals(block.archive.root) && this.blockHeader.equals(block.header); } /** diff --git a/yarn-project/stdlib/src/p2p/consensus_payload.ts b/yarn-project/stdlib/src/p2p/consensus_payload.ts index e8a257aaa97c..b7341e9ffe2b 100644 --- a/yarn-project/stdlib/src/p2p/consensus_payload.ts +++ b/yarn-project/stdlib/src/p2p/consensus_payload.ts @@ -7,7 +7,6 @@ import type { FieldsOf } from '@aztec/foundation/types'; import { encodeAbiParameters, parseAbiParameters } from 'viem'; import { z } from 'zod'; -import type { L2Block } from '../block/l2_block.js'; import type { Checkpoint } from '../checkpoint/checkpoint.js'; import { CheckpointHeader } from '../rollup/checkpoint_header.js'; import type { CheckpointProposal, CheckpointProposalCore } from './checkpoint_proposal.js'; @@ -76,10 +75,6 @@ export class ConsensusPayload implements Signable { return new ConsensusPayload(fields.header, fields.archive); } - static fromBlock(block: L2Block): ConsensusPayload { - return new ConsensusPayload(block.header.toCheckpointHeader(), block.archive.root); - } - static fromCheckpoint(checkpoint: Checkpoint): ConsensusPayload { return new ConsensusPayload(checkpoint.header, checkpoint.archive.root); } diff --git a/yarn-project/stdlib/src/tests/factories.ts b/yarn-project/stdlib/src/tests/factories.ts index 2505a384dafa..abc97d9871b7 100644 --- a/yarn-project/stdlib/src/tests/factories.ts +++ b/yarn-project/stdlib/src/tests/factories.ts @@ -87,7 +87,6 @@ import { import { PublicDataRead } from '../avm/public_data_read.js'; import { PublicDataWrite } from 
'../avm/public_data_write.js'; import { AztecAddress } from '../aztec-address/index.js'; -import { L2BlockHeader } from '../block/l2_block_header.js'; import type { L2Tips } from '../block/l2_block_source.js'; import { type ContractClassPublic, @@ -911,30 +910,7 @@ export function makeBlockHeader( }); } -export function makeL2BlockHeader( - seed = 0, - blockNumber?: number, - slotNumber?: number, - overrides: Partial> = {}, -) { - return new L2BlockHeader( - makeAppendOnlyTreeSnapshot(seed + 0x100), - overrides?.blobsHash ?? fr(seed + 0x200), - overrides?.inHash ?? fr(seed + 0x300), - overrides?.epochOutHash ?? fr(seed + 0x400), - overrides?.state ?? makeStateReference(seed + 0x600), - makeGlobalVariables((seed += 0x700), { - ...(blockNumber !== undefined ? { blockNumber: BlockNumber(blockNumber) } : {}), - ...(slotNumber !== undefined ? { slotNumber: SlotNumber(slotNumber) } : {}), - }), - new Fr(seed + 0x800), - new Fr(seed + 0x900), - new Fr(seed + 0xa00), - new Fr(seed + 0xb00), - ); -} - -export function makeCheckpointHeader(seed = 0) { +export function makeCheckpointHeader(seed = 0, overrides: Partial> = {}) { return CheckpointHeader.from({ lastArchiveRoot: fr(seed + 0x100), blockHeadersHash: fr(seed + 0x150), @@ -947,6 +923,7 @@ export function makeCheckpointHeader(seed = 0) { feeRecipient: makeAztecAddress(seed + 0x600), gasFees: makeGasFees(seed + 0x700), totalManaUsed: fr(seed + 0x800), + ...overrides, }); } diff --git a/yarn-project/stdlib/src/tests/jest.ts b/yarn-project/stdlib/src/tests/jest.ts index aa67b937d350..8135a1a94367 100644 --- a/yarn-project/stdlib/src/tests/jest.ts +++ b/yarn-project/stdlib/src/tests/jest.ts @@ -1,26 +1,26 @@ -import { L2Block } from '../block/l2_block.js'; +import { L2BlockNew } from '../block/l2_block_new.js'; /** - * Checks if two objects are the same L2Block. + * Checks if two objects are the same L2BlockNew. 
* - * Sometimes we might be comparing two L2Block instances that represent the same block but one of them might not have + * Sometimes we might be comparing two L2BlockNew instances that represent the same block but one of them might not have * calculated and filled its `blockHash` property (which is computed on demand). This function ensures both objects - * are really the same L2Block. + * are really the same L2BlockNew. * * @param a - An object * @param b - Another object - * @returns True if both a and b are the same L2Block + * @returns True if both a and b are the same L2BlockNew */ export function equalL2Blocks(a: any, b: any) { - const aAsL2Block = a && a instanceof L2Block ? a : undefined; - const bAsL2Block = b && b instanceof L2Block ? b : undefined; + const aAsL2Block = a && a instanceof L2BlockNew ? a : undefined; + const bAsL2Block = b && b instanceof L2BlockNew ? b : undefined; if (aAsL2Block && bAsL2Block) { - // we got two L2Block instances, so we can compare them + // we got two L2BlockNew instances, so we can compare them // use a custom comparator because the blockHash property is lazily computed and one instance might not have it return aAsL2Block.toBuffer().equals(bAsL2Block.toBuffer()); } else if (aAsL2Block || bAsL2Block) { - // one value is an L2block and the other isn't. Definitely not equal. + // one value is an L2BlockNew and the other isn't. Definitely not equal. 
return false; } else { // we don't know what they are, tell Jest to keep looking diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index 13a32ce71958..26dd44841343 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -22,8 +22,7 @@ import { AvmCircuitPublicInputs } from '../avm/avm_circuit_public_inputs.js'; import { PublicDataWrite } from '../avm/public_data_write.js'; import { RevertCode } from '../avm/revert_code.js'; import { AztecAddress } from '../aztec-address/index.js'; -import { CommitteeAttestation, L2BlockHeader, L2BlockNew, PublishedL2Block } from '../block/index.js'; -import { L2Block } from '../block/l2_block.js'; +import { CheckpointedL2Block, CommitteeAttestation, L2BlockNew } from '../block/index.js'; import type { CommitteeAttestationsAndSigners } from '../block/proposal/attestations_and_signers.js'; import { Checkpoint } from '../checkpoint/checkpoint.js'; import { L1PublishedData } from '../checkpoint/published_checkpoint.js'; @@ -76,9 +75,9 @@ import { makeAvmCircuitInputs, makeAztecAddress, makeBlockHeader, + makeCheckpointHeader, makeGas, makeGlobalVariables, - makeL2BlockHeader, makePrivateToPublicAccumulatedData, makePrivateToRollupAccumulatedData, makeProtocolContracts, @@ -493,7 +492,7 @@ export interface MakeConsensusPayloadOptions { signer?: Secp256k1Signer; attesterSigner?: Secp256k1Signer; proposerSigner?: Secp256k1Signer; - header?: L2BlockHeader; + header?: CheckpointHeader; archive?: Fr; txHashes?: TxHash[]; txs?: Tx[]; @@ -501,7 +500,7 @@ export interface MakeConsensusPayloadOptions { export interface MakeBlockProposalOptions { signer?: Secp256k1Signer; - blockHeader?: L2BlockHeader; + blockHeader?: BlockHeader; indexWithinCheckpoint?: number; inHash?: Fr; archiveRoot?: Fr; @@ -515,7 +514,7 @@ export interface MakeCheckpointProposalOptions { archiveRoot?: Fr; /** Options for the lastBlock - if undefined, no lastBlock is included */ 
lastBlock?: { - blockHeader?: L2BlockHeader; + blockHeader?: BlockHeader; indexWithinCheckpoint?: number; txHashes?: TxHash[]; txs?: Tx[]; @@ -527,18 +526,18 @@ const makeAndSignConsensusPayload = ( domainSeparator: SignatureDomainSeparator, options?: MakeConsensusPayloadOptions, ) => { - const header = options?.header ?? makeL2BlockHeader(1); + const header = options?.header ?? makeCheckpointHeader(1); const { signer = Secp256k1Signer.random(), archive = Fr.random() } = options ?? {}; const payload = ConsensusPayload.fromFields({ - header: header.toCheckpointHeader(), + header, archive, }); const hash = getHashedSignaturePayloadEthSignedMessage(payload, domainSeparator); const signature = signer.sign(hash); - return { blockNumber: header.globalVariables.blockNumber, payload, signature }; + return { blockNumber: header.slotNumber, payload, signature }; }; export const makeAndSignCommitteeAttestationsAndSigners = ( @@ -553,8 +552,7 @@ export const makeAndSignCommitteeAttestationsAndSigners = ( }; export const makeBlockProposal = (options?: MakeBlockProposalOptions): Promise => { - const l2BlockHeader = options?.blockHeader ?? makeL2BlockHeader(1); - const blockHeader = l2BlockHeader.toBlockHeader(); + const blockHeader = options?.blockHeader ?? makeBlockHeader(1); const indexWithinCheckpoint = options?.indexWithinCheckpoint ?? 0; const inHash = options?.inHash ?? Fr.random(); const archiveRoot = options?.archiveRoot ?? Fr.random(); @@ -574,15 +572,15 @@ export const makeBlockProposal = (options?: MakeBlockProposalOptions): Promise => { - const l2BlockHeader = options?.lastBlock?.blockHeader ?? makeL2BlockHeader(1); - const checkpointHeader = options?.checkpointHeader ?? l2BlockHeader.toCheckpointHeader(); + const blockHeader = options?.lastBlock?.blockHeader ?? makeBlockHeader(1); + const checkpointHeader = options?.checkpointHeader ?? makeCheckpointHeader(1); const archiveRoot = options?.archiveRoot ?? Fr.random(); const signer = options?.signer ?? 
Secp256k1Signer.random(); // Build lastBlock info if provided const lastBlockInfo = options?.lastBlock ? { - blockHeader: l2BlockHeader.toBlockHeader(), + blockHeader, indexWithinCheckpoint: options.lastBlock.indexWithinCheckpoint ?? 4, // Last block in a 5-block checkpoint txHashes: options.lastBlock.txHashes ?? [0, 1, 2, 3, 4, 5].map(() => TxHash.random()), txs: options.lastBlock.txs, @@ -609,7 +607,7 @@ export type MakeCheckpointAttestationOptions = { * Create a checkpoint attestation for testing */ export const makeCheckpointAttestation = (options: MakeCheckpointAttestationOptions = {}): CheckpointAttestation => { - const header = options.header ?? makeL2BlockHeader(1).toCheckpointHeader(); + const header = options.header ?? makeCheckpointHeader(1); const archive = options.archive ?? Fr.random(); const { signer, attesterSigner = signer, proposerSigner = signer } = options; @@ -672,14 +670,21 @@ export const makeCheckpointAttestationFromCheckpoint = ( }; /** - * Create a checkpoint attestation from an L2Block + * Create a checkpoint attestation from an L2BlockNew + * Note: This is a compatibility function for tests. L2BlockNew doesn't have a checkpoint header directly. 
*/ export const makeCheckpointAttestationFromBlock = ( - block: L2Block, + block: L2BlockNew, attesterSigner?: Secp256k1Signer, proposerSigner?: Secp256k1Signer, ): CheckpointAttestation => { - const header = block.header.toCheckpointHeader(); + // For L2BlockNew, we create a minimal checkpoint header for testing purposes + const header = CheckpointHeader.empty({ + lastArchiveRoot: block.header.lastArchive.root, + slotNumber: block.slot, + timestamp: block.timestamp, + blockHeadersHash: Fr.ZERO, // Would need to compute from block header hash + }); const archive = block.archive.root; return makeCheckpointAttestation({ header, archive, attesterSigner, proposerSigner }); @@ -688,8 +693,8 @@ export const makeCheckpointAttestationFromBlock = ( export async function randomPublishedL2Block( l2BlockNumber: number, opts: { signers?: Secp256k1Signer[] } = {}, -): Promise { - const block = await L2Block.random(BlockNumber(l2BlockNumber)); +): Promise { + const block = await L2BlockNew.random(BlockNumber(l2BlockNumber)); const l1 = L1PublishedData.fromFields({ blockNumber: BigInt(block.number), timestamp: block.header.globalVariables.timestamp, @@ -697,15 +702,17 @@ export async function randomPublishedL2Block( }); const signers = opts.signers ?? 
times(3, () => Secp256k1Signer.random()); + const checkpoint = await Checkpoint.random(CheckpointNumber(l2BlockNumber), { numBlocks: 0 }); + checkpoint.blocks = [block]; const atts = signers.map(signer => makeCheckpointAttestation({ signer, archive: block.archive.root, - header: block.header.toCheckpointHeader(), + header: checkpoint.header, }), ); const attestations = atts.map( (attestation, i) => new CommitteeAttestation(signers[i].address, attestation.signature), ); - return new PublishedL2Block(block, l1, attestations); + return new CheckpointedL2Block(CheckpointNumber(l2BlockNumber), block, l1, attestations); } diff --git a/yarn-project/txe/src/oracle/txe_oracle_public_context.ts b/yarn-project/txe/src/oracle/txe_oracle_public_context.ts index 61e00340b32d..82ccd0263051 100644 --- a/yarn-project/txe/src/oracle/txe_oracle_public_context.ts +++ b/yarn-project/txe/src/oracle/txe_oracle_public_context.ts @@ -3,7 +3,7 @@ import { Fr } from '@aztec/foundation/curves/bn254'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { PublicDataWrite } from '@aztec/stdlib/avm'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { L2Block } from '@aztec/stdlib/block'; +import type { L2BlockNew } from '@aztec/stdlib/block'; import { computePublicDataTreeLeafSlot, siloNoteHash, siloNullifier } from '@aztec/stdlib/hash'; import { MerkleTreeId, @@ -124,7 +124,7 @@ export class TXEOraclePublicContext implements IAvmExecutionOracle { return value; } - async close(): Promise { + async close(): Promise { this.logger.debug('Exiting Public Context, building block with collected side effects', { blockNumber: this.globalVariables.blockNumber, }); diff --git a/yarn-project/txe/src/state_machine/index.ts b/yarn-project/txe/src/state_machine/index.ts index 965e54f44898..fb7ac830ab10 100644 --- a/yarn-project/txe/src/state_machine/index.ts +++ b/yarn-project/txe/src/state_machine/index.ts @@ -3,8 +3,8 @@ import { TestCircuitVerifier } from 
'@aztec/bb-prover/test'; import { createLogger } from '@aztec/foundation/log'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; import { AnchorBlockStore } from '@aztec/pxe/server'; -import { L2Block } from '@aztec/stdlib/block'; -import { L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { L2BlockNew } from '@aztec/stdlib/block'; +import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { AztecNode } from '@aztec/stdlib/interfaces/client'; import { getPackageVersion } from '@aztec/stdlib/update-checker'; @@ -58,8 +58,16 @@ export class TXEStateMachine { return new this(node, synchronizer, archiver, anchorBlockStore); } - public async handleL2Block(block: L2Block) { - const checkpoint = block.toCheckpoint(); + public async handleL2Block(block: L2BlockNew) { + // Create a checkpoint from the block - L2BlockNew doesn't have toCheckpoint() method + // We need to construct the Checkpoint manually + const checkpoint = await Checkpoint.random(block.checkpointNumber, { + numBlocks: 1, + startBlockNumber: Number(block.number), + }); + // Replace the random block with our actual block + checkpoint.blocks = [block]; + const publishedCheckpoint = new PublishedCheckpoint( checkpoint, new L1PublishedData( @@ -70,9 +78,9 @@ export class TXEStateMachine { [], ); await Promise.all([ - this.synchronizer.handleL2Block(block.toL2Block()), + this.synchronizer.handleL2Block(block), // L2BlockNew doesn't need toL2Block() conversion this.archiver.addCheckpoints([publishedCheckpoint], undefined), - this.anchorBlockStore.setHeader(block.getBlockHeader()), + this.anchorBlockStore.setHeader(block.header), // Use .header property directly ]); } } diff --git a/yarn-project/txe/src/utils/block_creation.ts b/yarn-project/txe/src/utils/block_creation.ts index dced0b469bc7..9e67c324af18 100644 --- a/yarn-project/txe/src/utils/block_creation.ts +++ b/yarn-project/txe/src/utils/block_creation.ts @@ -4,12 +4,12 @@ 
import { NULLIFIER_SUBTREE_HEIGHT, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, } from '@aztec/constants'; -import { BlockNumber } from '@aztec/foundation/branded-types'; +import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; import { padArrayEnd } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; -import { Body, L2Block, L2BlockHeader } from '@aztec/stdlib/block'; +import { Body, L2BlockNew } from '@aztec/stdlib/block'; import { AppendOnlyTreeSnapshot, MerkleTreeId, type MerkleTreeWriteOperations } from '@aztec/stdlib/trees'; -import { GlobalVariables, TxEffect } from '@aztec/stdlib/tx'; +import { BlockHeader, GlobalVariables, TxEffect } from '@aztec/stdlib/tx'; /** * Returns a transaction request hash that is valid for transactions that are the only ones in a block. @@ -46,26 +46,22 @@ export async function insertTxEffectIntoWorldTrees( export async function makeTXEBlockHeader( worldTrees: MerkleTreeWriteOperations, globalVariables: GlobalVariables, -): Promise { +): Promise { const stateReference = await worldTrees.getStateReference(); const archiveInfo = await worldTrees.getTreeInfo(MerkleTreeId.ARCHIVE); - return new L2BlockHeader( - new AppendOnlyTreeSnapshot(new Fr(archiveInfo.root), Number(archiveInfo.size)), - Fr.ZERO, - Fr.ZERO, - Fr.ZERO, - stateReference, + return BlockHeader.from({ + lastArchive: new AppendOnlyTreeSnapshot(new Fr(archiveInfo.root), Number(archiveInfo.size)), + spongeBlobHash: Fr.ZERO, + state: stateReference, globalVariables, - Fr.ZERO, - Fr.ZERO, - Fr.ZERO, - Fr.ZERO, - ); + totalFees: Fr.ZERO, + totalManaUsed: Fr.ZERO, + }); } /** - * Creates an L2Block with proper archive chaining. + * Creates an L2BlockNew with proper archive chaining. * This function: * 1. Gets the current archive state as lastArchive for the header * 2. 
Creates the block header @@ -75,21 +71,25 @@ export async function makeTXEBlockHeader( * @param worldTrees - The world trees to read/write from * @param globalVariables - Global variables for the block * @param txEffects - Transaction effects to include in the block - * @returns The created L2Block with proper archive chaining + * @returns The created L2BlockNew with proper archive chaining */ export async function makeTXEBlock( worldTrees: MerkleTreeWriteOperations, globalVariables: GlobalVariables, txEffects: TxEffect[], -): Promise { +): Promise { const header = await makeTXEBlockHeader(worldTrees, globalVariables); // Update the archive tree with this block's header hash - await worldTrees.updateArchive(header.toBlockHeader()); + await worldTrees.updateArchive(header); // Get the new archive state after updating const newArchiveInfo = await worldTrees.getTreeInfo(MerkleTreeId.ARCHIVE); const newArchive = new AppendOnlyTreeSnapshot(new Fr(newArchiveInfo.root), Number(newArchiveInfo.size)); - return new L2Block(newArchive, header, new Body(txEffects)); + // L2BlockNew requires checkpointNumber and indexWithinCheckpoint + const checkpointNumber = CheckpointNumber.fromBlockNumber(globalVariables.blockNumber); + const indexWithinCheckpoint = 0; + + return new L2BlockNew(newArchive, header, new Body(txEffects), checkpointNumber, indexWithinCheckpoint); } diff --git a/yarn-project/validator-client/README.md b/yarn-project/validator-client/README.md index 52d0cb93af90..4943f1642732 100644 --- a/yarn-project/validator-client/README.md +++ b/yarn-project/validator-client/README.md @@ -230,7 +230,7 @@ Tests typically mock these dependencies: let epochCache: MockProxy; let blockSource: MockProxy; let txProvider: MockProxy; -let blockBuilder: MockProxy; +let checkpointsBuilder: MockProxy; let p2pClient: MockProxy; beforeEach(() => { @@ -245,19 +245,19 @@ beforeEach(() => { Use factory functions from `@aztec/stdlib/testing`: ```typescript -import { makeBlockProposal, 
makeCheckpointProposal, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { makeBlockHeader, makeBlockProposal, makeCheckpointHeader, makeCheckpointProposal } from '@aztec/stdlib/testing'; // These are async - always await const blockProposal = await makeBlockProposal({ - blockHeader: makeL2BlockHeader(1, 100, 100), // epoch, block, slot + blockHeader: makeBlockHeader(1, { blockNumber: BlockNumber(100), slotNumber: SlotNumber(100) }), indexWithinCheckpoint: 0, signer: Secp256k1Signer.random(), }); const checkpointProposal = await makeCheckpointProposal({ - checkpointHeader: makeL2BlockHeader(1, 100, 100).toCheckpointHeader(), + checkpointHeader: makeCheckpointHeader(1, { slotNumber: SlotNumber(100) }), signer: proposer, - lastBlock: { blockHeader, txs }, + lastBlock: { blockHeader: makeBlockHeader(1), txs }, }); ``` diff --git a/yarn-project/validator-client/src/checkpoint_builder.ts b/yarn-project/validator-client/src/checkpoint_builder.ts index 549ff480c64e..8238adeaf0ce 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.ts @@ -15,37 +15,39 @@ import { import { L2BlockNew } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; import type { ContractDataSource } from '@aztec/stdlib/contract'; +import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { Gas } from '@aztec/stdlib/gas'; import { + type BuildBlockInCheckpointResult, type FullNodeBlockBuilderConfig, FullNodeBlockBuilderConfigKeys, + type ICheckpointBlockBuilder, + type ICheckpointsBuilder, type MerkleTreeWriteOperations, type PublicProcessorLimits, + type WorldStateSynchronizer, } from '@aztec/stdlib/interfaces/server'; import { MerkleTreeId } from '@aztec/stdlib/trees'; -import { type CheckpointGlobalVariables, type FailedTx, GlobalVariables, StateReference, Tx } from '@aztec/stdlib/tx'; +import { type CheckpointGlobalVariables, GlobalVariables, StateReference, Tx } from 
'@aztec/stdlib/tx'; import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; import { createValidatorForBlockBuilding } from './tx_validator/tx_validator_factory.js'; +// Re-export for backward compatibility +export type { BuildBlockInCheckpointResult } from '@aztec/stdlib/interfaces/server'; + const log = createLogger('checkpoint-builder'); -export interface BuildBlockInCheckpointResult { - block: L2BlockNew; - publicGas: Gas; - publicProcessorDuration: number; - numTxs: number; - failedTxs: FailedTx[]; +/** Result of building a block within a checkpoint. Extends the base interface with timer. */ +export interface BuildBlockInCheckpointResultWithTimer extends BuildBlockInCheckpointResult { blockBuildingTimer: Timer; - usedTxs: Tx[]; - usedTxBlobFields: number; } /** * Builder for a single checkpoint. Handles building blocks within the checkpoint * and completing it. */ -export class CheckpointBuilder { +export class CheckpointBuilder implements ICheckpointBlockBuilder { constructor( private checkpointBuilder: LightweightCheckpointBuilder, private fork: MerkleTreeWriteOperations, @@ -67,7 +69,7 @@ export class CheckpointBuilder { blockNumber: BlockNumber, timestamp: bigint, opts: PublicProcessorLimits & { expectedEndState?: StateReference }, - ): Promise { + ): Promise { const blockBuildingTimer = new Timer(); const slot = this.checkpointBuilder.constants.slotNumber; @@ -172,12 +174,11 @@ export class CheckpointBuilder { } } -/** - * Factory for creating checkpoint builders. - */ -export class FullNodeCheckpointsBuilder { +/** Factory for creating checkpoint builders. 
*/ +export class FullNodeCheckpointsBuilder implements ICheckpointsBuilder { constructor( - private config: FullNodeBlockBuilderConfig, + private config: FullNodeBlockBuilderConfig & Pick, + private worldState: WorldStateSynchronizer, private contractDataSource: ContractDataSource, private dateProvider: DateProvider, private telemetryClient: TelemetryClient = getTelemetryClient(), @@ -275,4 +276,9 @@ export class FullNodeCheckpointsBuilder { this.telemetryClient, ); } + + /** Returns a fork of the world state at the given block number. */ + getFork(blockNumber: BlockNumber): Promise { + return this.worldState.fork(blockNumber); + } } diff --git a/yarn-project/validator-client/src/duties/validation_service.test.ts b/yarn-project/validator-client/src/duties/validation_service.test.ts index 77ed30f45f65..0d33a4dbd3ee 100644 --- a/yarn-project/validator-client/src/duties/validation_service.test.ts +++ b/yarn-project/validator-client/src/duties/validation_service.test.ts @@ -2,7 +2,7 @@ import { getAddressFromPrivateKey } from '@aztec/ethereum/account'; import { Buffer32 } from '@aztec/foundation/buffer'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { makeCheckpointProposal, makeL2BlockHeader } from '@aztec/stdlib/testing'; +import { makeBlockHeader, makeCheckpointHeader, makeCheckpointProposal } from '@aztec/stdlib/testing'; import { Tx } from '@aztec/stdlib/tx'; import { DutyType } from '@aztec/validator-ha-signer/types'; @@ -26,8 +26,7 @@ describe('ValidationService', () => { it('creates a block proposal with txs appended', async () => { const txs = await Promise.all([Tx.random(), Tx.random()]); - const l2BlockHeader = makeL2BlockHeader(1, 2, 3); - const blockHeader = l2BlockHeader.toBlockHeader(); + const blockHeader = makeBlockHeader(1); const indexWithinCheckpoint = 0; const inHash = Fr.random(); const archive = Fr.random(); @@ -48,8 +47,7 @@ describe('ValidationService', () => { 
it('creates a block proposal without txs appended', async () => { const txs = await Promise.all([Tx.random(), Tx.random()]); - const l2BlockHeader = makeL2BlockHeader(1, 2, 3); - const blockHeader = l2BlockHeader.toBlockHeader(); + const blockHeader = makeBlockHeader(1); const indexWithinCheckpoint = 0; const inHash = Fr.random(); const archive = Fr.random(); @@ -82,8 +80,7 @@ describe('ValidationService', () => { // Now they should use CHECKPOINT_PROPOSAL and BLOCK_PROPOSAL respectively. const txs = await Promise.all([Tx.random(), Tx.random()]); - const l2BlockHeader = makeL2BlockHeader(1, 2, 3); - const blockHeader = l2BlockHeader.toBlockHeader(); + const blockHeader = makeBlockHeader(1); const indexWithinCheckpoint = 0; const archive = Fr.random(); @@ -104,7 +101,7 @@ describe('ValidationService', () => { const spyService = new ValidationService(spyStore as any); // Create checkpoint header - const checkpointHeader = l2BlockHeader.toCheckpointHeader(); + const checkpointHeader = makeCheckpointHeader(1); // Create checkpoint proposal with lastBlock const proposal = await spyService.createCheckpointProposal( diff --git a/yarn-project/validator-client/src/validator.ha.integration.test.ts b/yarn-project/validator-client/src/validator.ha.integration.test.ts index 94a7cf46875c..c7164f56c01b 100644 --- a/yarn-project/validator-client/src/validator.ha.integration.test.ts +++ b/yarn-project/validator-client/src/validator.ha.integration.test.ts @@ -19,7 +19,7 @@ import type { L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; import type { SlasherConfig, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; -import { makeCheckpointProposal, makeL2BlockHeader, mockTx } from '@aztec/stdlib/testing'; +import { makeBlockHeader, makeCheckpointHeader, makeCheckpointProposal, mockTx } from '@aztec/stdlib/testing'; import 
{ TxHash } from '@aztec/stdlib/tx'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { INSERT_SCHEMA_VERSION, SCHEMA_SETUP, SCHEMA_VERSION } from '@aztec/validator-ha-signer/db'; @@ -216,7 +216,7 @@ describe('ValidatorClient HA Integration', () => { describe('High-Availability signing coordination', () => { it('should allow only one validator instance to create a block proposal for the same slot', async () => { // Use all 5 validators - all try to create the same block proposal - const blockHeader = makeL2BlockHeader(1, 100, 100).toBlockHeader(); + const blockHeader = makeBlockHeader(1); const indexWithinCheckpoint = 0; const inHash = computeInHashFromL1ToL2Messages([]); const archive = Fr.random(); @@ -256,7 +256,7 @@ describe('ValidatorClient HA Integration', () => { // Each of the 5 validators creates a proposal for a different slot const proposals = await Promise.all( validators.map((v, i) => { - const blockHeader = makeL2BlockHeader(1, 100 + i, 100 + i).toBlockHeader(); + const blockHeader = makeBlockHeader(i + 1); const archive = Fr.random(); return v.createBlockProposal(blockHeader, 0, inHash, archive, txs, proposerAddress, { publishFullTxs: false, @@ -278,9 +278,9 @@ describe('ValidatorClient HA Integration', () => { const testSlot = 200; const txHashes = [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); const checkpointProposal = await makeCheckpointProposal({ - checkpointHeader: makeL2BlockHeader(1, 100, testSlot).toCheckpointHeader(), + checkpointHeader: makeCheckpointHeader(testSlot), lastBlock: { - blockHeader: makeL2BlockHeader(1, 100, testSlot), + blockHeader: makeBlockHeader(testSlot), txHashes, }, }); diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index fc4778367749..a23727de63c8 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -26,17 +26,14 @@ import type { L2BlockNew, L2BlockSink, 
L2BlockSource } from '@aztec/stdlib/block import type { getEpochAtSlot } from '@aztec/stdlib/epoch-helpers'; import { Gas } from '@aztec/stdlib/gas'; import type { SlasherConfig, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; -import { - type L1ToL2MessageSource, - accumulateCheckpointOutHashes, - computeInHashFromL1ToL2Messages, -} from '@aztec/stdlib/messaging'; +import { type L1ToL2MessageSource, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; import type { BlockProposal } from '@aztec/stdlib/p2p'; import { + makeBlockHeader, makeBlockProposal, makeCheckpointAttestation, + makeCheckpointHeader, makeCheckpointProposal, - makeL2BlockHeader, mockTx, } from '@aztec/stdlib/testing'; import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; @@ -48,7 +45,7 @@ import { type MockProxy, mock } from 'jest-mock-extended'; import { type PrivateKeyAccount, generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; import type { - BuildBlockInCheckpointResult, + BuildBlockInCheckpointResultWithTimer, CheckpointBuilder, FullNodeCheckpointsBuilder, } from './checkpoint_builder.js'; @@ -159,8 +156,7 @@ describe('ValidatorClient', () => { describe('createBlockProposal', () => { it('should create a valid block proposal without txs', async () => { - const header = makeL2BlockHeader(); - const blockHeader = header.toBlockHeader(); + const blockHeader = makeBlockHeader(); const indexWithinCheckpoint = 0; const inHash = Fr.random(); const archive = Fr.random(); @@ -283,7 +279,7 @@ describe('ValidatorClient', () => { let proposal: BlockProposal; let blockNumber: BlockNumber; let sender: PeerId; - let blockBuildResult: BuildBlockInCheckpointResult; + let blockBuildResult: BuildBlockInCheckpointResultWithTimer; let mockCheckpointBuilder: MockProxy; const makeTxFromHash = (txHash: TxHash) => ({ getTxHash: () => txHash, txHash }) as Tx; @@ -301,9 +297,8 @@ describe('ValidatorClient', () => { beforeEach(async () => { const emptyInHash = 
computeInHashFromL1ToL2Messages([]); - const epochOutHash = accumulateCheckpointOutHashes([]); - const blockHeader = makeL2BlockHeader(1, 100, 100, { inHash: emptyInHash, epochOutHash }); - blockNumber = BlockNumber(blockHeader.getBlockNumber()); + const blockHeader = makeBlockHeader(1, { blockNumber: BlockNumber(100), slotNumber: SlotNumber(100) }); + blockNumber = BlockNumber(blockHeader.globalVariables.blockNumber); proposal = await makeBlockProposal({ blockHeader, inHash: emptyInHash }); // Set the current time to the start of the slot of the proposal const genesisTime = 1n; @@ -335,12 +330,12 @@ describe('ValidatorClient', () => { // Return parent block header when requested blockSource.getBlockHeaderByArchive.mockResolvedValue({ getBlockNumber: () => blockNumber - 1, - getSlot: () => SlotNumber(blockHeader.getSlot() - 1), + getSlot: () => SlotNumber(Number(blockHeader.globalVariables.slotNumber) - 1), } as BlockHeader); // Return parent block when requested (needed for checkpoint number computation) // The parent block has slot - 1, which is different from the proposal's slot - const parentSlot = SlotNumber(blockHeader.getSlot() - 1); + const parentSlot = SlotNumber(Number(blockHeader.globalVariables.slotNumber) - 1); blockSource.getL2BlockNew.mockResolvedValue({ checkpointNumber: CheckpointNumber(1), indexWithinCheckpoint: 0, @@ -353,7 +348,7 @@ describe('ValidatorClient', () => { blockSource.getGenesisValues.mockResolvedValue({ genesisArchiveRoot: new Fr(GENESIS_ARCHIVE_ROOT) }); blockSource.syncImmediate.mockImplementation(() => Promise.resolve()); - const l2BlockHeader = blockHeader.clone(); + const clonedBlockHeader = blockHeader.clone(); blockBuildResult = { publicProcessorDuration: 0, numTxs: proposal.txHashes.length, @@ -363,7 +358,7 @@ describe('ValidatorClient', () => { usedTxs: [], usedTxBlobFields: 0, block: { - header: l2BlockHeader, + header: clonedBlockHeader, body: { txEffects: times(proposal.txHashes.length, () => TxEffect.empty()) }, 
archive: new AppendOnlyTreeSnapshot(proposal.archive, blockNumber), checkpointNumber: CheckpointNumber(1), @@ -394,8 +389,9 @@ describe('ValidatorClient', () => { const checkpointProposal = await makeCheckpointProposal({ archiveRoot: proposal.archive, + checkpointHeader: makeCheckpointHeader(0, { slotNumber: proposal.slotNumber }), lastBlock: { - blockHeader: makeL2BlockHeader(1, 123, proposal.slotNumber), + blockHeader: makeBlockHeader(1, { blockNumber: BlockNumber(123), slotNumber: proposal.slotNumber }), indexWithinCheckpoint: 0, txHashes: proposal.txHashes, }, @@ -414,8 +410,9 @@ describe('ValidatorClient', () => { const checkpointProposal = await makeCheckpointProposal({ archiveRoot: proposal.archive, + checkpointHeader: makeCheckpointHeader(0, { slotNumber: proposal.slotNumber }), lastBlock: { - blockHeader: makeL2BlockHeader(1, 123, proposal.slotNumber), + blockHeader: makeBlockHeader(1, { blockNumber: BlockNumber(123), slotNumber: proposal.slotNumber }), indexWithinCheckpoint: 0, txHashes: proposal.txHashes, }, @@ -622,7 +619,10 @@ describe('ValidatorClient', () => { // Use empty messages and compute the matching inHash const emptyInHash = computeInHashFromL1ToL2Messages([]); - const proposalBlockHeader = makeL2BlockHeader(1, parentBlockNumber + 1, parentSlotNumber); + const proposalBlockHeader = makeBlockHeader(1, { + blockNumber: BlockNumber(parentBlockNumber + 1), + slotNumber: SlotNumber(parentSlotNumber), + }); // Override the global variables on the block header (proposalBlockHeader as any).globalVariables = proposalGlobalVariables; diff --git a/yarn-project/world-state/src/native/native_bench.test.ts b/yarn-project/world-state/src/native/native_bench.test.ts index 189acbb275cd..5e7c2d8064d6 100644 --- a/yarn-project/world-state/src/native/native_bench.test.ts +++ b/yarn-project/world-state/src/native/native_bench.test.ts @@ -4,7 +4,7 @@ import { padArrayEnd } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; 
import { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; -import { L2Block } from '@aztec/stdlib/block'; +import { L2BlockNew } from '@aztec/stdlib/block'; import { type IndexedTreeId, MerkleTreeId, type MerkleTreeReadOperations } from '@aztec/stdlib/trees'; import { jest } from '@jest/globals'; @@ -91,7 +91,7 @@ describe('Native World State: benchmarks', () => { ) => { const leaves: (Buffer | Fr)[][] = []; for (let i = 0; i < numBlocks; i++) { - const l2Block = await L2Block.random(BlockNumber(1), 1, 1, 1, undefined, undefined, numLeaves); + const l2Block = await L2BlockNew.random(BlockNumber(1), { txsPerBlock: 1 }); if (treeId === MerkleTreeId.PUBLIC_DATA_TREE) { leaves.push( l2Block.body.txEffects[0].publicDataWrites.filter(x => !x.isEmpty()).map(write => write.toBuffer()),