diff --git a/yarn-project/aztec-node/package.json b/yarn-project/aztec-node/package.json index d6a937255960..646153664c8c 100644 --- a/yarn-project/aztec-node/package.json +++ b/yarn-project/aztec-node/package.json @@ -87,6 +87,7 @@ "@aztec/stdlib": "workspace:^", "@aztec/telemetry-client": "workspace:^", "@aztec/validator-client": "workspace:^", + "@aztec/validator-ha-signer": "workspace:^", "@aztec/world-state": "workspace:^", "koa": "^2.16.1", "koa-router": "^13.1.1", diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 0981a9bebd18..856af7510522 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -332,7 +332,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { const watchers: Watcher[] = []; // Create validator client if required - const validatorClient = createValidatorClient(config, { + const validatorClient = await createValidatorClient(config, { checkpointsBuilder: validatorCheckpointsBuilder, worldState: worldStateSynchronizer, p2pClient, diff --git a/yarn-project/aztec-node/tsconfig.json b/yarn-project/aztec-node/tsconfig.json index d81856e4c971..90a91bf65be1 100644 --- a/yarn-project/aztec-node/tsconfig.json +++ b/yarn-project/aztec-node/tsconfig.json @@ -72,6 +72,9 @@ { "path": "../validator-client" }, + { + "path": "../validator-ha-signer" + }, { "path": "../world-state" } diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 515aa78c3d8e..0c769385a145 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -61,6 +61,7 @@ "@aztec/telemetry-client": "workspace:^", "@aztec/test-wallet": "workspace:^", "@aztec/validator-client": "workspace:^", + "@aztec/validator-ha-signer": "workspace:^", "@aztec/world-state": "workspace:^", "@iarna/toml": "^2.2.5", "@jest/globals": "^30.0.0", diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 9277e9b53265..2afda2cc510e 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -11,6 +11,7 @@ import type { CppPublicTxSimulator, PublicTxResult } from '@aztec/simulator/serv import { BlockProposal } from '@aztec/stdlib/p2p'; import { ReExFailedTxsError, ReExStateMismatchError, ReExTimeoutError } from '@aztec/stdlib/validators'; import type { ValidatorKeyStore } from '@aztec/validator-client'; +import { DutyType } from '@aztec/validator-ha-signer/types'; import { describe, it, jest } from '@jest/globals'; import fs from 'fs'; @@ -148,7 +149,13 @@ describe('e2e_p2p_reex', () => { proposal.archiveRoot, proposal.txHashes, undefined, - payload => signer.signMessageWithAddress(proposerAddress!, payload), + payload => + signer.signMessageWithAddress(proposerAddress!, payload, { + slot: proposal.blockHeader.globalVariables.slotNumber, + blockNumber: proposal.blockHeader.globalVariables.blockNumber, + blockIndexWithinCheckpoint: proposal.indexWithinCheckpoint, + dutyType: DutyType.BLOCK_PROPOSAL, + }), ); const p2pService = (p2pClient as any).p2pService as LibP2PService; diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index 34cd783cf8c0..ecb11d50da80 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -109,6 +109,9 @@ { "path": "../validator-client" }, + { + "path": "../validator-ha-signer" + }, { 
"path": "../world-state" } diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index e404ababe22d..b2db86a96bb8 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -314,11 +314,11 @@ export type EnvVar = | 'MAX_ALLOWED_ETH_CLIENT_DRIFT_SECONDS' | 'LEGACY_BLS_CLI' | 'DEBUG_FORCE_TX_PROOF_VERIFICATION' - | 'SLASHING_PROTECTION_NODE_ID' - | 'SLASHING_PROTECTION_POLLING_INTERVAL_MS' - | 'SLASHING_PROTECTION_SIGNING_TIMEOUT_MS' - | 'SLASHING_PROTECTION_ENABLED' - | 'SLASHING_PROTECTION_MAX_STUCK_DUTIES_AGE_MS' + | 'VALIDATOR_HA_SIGNING_ENABLED' + | 'VALIDATOR_HA_NODE_ID' + | 'VALIDATOR_HA_POLLING_INTERVAL_MS' + | 'VALIDATOR_HA_SIGNING_TIMEOUT_MS' + | 'VALIDATOR_HA_MAX_STUCK_DUTIES_AGE_MS' | 'VALIDATOR_HA_DATABASE_URL' | 'VALIDATOR_HA_RUN_MIGRATIONS' | 'VALIDATOR_HA_POOL_MAX' diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index 044a278d6690..862ef20f54bc 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -49,6 +49,7 @@ "@aztec/stdlib": "workspace:^", "@aztec/telemetry-client": "workspace:^", "@aztec/validator-client": "workspace:^", + "@aztec/validator-ha-signer": "workspace:^", "@aztec/world-state": "workspace:^", "lodash.chunk": "^4.2.0", "tslib": "^2.4.0", @@ -57,6 +58,7 @@ "devDependencies": { "@aztec/archiver": "workspace:^", "@aztec/kv-store": "workspace:^", + "@electric-sql/pglite": "^0.3.14", "@jest/globals": "^30.0.0", "@types/jest": "^30.0.0", "@types/lodash.chunk": "^4.2.7", diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts index 91b38089b1e8..5917bc5c5a83 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts @@ -33,6 +33,8 @@ import { GlobalVariables, type Tx } from '@aztec/stdlib/tx'; import { AttestationTimeoutError } from '@aztec/stdlib/validators'; import { getTelemetryClient } from '@aztec/telemetry-client'; import type { FullNodeCheckpointsBuilder, ValidatorClient } from '@aztec/validator-client'; +import { DutyAlreadySignedError, SlashingProtectionError } from '@aztec/validator-ha-signer/errors'; +import { DutyType } from '@aztec/validator-ha-signer/types'; import { expect, jest } from '@jest/globals'; import EventEmitter from 'events'; @@ -420,6 +422,72 @@ describe('CheckpointProposalJob', () => { }); }); + /** + * Helper to set up multiple blocks for testing. + * Creates the specified number of blocks with proper global variables and seeds the checkpoint builder. + * @param numBlocks - Number of blocks to create + * @param txsPerBlock - Number of transactions per block (or array for different counts per block) + * @param startBlockNumber - Starting block number (defaults to newBlockNumber) + * @returns Object containing the created blocks, txs, and the last block for attestations + */ + async function setupMultipleBlocks( + numBlocks: number, + txsPerBlock: number | number[] = 1, + startBlockNumber: BlockNumber = newBlockNumber, + ): Promise<{ + blocks: Awaited>[]; + txs: Awaited>[]; + lastBlock: Awaited>; + }> { + // Create txs - determine total needed + const txsPerBlockArray = Array.isArray(txsPerBlock) ? 
txsPerBlock : Array(numBlocks).fill(txsPerBlock); + const totalTxs = txsPerBlockArray.reduce((sum, count) => sum + count, 0); + const txs = await Promise.all(Array.from({ length: totalTxs }, (_, i) => makeTx(i + 1, chainId))); + + // Set up p2p mocks + p2p.getPendingTxCount.mockResolvedValue(10); + p2p.iteratePendingTxs.mockImplementation(() => mockTxIterator(Promise.resolve(txs))); + + // Create blocks with incrementing block numbers + const blocks: Awaited>[] = []; + const blockTxs: Awaited>[][] = []; + let txIndex = 0; + + for (let i = 0; i < numBlocks; i++) { + const blockNum = BlockNumber(startBlockNumber + i); + const blockGlobalVariables = + i === 0 + ? globalVariables + : new GlobalVariables( + chainId, + version, + blockNum, + SlotNumber(newSlotNumber), + 0n, + coinbase, + feeRecipient, + gasFees, + ); + + const blockTxCount = txsPerBlockArray[i]; + const blockTxsSlice = txs.slice(txIndex, txIndex + blockTxCount); + txIndex += blockTxCount; + + const block = await makeBlock(blockTxsSlice, blockGlobalVariables); + blocks.push(block); + blockTxs.push(blockTxsSlice); + } + + // Seed checkpoint builder with all blocks + checkpointBuilder.seedBlocks(blocks, blockTxs); + + return { + blocks, + txs, + lastBlock: blocks[blocks.length - 1], + }; + } + /** * Helper to create a TestCheckpointProposalJob instance with current mocks. * Uses TestCheckpointProposalJob which has waitUntilTimeInSlot as a no-op. @@ -483,31 +551,9 @@ describe('CheckpointProposalJob', () => { .mockReturnValueOnce({ canStart: true, deadline: 18, isLastBlock: true }) .mockReturnValue({ canStart: false, deadline: undefined, isLastBlock: false }); - // Create enough txs for 2 blocks - const txs = await Promise.all([makeTx(1, chainId), makeTx(2, chainId), makeTx(3, chainId)]); - - // Always have txs available - p2p.getPendingTxCount.mockResolvedValue(10); - p2p.iteratePendingTxs.mockImplementation(() => mockTxIterator(Promise.resolve(txs))); - - // Create 2 blocks - const block1 = await makeBlock(txs.slice(0, 2), globalVariables); - const globalVariables2 = new GlobalVariables( - chainId, - version, - BlockNumber(newBlockNumber + 1), - SlotNumber(newSlotNumber), - 0n, - coinbase, - feeRecipient, - gasFees, - ); - const block2 = await makeBlock([txs[2]], globalVariables2); - - // Seed MockCheckpointBuilder with blocks to return sequentially - checkpointBuilder.seedBlocks([block1, block2], [txs.slice(0, 2), [txs[2]]]); - - validatorClient.collectAttestations.mockResolvedValue(getAttestations(block2)); + // Set up test data for 2 blocks + const { lastBlock } = await setupMultipleBlocks(2, [2, 1]); + validatorClient.collectAttestations.mockResolvedValue(getAttestations(lastBlock)); // Install spy on waitUntilTimeInSlot to verify it's called with expected deadlines const waitSpy = jest.spyOn(job, 'waitUntilTimeInSlot'); @@ -569,16 +615,9 @@ describe('CheckpointProposalJob', () => { .mockReturnValueOnce({ canStart: true, deadline: 2 + 3 * blockDurationSeconds, isLastBlock: true }) .mockReturnValue({ canStart: false, deadline: undefined, isLastBlock: false }); - const txs = await Promise.all([makeTx(1, chainId), makeTx(2, chainId), makeTx(3, chainId)]); - const block = await makeBlock(txs, globalVariables); - - p2p.getPendingTxCount.mockResolvedValue(10); - p2p.iteratePendingTxs.mockImplementation(() => mockTxIterator(Promise.resolve(txs))); - - // Seed with 3 identical blocks (each with 1 tx) - checkpointBuilder.seedBlocks([block, block, block], [[txs[0]], [txs[1]], [txs[2]]]); - - 
validatorClient.collectAttestations.mockResolvedValue(getAttestations(block)); + // Set up test data for 3 blocks + const { lastBlock } = await setupMultipleBlocks(3, 1); + validatorClient.collectAttestations.mockResolvedValue(getAttestations(lastBlock)); const waitSpy = jest.spyOn(job, 'waitUntilTimeInSlot'); @@ -801,6 +840,90 @@ describe('CheckpointProposalJob', () => { expect(validatorClient.collectAttestations).toHaveBeenCalled(); }); }); + + describe('HA error handling during block building', () => { + it('should stop checkpoint building when block proposal throws DutyAlreadySignedError on first block', async () => { + // Set up test data for 3 blocks (to verify it stops even with multiple blocks configured) + const { lastBlock } = await setupMultipleBlocks(3, 1); + validatorClient.collectAttestations.mockResolvedValue(getAttestations(lastBlock)); + + // Create job first + job.setTimetable( + new SequencerTimetable({ + ethereumSlotDuration, + aztecSlotDuration: slotDuration, + l1PublishingTime: ethereumSlotDuration, + blockDurationMs: 8000, + enforce: true, + }), + ); + + // Mock timetable to allow multiple blocks + jest + .spyOn(job.getTimetable(), 'canStartNextBlock') + .mockReturnValueOnce({ canStart: true, deadline: 4, isLastBlock: false }) + .mockReturnValueOnce({ canStart: true, deadline: 8, isLastBlock: false }) + .mockReturnValueOnce({ canStart: true, deadline: 12, isLastBlock: false }) + .mockReturnValue({ canStart: false, deadline: undefined, isLastBlock: false }); + + // Mock to throw on first block proposal + validatorClient.createBlockProposal.mockImplementation(() => { + throw new DutyAlreadySignedError(SlotNumber(1), DutyType.BLOCK_PROPOSAL, 0, 'node-2'); + }); + + const result = await job.execute(); + + // Should return undefined and stop building + expect(result).toBeUndefined(); + // Should have attempted only 1 block proposal (first one threw) + expect(validatorClient.createBlockProposal).toHaveBeenCalledTimes(1); + // Should not have attempted checkpoint proposal + expect(validatorClient.createCheckpointProposal).not.toHaveBeenCalled(); + // Should not publish anything + expect(publisher.enqueueProposeCheckpoint).not.toHaveBeenCalled(); + }); + + it('should stop checkpoint building when block proposal throws SlashingProtectionError on first block', async () => { + // Set up test data for 3 blocks (to verify it stops even with multiple blocks configured) + const { lastBlock } = await setupMultipleBlocks(3, 1); + validatorClient.collectAttestations.mockResolvedValue(getAttestations(lastBlock)); + + // Create job first + job.setTimetable( + new SequencerTimetable({ + ethereumSlotDuration, + aztecSlotDuration: slotDuration, + l1PublishingTime: ethereumSlotDuration, + blockDurationMs: 8000, + enforce: true, + }), + ); + + // Mock timetable to allow multiple blocks + jest + .spyOn(job.getTimetable(), 'canStartNextBlock') + .mockReturnValueOnce({ canStart: true, deadline: 4, isLastBlock: false }) + .mockReturnValueOnce({ canStart: true, deadline: 8, isLastBlock: false }) + .mockReturnValueOnce({ canStart: true, deadline: 12, isLastBlock: false }) + .mockReturnValue({ canStart: false, deadline: undefined, isLastBlock: false }); + + // Mock to throw on first block proposal + validatorClient.createBlockProposal.mockImplementation(() => { + throw new SlashingProtectionError(SlotNumber(1), DutyType.BLOCK_PROPOSAL, 0, 'hash1', 'hash2', 'node-1'); + }); + + const result = await job.execute(); + + // Should return undefined and stop building + expect(result).toBeUndefined(); + // 
Should have attempted only 1 block proposal (first one threw) + expect(validatorClient.createBlockProposal).toHaveBeenCalledTimes(1); + // Should not have attempted checkpoint proposal + expect(validatorClient.createCheckpointProposal).not.toHaveBeenCalled(); + // Should not publish anything + expect(publisher.enqueueProposeCheckpoint).not.toHaveBeenCalled(); + }); + }); }); class TestCheckpointProposalJob extends CheckpointProposalJob { diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index 9883bbb43754..f5386a33e25e 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -37,6 +37,7 @@ import { type FailedTx, Tx } from '@aztec/stdlib/tx'; import { AttestationTimeoutError } from '@aztec/stdlib/validators'; import { Attributes, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client'; import { CheckpointBuilder, type FullNodeCheckpointsBuilder, type ValidatorClient } from '@aztec/validator-client'; +import { DutyAlreadySignedError, SlashingProtectionError } from '@aztec/validator-ha-signer/errors'; import type { GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import type { InvalidateCheckpointRequest, SequencerPublisher } from '../publisher/sequencer-publisher.js'; @@ -202,13 +203,40 @@ export class CheckpointProposalJob implements Traceable { broadcastInvalidCheckpointProposal: this.config.broadcastInvalidBlockProposal, }; - // Main loop: build blocks for the checkpoint - const { blocksInCheckpoint, blockPendingBroadcast } = await this.buildBlocksForCheckpoint( - checkpointBuilder, - checkpointGlobalVariables.timestamp, - inHash, - blockProposalOptions, - ); + let blocksInCheckpoint: L2BlockNew[] = []; + let blockPendingBroadcast: { block: L2BlockNew; txs: Tx[] } | undefined = undefined; + + try { + // Main loop: build blocks for the checkpoint + const result = await this.buildBlocksForCheckpoint( + checkpointBuilder, + checkpointGlobalVariables.timestamp, + inHash, + blockProposalOptions, + ); + blocksInCheckpoint = result.blocksInCheckpoint; + blockPendingBroadcast = result.blockPendingBroadcast; + } catch (err) { + // These errors are expected in HA mode, so we yield and let another HA node handle the slot + // The only distinction between the 2 errors is SlashingProtectionError throws when the payload is different, + // which is normal for block building (may have picked different txs) + if (err instanceof DutyAlreadySignedError) { + this.log.info(`Checkpoint proposal for slot ${this.slot} already signed by another HA node, yielding`, { + slot: this.slot, + signedByNode: err.signedByNode, + }); + return undefined; + } + if (err instanceof SlashingProtectionError) { + this.log.info(`Checkpoint proposal for slot ${this.slot} blocked by slashing protection, yielding`, { + slot: this.slot, + existingMessageHash: err.existingMessageHash, + attemptedMessageHash: err.attemptedMessageHash, + }); + return undefined; + } + throw err; + } if (blocksInCheckpoint.length === 0) { this.log.warn(`No blocks were built for slot ${this.slot}`, { slot: this.slot }); @@ -263,7 +291,34 @@ export class CheckpointProposalJob implements Traceable { // Proposer must sign over the attestations before pushing them to L1 const signer = this.proposer ?? 
this.publisher.getSenderAddress(); - const attestationsSignature = await this.validatorClient.signAttestationsAndSigners(attestations, signer); + let attestationsSignature: Signature; + try { + attestationsSignature = await this.validatorClient.signAttestationsAndSigners( + attestations, + signer, + this.slot, + this.checkpointNumber, + ); + } catch (err) { + // We shouldn't really get here since we yield to another HA node + // as soon as we see these errors when creating block proposals. + if (err instanceof DutyAlreadySignedError) { + this.log.info(`Attestations signature for slot ${this.slot} already signed by another HA node, yielding`, { + slot: this.slot, + signedByNode: err.signedByNode, + }); + return undefined; + } + if (err instanceof SlashingProtectionError) { + this.log.info(`Attestations signature for slot ${this.slot} blocked by slashing protection, yielding`, { + slot: this.slot, + existingMessageHash: err.existingMessageHash, + attemptedMessageHash: err.attemptedMessageHash, + }); + return undefined; + } + throw err; + } // Enqueue publishing the checkpoint to L1 this.setStateFn(SequencerState.PUBLISHING_CHECKPOINT, this.slot); diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts new file mode 100644 index 000000000000..3977843707eb --- /dev/null +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts @@ -0,0 +1,803 @@ +/** + * Integration tests for High-Availability sequencer signing (governance & slashing votes) + * + * These tests use real services (pglite database, HA signer, HA key store) + * rather than mocks to verify the HA coordination works correctly for sequencer + * voting functions (governance and slashing). 
+ */
+import type { BlobClientInterface } from '@aztec/blob-client/client';
+import type { EpochCache } from '@aztec/epoch-cache';
+import { DefaultL1ContractsConfig } from '@aztec/ethereum/config';
+import type {
+  EmpireSlashingProposerContract,
+  GovernanceProposerContract,
+  RollupContract,
+} from '@aztec/ethereum/contracts';
+import { Multicall3 } from '@aztec/ethereum/contracts';
+import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs';
+import { EpochNumber, SlotNumber } from '@aztec/foundation/branded-types';
+import { SecretValue } from '@aztec/foundation/config';
+import { EthAddress } from '@aztec/foundation/eth-address';
+import type { Hex } from '@aztec/foundation/string';
+import { TestDateProvider } from '@aztec/foundation/timer';
+import { type KeyStore, KeystoreManager } from '@aztec/node-keystore';
+import type { ProposerSlashAction } from '@aztec/slasher';
+import { AztecAddress } from '@aztec/stdlib/aztec-address';
+import type { SlashFactoryContract } from '@aztec/stdlib/l1-contracts';
+import { getTelemetryClient } from '@aztec/telemetry-client';
+import { HAKeyStore, NodeKeystoreAdapter, type ValidatorClient } from '@aztec/validator-client';
+import type { ValidatorClientConfig } from '@aztec/validator-client/config';
+import { INSERT_SCHEMA_VERSION, SCHEMA_SETUP, SCHEMA_VERSION } from '@aztec/validator-ha-signer/db';
+import { createHASigner } from '@aztec/validator-ha-signer/factory';
+import { Pool } from '@aztec/validator-ha-signer/test';
+
+import { PGlite } from '@electric-sql/pglite';
+import { afterEach, beforeEach, describe, expect, it, jest } from '@jest/globals';
+import { type MockProxy, mock } from 'jest-mock-extended';
+import { type PrivateKeyAccount, generatePrivateKey, privateKeyToAccount } from 'viem/accounts';
+
+import type { SequencerClientConfig } from '../config.js';
+import type { SequencerPublisherMetrics } from '../publisher/sequencer-publisher-metrics.js';
+import { SequencerPublisher } from '../publisher/sequencer-publisher.js';
+import { CheckpointVoter } from './checkpoint_voter.js';
+import type { SequencerMetrics } from './metrics.js';
+import type { SequencerRollupConstants } from './types.js';
+
+describe('CheckpointVoter HA Integration', () => {
+  let pglite: PGlite;
+  let validatorPrivateKeys: `0x${string}`[];
+  let validatorAccounts: PrivateKeyAccount[];
+  let keyStoreManager: KeystoreManager;
+
+  // Mock dependencies
+  let rollupContract: MockProxy<RollupContract>;
+  let governanceProposerContract: MockProxy<GovernanceProposerContract>;
+  let slashingProposerContract: MockProxy<EmpireSlashingProposerContract>;
+  let l1TxUtils: MockProxy<L1TxUtilsWithBlobs>;
+  let dateProvider: TestDateProvider;
+  let sequencerMetrics: MockProxy<SequencerMetrics>;
+  let publisherMetrics: MockProxy<SequencerPublisherMetrics>;
+  let forwardSpy: jest.SpiedFunction<typeof Multicall3.forward>;
+
+  // Track resources for cleanup
+  let pools: Pool[];
+  let validatorClients: ValidatorClient[];
+
+  const TEST_L1_CONSTANTS: SequencerRollupConstants = {
+    l1GenesisTime: 1n,
+    slotDuration: 24,
+    ethereumSlotDuration: DefaultL1ContractsConfig.ethereumSlotDuration,
+  };
+
+  /**
+   * Helper to create mock governance contract with proper signer invocation
+   */
+  function createMockGovernanceContract(): MockProxy<GovernanceProposerContract> {
+    const contract = mock<GovernanceProposerContract>();
+    Object.defineProperty(contract, 'address', { value: EthAddress.random(), writable: false });
+    contract.getRoundInfo.mockResolvedValue({
+      lastSignalSlot: SlotNumber(1),
+      payloadWithMostSignals: EthAddress.ZERO.toString(),
+      quorumReached: false,
+      executed: false,
+    });
+    contract.computeRound.mockResolvedValue(1n);
+    // Mock must actually call the signer function to trigger HA protection
+    contract.createSignalRequestWithSignature.mockImplementation(
+      async (_payload, _slot, _chainId, _signerAddress, signer) => {
+        const mockTypedData = {
+          domain: { name: 'GovernanceProposer', version: '1', chainId: 1 },
+          types: {
+            Signal: [
+              { name: 'payload', type: 'address' },
+              { name: 'slot', type: 'uint256' },
+            ],
+          },
+          primaryType: 'Signal',
+          message: { payload: _payload, slot: _slot.toString() },
+        };
+        await signer(mockTypedData);
+        return {
+          to: contract.address.toString(),
+          data: '0x' as `0x${string}`,
+        };
+      },
+    );
+    return contract;
+  }
+
+  /**
+   * Helper to create mock slashing contract with proper signer invocation
+   */
+  function createMockSlashingContract(): MockProxy<EmpireSlashingProposerContract> {
+    const contract = mock<EmpireSlashingProposerContract>();
+    Object.defineProperty(contract, 'type', { value: 'empire', writable: false });
+    Object.defineProperty(contract, 'address', { value: EthAddress.random(), writable: false });
+    contract.getRoundInfo.mockResolvedValue({
+      lastSignalSlot: SlotNumber(1),
+      payloadWithMostSignals: EthAddress.ZERO.toString(),
+      quorumReached: false,
+      executed: false,
+    });
+    contract.computeRound.mockResolvedValue(1n);
+    // Mock must actually call the signer function to trigger HA protection
+    contract.createSignalRequestWithSignature.mockImplementation(
+      async (_payload, _slot, _chainId, _signerAddress, signer) => {
+        const mockTypedData = {
+          domain: { name: 'SlashingProposer', version: '1', chainId: 1 },
+          types: {
+            Signal: [
+              { name: 'payload', type: 'address' },
+              { name: 'slot', type: 'uint256' },
+            ],
+          },
+          primaryType: 'Signal',
+          message: { payload: _payload, slot: _slot.toString() },
+        };
+        await signer(mockTypedData as any);
+        return {
+          to: contract.address.toString(),
+          data: '0x' as any,
+        };
+      },
+    );
+    return contract;
+  }
+
+  /**
+   * Helper to create mock L1 tx utils
+   */
+  function createMockL1TxUtils(validatorAccount: PrivateKeyAccount): MockProxy<L1TxUtilsWithBlobs> {
+    const txUtils = mock<L1TxUtilsWithBlobs>();
+    txUtils.client = {
+      account: validatorAccount,
+      getCode: () => Promise.resolve('0x1234' as `0x${string}`),
+    } as any;
+    txUtils.getSenderAddress.mockReturnValue(EthAddress.fromString(validatorAccount.address));
+    txUtils.simulate.mockResolvedValue({
+      gasUsed: 100000n,
+      result: '0x',
+    });
+    // Mock getCode to return non-empty bytecode for governance/slashing payloads
+    txUtils.getCode.mockResolvedValue('0x1234' as any);
+    return txUtils;
+  }
+
+  beforeEach(async () => {
+    // Initialize cleanup tracking arrays
+    pools = [];
+    validatorClients = [];
+
+    // Create a fresh pglite database
+    pglite = new PGlite();
+    // Set up the database schema for testing
+    for (const statement of SCHEMA_SETUP) {
+      await pglite.query(statement);
+    }
+    await pglite.query(INSERT_SCHEMA_VERSION, [SCHEMA_VERSION]);
+
+    // Generate validator keys - using 5 validators
+    validatorPrivateKeys = Array.from({ length: 5 }, () => generatePrivateKey());
+    validatorAccounts = validatorPrivateKeys.map(privateKey => privateKeyToAccount(privateKey));
+
+    // Set up spy for Multicall3.forward to track L1 transaction sending
+    forwardSpy = jest.spyOn(Multicall3, 'forward');
+
+    // Set up mocks using helper functions
+    rollupContract = mock<RollupContract>();
+    Object.defineProperty(rollupContract, 'address', { value: EthAddress.random(), writable: false });
+    rollupContract.listenToSlasherChanged.mockReturnValue(undefined as any);
+    rollupContract.getSlashingProposer.mockResolvedValue(undefined);
+
+    governanceProposerContract = createMockGovernanceContract();
+    slashingProposerContract = createMockSlashingContract();
+    l1TxUtils = createMockL1TxUtils(validatorAccounts[0]);
+
+    dateProvider = new TestDateProvider();
+    sequencerMetrics = mock<SequencerMetrics>();
+    publisherMetrics = mock<SequencerPublisherMetrics>();
+
+    // Create keystore
+    const keyStore: KeyStore = {
+      schemaVersion: 1,
+      validators: [
+        {
+          attester: validatorPrivateKeys.map(key => key as Hex<32>),
+          feeRecipient: AztecAddress.ZERO,
+        },
+      ],
+    };
+    keyStoreManager = new KeystoreManager(keyStore);
+  });
+
+  afterEach(async () => {
+    for (const validator of validatorClients) {
+      await validator.stop();
+    }
+
+    for (const pool of pools) {
+      await pool.end();
+    }
+
+    if (pglite) {
+      await pglite.close();
+      pglite = undefined as any;
+    }
+
+    // Restore the spy to avoid affecting other tests
+    forwardSpy.mockRestore();
+  });
+
+  /**
+   * Helper to create a CheckpointVoter with HA signing enabled using pglite
+   * Returns the voter, publisher, and validator client for testing
+   */
+  async function createHACheckpointVoter(
+    slot: SlotNumber,
+    config: Partial,
+  ): Promise<{
+    voter: CheckpointVoter;
+    publisher: SequencerPublisher;
+    validatorClient: ValidatorClient;
+  }> {
+    const pool = new Pool({ pglite });
+    pools.push(pool);
+
+    const baseConfig: ValidatorClientConfig = {
+      validatorPrivateKeys: new SecretValue(validatorPrivateKeys),
+      attestationPollingIntervalMs: 1000,
+      disableValidator: false,
+      disabledValidators: [],
+      validatorReexecute: false,
+      haSigningEnabled: true,
+      nodeId: config.nodeId || 'ha-node-1',
+      pollingIntervalMs: 100,
+      signingTimeoutMs: 3000,
+      maxStuckDutiesAgeMs: 72000,
+      databaseUrl: 'postgresql://test',
+    };
+
+    // Create HA signer with pglite pool
+    const { signer: haSigner } = await createHASigner(baseConfig, { pool: pool as any });
+
+    // Create base keystore
+    const baseKeyStore = NodeKeystoreAdapter.fromKeyStoreManager(keyStoreManager);
+
+    // Wrap with HA key store
+    const haKeyStore = new HAKeyStore(baseKeyStore, haSigner);
+
+    // Create publisher
+    const publisherConfig = {
+      ...config,
+      viemPollingIntervalMS: 1000,
+      l1PublishRetryIntervalMS: 1000,
+      l1RpcUrl: 'http://localhost:8545',
+      requiredConfirmations: 1,
+      maxL1TxInclusionWaitPulseSeconds: 60,
+      ethereumSlotDuration: DefaultL1ContractsConfig.ethereumSlotDuration,
+      fishermanMode: false,
+      l1ChainId: 1,
+    };
+
+    const blobClient = mock<BlobClientInterface>();
+    blobClient.canUpload.mockReturnValue(false);
+
+    const epochCache = mock<EpochCache>();
+    epochCache.getEpochAndSlotNow.mockReturnValue({
+      epoch: EpochNumber(1),
+      slot: slot,
+      ts: BigInt(Date.now()),
+      now: BigInt(Date.now()),
+    });
+
+    const slashFactoryContract = mock<SlashFactoryContract>();
+
+    const publisher = new SequencerPublisher(publisherConfig as any, {
+      telemetry: getTelemetryClient(),
+      blobClient,
+      l1TxUtils,
+      rollupContract,
+      slashingProposerContract,
+      governanceProposerContract,
+      slashFactoryContract,
+      epochCache,
+      dateProvider,
+      metrics: publisherMetrics,
+      lastActions: {},
+    });
+
+    // Create mock validator client with real signing via HA keystore
+    const validatorClient = mock<ValidatorClient>();
+
+    // Delegate signWithAddress to the HA keystore for real HA protection
+    validatorClient.signWithAddress.mockImplementation((address, msg, context) => {
+      return haKeyStore.signTypedDataWithAddress(address, msg, context);
+    });
+
+    // Manage HA signer lifecycle through validator client mock
+    validatorClient.start.mockImplementation(() => Promise.resolve(haSigner.start()));
+    validatorClient.stop.mockImplementation(async () => await haSigner.stop());
+
+    // Track for cleanup and start the signer
+    validatorClients.push(validatorClient);
+
await validatorClient.start(); + + // Create the checkpoint voter + const attestorAddress = EthAddress.fromString(validatorAccounts[0].address); + const voter = new CheckpointVoter( + slot, + publisher, + attestorAddress, + validatorClient, + undefined, // slasherClient - we'll mock this per test + TEST_L1_CONSTANTS, + config as any, + sequencerMetrics, + publisher['log'], + ); + + return { voter, publisher, validatorClient }; + } + + /** + * Helper to create a CheckpointVoter with slashing actions configured + * Reduces duplication in tests that need both governance and slashing + */ + async function createHACheckpointVoterWithSlasher( + slot: SlotNumber, + config: Partial, + slashingActions: ProposerSlashAction[], + ): Promise<{ + voter: CheckpointVoter; + publisher: SequencerPublisher; + validatorClient: ValidatorClient; + }> { + const result = await createHACheckpointVoter(slot, config); + + // Mock slasher client to return the provided actions + const slasherClient = { + getProposerActions: () => slashingActions, + }; + + // Create new voter with slasher client configured + const attestorAddress = EthAddress.fromString(validatorAccounts[0].address); + const voter = new CheckpointVoter( + slot, + result.publisher, + attestorAddress, + result.validatorClient, + slasherClient as any, + TEST_L1_CONSTANTS, + config as any, + sequencerMetrics, + result.publisher['log'], + ); + + return { voter, publisher: result.publisher, validatorClient: result.validatorClient }; + } + + /** + * Helper to query the HA database and verify duty records + * Useful for debugging and verifying HA coordination worked correctly + */ + async function getDutyRecords(slot: SlotNumber) { + const result = await pglite.query<{ slot: string; duty_type: string; node_id: string; started_at: Date }>( + 'SELECT slot, duty_type, node_id, started_at FROM validator_duties WHERE slot = $1 ORDER BY started_at', + [slot.toString()], + ); + return result.rows; + } + + describe('High-Availability governance vote coordination', () => { + it('should allow only one sequencer instance to enqueue a governance vote for the same slot', async () => { + const slot = SlotNumber(100); + const governancePayload = EthAddress.random(); + + // Create 5 checkpoint voters for the same slot + const voters = await Promise.all( + Array.from({ length: 5 }, (_, i) => + createHACheckpointVoter(slot, { + nodeId: `ha-node-${i + 1}`, + governanceProposerPayload: governancePayload, + }), + ), + ); + + // All 5 voters try to enqueue governance votes simultaneously + const results = await Promise.all( + voters.map(({ voter }) => { + const [governancePromise, _slashingPromise] = voter.enqueueVotes(); + return governancePromise; + }), + ); + + // Count successes - enqueueVotes returns true/false, never throws + const successCount = results.filter(r => r === true).length; + const failureCount = results.filter(r => r === false).length; + + // Exactly one should succeed, others should fail due to HA coordination + // DutyAlreadySignedError is caught and converted to false + expect(successCount).toBe(1); + expect(failureCount).toBe(4); + + // Verify database state - exactly one governance vote duty should be recorded + const duties = await getDutyRecords(slot); + const governanceDuties = duties.filter(d => d.duty_type === 'GOVERNANCE_VOTE'); + expect(governanceDuties).toHaveLength(1); + + // The winning node should be one of our 5 nodes + const winningNodeId = governanceDuties[0].node_id; + expect(['ha-node-1', 'ha-node-2', 'ha-node-3', 'ha-node-4', 
'ha-node-5']).toContain(winningNodeId); + }); + + it('should allow different sequencers to vote for different slots', async () => { + const governancePayload = EthAddress.random(); + + // Create 5 checkpoint voters for different slots + const slots = Array.from({ length: 5 }, (_, i) => SlotNumber(100 + i)); + const voters = await Promise.all( + slots.map((slot, i) => + createHACheckpointVoter(slot, { + nodeId: `ha-node-${i + 1}`, + governanceProposerPayload: governancePayload, + }), + ), + ); + + // Each voter enqueues votes for their respective slot + const results = await Promise.all( + voters.map(({ voter }) => { + const [governancePromise, _slashingPromise] = voter.enqueueVotes(); + return governancePromise; + }), + ); + + // All 5 should succeed since they're for different slots + results.forEach(result => { + expect(result).toBe(true); + }); + + // Verify database - each slot should have exactly one governance duty + for (const slot of slots) { + const duties = await getDutyRecords(slot); + const governanceDuties = duties.filter(d => d.duty_type === 'GOVERNANCE_VOTE'); + expect(governanceDuties).toHaveLength(1); + } + }); + }); + + describe('High-Availability slashing vote coordination', () => { + it('should allow only one sequencer instance to enqueue slashing votes for the same slot', async () => { + const slot = SlotNumber(200); + const slashingPayload = EthAddress.random(); + + // Create mock slashing actions + const mockSlashingActions: ProposerSlashAction[] = [ + { + type: 'vote-empire-payload', + payload: slashingPayload, + }, + ]; + + // Create 5 checkpoint voters with slashing actions using the helper + const voters = await Promise.all( + Array.from({ length: 5 }, (_, i) => + createHACheckpointVoterWithSlasher(slot, { nodeId: `ha-node-${i + 1}` }, mockSlashingActions), + ), + ); + + // All 5 voters try to enqueue slashing votes simultaneously + const results = await Promise.all( + voters.map(({ voter }) => { + const [_governancePromise, slashingPromise] = voter.enqueueVotes(); + return slashingPromise; + }), + ); + + // Count successes - enqueueVotes returns true/false, never throws + const successCount = results.filter(r => r === true).length; + const failureCount = results.filter(r => r === false).length; + + // Exactly one should succeed, others should fail due to HA coordination + expect(successCount).toBe(1); + expect(failureCount).toBe(4); + + // Verify database state - exactly one slashing vote duty should be recorded + const duties = await getDutyRecords(slot); + const slashingDuties = duties.filter(d => d.duty_type === 'SLASHING_VOTE'); + expect(slashingDuties).toHaveLength(1); + }); + + it('should allow different sequencers to vote on slashing for different slots', async () => { + const slashingPayload = EthAddress.random(); + + // Create mock slashing actions + const mockSlashingActions: ProposerSlashAction[] = [ + { + type: 'vote-empire-payload', + payload: slashingPayload, + }, + ]; + + // Create 5 checkpoint voters for different slots with slashing actions + const slots = Array.from({ length: 5 }, (_, i) => SlotNumber(200 + i)); + const voters = await Promise.all( + slots.map((slot, i) => + createHACheckpointVoterWithSlasher(slot, { nodeId: `ha-node-${i + 1}` }, mockSlashingActions), + ), + ); + + // Each voter enqueues slashing votes for their respective slot + const results = await Promise.all( + voters.map(({ voter }) => { + const [_governancePromise, slashingPromise] = voter.enqueueVotes(); + return slashingPromise; + }), + ); + + // All 5 should succeed 
since they're for different slots + results.forEach(result => { + expect(result).toBe(true); + }); + + // Verify database - each slot should have exactly one slashing duty + for (const slot of slots) { + const duties = await getDutyRecords(slot); + const slashingDuties = duties.filter(d => d.duty_type === 'SLASHING_VOTE'); + expect(slashingDuties).toHaveLength(1); + } + }); + + it('should coordinate both governance and slashing votes independently and send them correctly', async () => { + const slot = SlotNumber(300); + const governancePayload = EthAddress.random(); + const slashingPayload = EthAddress.random(); + + const mockSlashingActions: ProposerSlashAction[] = [ + { + type: 'vote-empire-payload', + payload: slashingPayload, + }, + ]; + + // Create 5 checkpoint voters that will vote on both governance and slashing + const voters = await Promise.all( + Array.from({ length: 5 }, (_, i) => + createHACheckpointVoterWithSlasher( + slot, + { + nodeId: `ha-node-${i + 1}`, + governanceProposerPayload: governancePayload, + }, + mockSlashingActions, + ), + ), + ); + + // All voters enqueue both governance and slashing votes + const allResults = await Promise.all( + voters.map(async ({ voter }) => { + const [governancePromise, slashingPromise] = voter.enqueueVotes(); + return { + governance: await governancePromise, + slashing: await slashingPromise, + }; + }), + ); + + // Count successes for each vote type + const governanceSuccessCount = allResults.filter(r => r.governance === true).length; + const slashingSuccessCount = allResults.filter(r => r.slashing === true).length; + + // Exactly one should succeed for each vote type (they're independent) + expect(governanceSuccessCount).toBe(1); + expect(slashingSuccessCount).toBe(1); + + // Verify database state - both types of duties should be recorded independently + const duties = await getDutyRecords(slot); + const governanceDuties = duties.filter(d => d.duty_type === 'GOVERNANCE_VOTE'); + const slashingDuties = duties.filter(d => d.duty_type === 'SLASHING_VOTE'); + + expect(governanceDuties).toHaveLength(1); + expect(slashingDuties).toHaveLength(1); + + // The winning nodes might be different for each duty type (HA coordination is per-duty) + // This is the realistic scenario where different nodes might win different duties + const governanceWinner = governanceDuties[0].node_id; + const slashingWinner = slashingDuties[0].node_id; + expect(['ha-node-1', 'ha-node-2', 'ha-node-3', 'ha-node-4', 'ha-node-5']).toContain(governanceWinner); + expect(['ha-node-1', 'ha-node-2', 'ha-node-3', 'ha-node-4', 'ha-node-5']).toContain(slashingWinner); + }); + + it('should handle HA coordination across multiple slots with multiple nodes', async () => { + // Test a more realistic scenario: multiple nodes competing for duties across multiple slots + // This verifies that HA coordination works correctly at scale + const governancePayload = EthAddress.random(); + const slashingPayload = EthAddress.random(); + + const mockSlashingActions: ProposerSlashAction[] = [ + { + type: 'vote-empire-payload', + payload: slashingPayload, + }, + ]; + + // Create 3 nodes that will compete for duties across 3 slots + const slots = [SlotNumber(400), SlotNumber(401), SlotNumber(402)]; + const nodeIds = ['ha-node-1', 'ha-node-2', 'ha-node-3']; + + // Each node tries to sign duties for all 3 slots + const allVoters: Array<{ slot: SlotNumber; nodeId: string; voter: CheckpointVoter }> = []; + for (const slot of slots) { + for (const nodeId of nodeIds) { + const { voter } = await 
createHACheckpointVoterWithSlasher( + slot, + { nodeId, governanceProposerPayload: governancePayload }, + mockSlashingActions, + ); + allVoters.push({ slot, nodeId, voter }); + } + } + + // All voters try to enqueue votes + const results = await Promise.all( + allVoters.map(async ({ voter }) => { + const [govPromise, slashPromise] = voter.enqueueVotes(); + return { governance: await govPromise, slashing: await slashPromise }; + }), + ); + + // For each slot, exactly one node should win each duty type + for (const slot of slots) { + const duties = await getDutyRecords(slot); + const governanceDuties = duties.filter(d => d.duty_type === 'GOVERNANCE_VOTE'); + const slashingDuties = duties.filter(d => d.duty_type === 'SLASHING_VOTE'); + + expect(governanceDuties).toHaveLength(1); + expect(slashingDuties).toHaveLength(1); + } + + // Verify overall: 3 slots × 2 duty types = 6 total successful enqueues + const totalSuccesses = + results.filter(r => r.governance === true).length + results.filter(r => r.slashing === true).length; + expect(totalSuccesses).toBe(6); + }); + }); + + describe('Publisher request sending', () => { + it('should verify that different nodes with different signed duties can all send their requests', async () => { + // This tests the realistic HA scenario: + // - Node A wins governance vote signing + // - Node B wins slashing vote signing + // - Both can independently send their enqueued requests to L1 + const slot = SlotNumber(450); + const governancePayload = EthAddress.random(); + const slashingPayload = EthAddress.random(); + + const mockSlashingActions: ProposerSlashAction[] = [{ type: 'vote-empire-payload', payload: slashingPayload }]; + + // Node A: only governance (no slashing actions) + const { voter: voterA, publisher: publisherA } = await createHACheckpointVoterWithSlasher( + slot, + { nodeId: 'ha-node-A', governanceProposerPayload: governancePayload }, + [], + ); + + // Node B: only slashing (no governance payload) + const { voter: voterB, publisher: publisherB } = await createHACheckpointVoterWithSlasher( + slot, + { nodeId: 'ha-node-B', governanceProposerPayload: undefined }, // No governance + mockSlashingActions, + ); + + // Clear mock calls before enqueuing to have clean assertions + governanceProposerContract.createSignalRequestWithSignature.mockClear(); + slashingProposerContract.createSignalRequestWithSignature.mockClear(); + forwardSpy.mockClear(); + + // Mock forwardSpy to simulate successful L1 transaction submission + forwardSpy.mockResolvedValue({ + receipt: { + transactionHash: '0x123', + status: 'success', + logs: [], + } as any, + errorMsg: undefined, + }); + + // Each node enqueues their respective votes + const [govA] = voterA.enqueueVotes(); + const [, slashB] = voterB.enqueueVotes(); + + const governanceResult = await govA; + const slashingResult = await slashB; + + // Both should succeed since they're not competing + expect(governanceResult).toBe(true); + expect(slashingResult).toBe(true); + + // Verify different nodes handled different duties in the database + const duties = await getDutyRecords(slot); + const governanceDuty = duties.find(d => d.duty_type === 'GOVERNANCE_VOTE'); + const slashingDuty = duties.find(d => d.duty_type === 'SLASHING_VOTE'); + + expect(governanceDuty?.node_id).toBe('ha-node-A'); + expect(slashingDuty?.node_id).toBe('ha-node-B'); + + // Now verify that each publisher can actually send its enqueued request + // Node A sends governance vote + const resultA = await publisherA.sendRequests(); + 
expect(resultA).toBeDefined(); + + // Verify Node A's publisher created the governance signal with signature + expect(governanceProposerContract.createSignalRequestWithSignature).toHaveBeenCalledTimes(1); + expect(governanceProposerContract.createSignalRequestWithSignature).toHaveBeenCalledWith( + governancePayload.toString(), + slot, + expect.any(Number), // chainId + expect.any(String), // signerAddress + expect.any(Function), // signer function + ); + + // Verify Node A's request was sent to L1 via Multicall3.forward + expect(forwardSpy).toHaveBeenCalledTimes(1); + + // Clear the forward spy for Node B + forwardSpy.mockClear(); + + // Node B sends slashing vote + const resultB = await publisherB.sendRequests(); + expect(resultB).toBeDefined(); + + // Verify Node B's publisher created the slashing signal with signature + expect(slashingProposerContract.createSignalRequestWithSignature).toHaveBeenCalledTimes(1); + expect(slashingProposerContract.createSignalRequestWithSignature).toHaveBeenCalledWith( + slashingPayload.toString(), + slot, + expect.any(Number), // chainId + expect.any(String), // signerAddress + expect.any(Function), // signer function + ); + + // Verify Node B's request was sent to L1 via Multicall3.forward + expect(forwardSpy).toHaveBeenCalledTimes(1); + }); + }); + + describe('Error handling and edge cases', () => { + it('should handle concurrent attempts with proper error handling (no exceptions)', async () => { + const slot = SlotNumber(500); + const governancePayload = EthAddress.random(); + + // Create 3 voters + const voters = await Promise.all( + Array.from({ length: 3 }, (_, i) => + createHACheckpointVoter(slot, { + nodeId: `ha-node-${i + 1}`, + governanceProposerPayload: governancePayload, + }), + ), + ); + + // Try to enqueue votes - should never throw, always return boolean + const results = await Promise.all( + voters.map(({ voter }) => { + const [governancePromise, _slashingPromise] = voter.enqueueVotes(); + return governancePromise; + }), + ); + + // Count successes - enqueueVotes returns true/false, never throws + const successCount = results.filter(r => r === true).length; + const failureCount = results.filter(r => r === false).length; + + // One should succeed, others should gracefully fail (return false, not throw) + expect(successCount).toBe(1); + expect(failureCount).toBe(2); + + // Verify only one duty was recorded in the database + const duties = await getDutyRecords(slot); + expect(duties.filter(d => d.duty_type === 'GOVERNANCE_VOTE')).toHaveLength(1); + }); + }); +}); diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ts index a5f00190b929..5a6ff07c03ee 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ts @@ -5,6 +5,8 @@ import type { SlasherClientInterface } from '@aztec/slasher'; import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; import type { ResolvedSequencerConfig } from '@aztec/stdlib/interfaces/server'; import type { ValidatorClient } from '@aztec/validator-client'; +import { DutyAlreadySignedError } from '@aztec/validator-ha-signer/errors'; +import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types'; import type { TypedDataDefinition } from 'viem'; @@ -17,7 +19,8 @@ import type { SequencerRollupConstants } from './types.js'; */ export class CheckpointVoter { private slotTimestamp: bigint; - private signer: (msg: 
TypedDataDefinition) => Promise<`0x${string}`>; + private governanceSigner: (msg: TypedDataDefinition) => Promise<`0x${string}`>; + private slashingSigner: (msg: TypedDataDefinition) => Promise<`0x${string}`>; constructor( private readonly slot: SlotNumber, @@ -31,8 +34,16 @@ export class CheckpointVoter { private readonly log: Logger, ) { this.slotTimestamp = getTimestampForSlot(this.slot, this.l1Constants); - this.signer = (msg: TypedDataDefinition) => - this.validatorClient.signWithAddress(this.attestorAddress, msg).then(s => s.toString()); + + // Create separate signers with appropriate duty contexts for governance and slashing votes + // These use HA protection to ensure only one node signs per slot/duty + const governanceContext: SigningContext = { slot: this.slot, dutyType: DutyType.GOVERNANCE_VOTE }; + this.governanceSigner = (msg: TypedDataDefinition) => + this.validatorClient.signWithAddress(this.attestorAddress, msg, governanceContext).then(s => s.toString()); + + const slashingContext: SigningContext = { slot: this.slot, dutyType: DutyType.SLASHING_VOTE }; + this.slashingSigner = (msg: TypedDataDefinition) => + this.validatorClient.signWithAddress(this.attestorAddress, msg, slashingContext).then(s => s.toString()); } /** @@ -68,10 +79,17 @@ export class CheckpointVoter { this.slot, this.slotTimestamp, this.attestorAddress, - this.signer, + this.governanceSigner, ); } catch (err) { - this.log.error(`Error enqueuing governance vote`, err, { slot: this.slot }); + if (err instanceof DutyAlreadySignedError) { + this.log.info(`Governance vote already signed by another node`, { + slot: this.slot, + signedByNode: err.signedByNode, + }); + } else { + this.log.error(`Error enqueueing governance vote`, err); + } return false; } } @@ -95,10 +113,17 @@ export class CheckpointVoter { this.slot, this.slotTimestamp, this.attestorAddress, - this.signer, + this.slashingSigner, ); } catch (err) { - this.log.error(`Error enqueuing slashing vote`, err, { slot: this.slot }); + if (err instanceof DutyAlreadySignedError) { + this.log.info(`Slashing vote already signed by another node`, { + slot: this.slot, + signedByNode: err.signedByNode, + }); + } else { + this.log.error(`Error enqueueing slashing vote`, err); + } return false; } } diff --git a/yarn-project/sequencer-client/tsconfig.json b/yarn-project/sequencer-client/tsconfig.json index 55899f3f180c..4abe6f06daee 100644 --- a/yarn-project/sequencer-client/tsconfig.json +++ b/yarn-project/sequencer-client/tsconfig.json @@ -69,6 +69,9 @@ { "path": "../validator-client" }, + { + "path": "../validator-ha-signer" + }, { "path": "../world-state" }, diff --git a/yarn-project/stdlib/package.json b/yarn-project/stdlib/package.json index 0ce67e372b1c..6f6ab0ea1ca5 100644 --- a/yarn-project/stdlib/package.json +++ b/yarn-project/stdlib/package.json @@ -85,6 +85,7 @@ "@aztec/foundation": "workspace:^", "@aztec/l1-artifacts": "workspace:^", "@aztec/noir-noirc_abi": "portal:../../noir/packages/noirc_abi", + "@aztec/validator-ha-signer": "workspace:^", "@google-cloud/storage": "^7.15.0", "axios": "^1.12.0", "json-stringify-deterministic": "1.0.12", diff --git a/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts index 11e2f33b0e4c..d7a996a96e66 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts @@ -164,6 +164,11 @@ class MockAztecNodeAdmin implements AztecNodeAdmin { attestationPollingIntervalMs: 1000, 
       validatorReexecute: true,
       disableTransactions: false,
+      haSigningEnabled: false,
+      nodeId: 'test-node-id',
+      pollingIntervalMs: 50,
+      signingTimeoutMs: 3000,
+      maxStuckDutiesAgeMs: 72000,
     });
   }
   startSnapshotUpload(_location: string): Promise<void> {
diff --git a/yarn-project/stdlib/src/interfaces/validator.ts b/yarn-project/stdlib/src/interfaces/validator.ts
index 69df7a83ca24..115fcfb936ee 100644
--- a/yarn-project/stdlib/src/interfaces/validator.ts
+++ b/yarn-project/stdlib/src/interfaces/validator.ts
@@ -1,3 +1,4 @@
+import type { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types';
 import type { SecretValue } from '@aztec/foundation/config';
 import { Fr } from '@aztec/foundation/curves/bn254';
 import type { EthAddress } from '@aztec/foundation/eth-address';
@@ -14,6 +15,7 @@ import type {
 } from '@aztec/stdlib/p2p';
 import type { CheckpointHeader } from '@aztec/stdlib/rollup';
 import type { BlockHeader, Tx } from '@aztec/stdlib/tx';
+import { type ValidatorHASignerConfig, ValidatorHASignerConfigSchema } from '@aztec/validator-ha-signer/config';
 
 import type { PeerId } from '@libp2p/interface';
 import { z } from 'zod';
@@ -24,7 +26,7 @@ import { AllowedElementSchema } from './allowed_element.js';
 
 /**
  * Validator client configuration
  */
-export interface ValidatorClientConfig {
+export type ValidatorClientConfig = ValidatorHASignerConfig & {
   /** The private keys of the validators participating in attestation duties */
   validatorPrivateKeys?: SecretValue<`0x${string}`[]>;
@@ -56,7 +58,7 @@ export interface ValidatorClientConfig {
   // TODO(palla/mbps): Change default to false once block sync is stable
   /** Skip pushing re-executed blocks to archiver (default: true) */
   skipPushProposedBlocksToArchiver?: boolean;
-}
+};
 
 export type ValidatorClientFullConfig = ValidatorClientConfig &
   Pick &
@@ -69,7 +71,7 @@ export type ValidatorClientFullConfig = ValidatorClientConfig &
   };
 
 export const ValidatorClientConfigSchema = zodFor>()(
-  z.object({
+  ValidatorHASignerConfigSchema.extend({
     validatorAddresses: z.array(schemas.EthAddress).optional(),
     disableValidator: z.boolean(),
     disabledValidators: z.array(schemas.EthAddress),
@@ -144,5 +146,7 @@ export interface Validator {
   signAttestationsAndSigners(
     attestationsAndSigners: CommitteeAttestationsAndSigners,
     proposer: EthAddress,
+    slot: SlotNumber,
+    blockNumber: BlockNumber | CheckpointNumber,
   ): Promise<Signature>;
 }
diff --git a/yarn-project/stdlib/src/p2p/block_proposal.ts b/yarn-project/stdlib/src/p2p/block_proposal.ts
index c9aa9f21fbd2..a27a18bb51e5 100644
--- a/yarn-project/stdlib/src/p2p/block_proposal.ts
+++ b/yarn-project/stdlib/src/p2p/block_proposal.ts
@@ -6,6 +6,7 @@ import { Fr } from '@aztec/foundation/curves/bn254';
 import type { EthAddress } from '@aztec/foundation/eth-address';
 import { Signature } from '@aztec/foundation/eth-signature';
 import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types';
 
 import type { L2Block } from '../block/l2_block.js';
 import type { L2BlockInfo } from '../block/l2_block_info.js';
@@ -125,7 +126,7 @@ export class BlockProposal extends Gossipable {
     archiveRoot: Fr,
     txHashes: TxHash[],
     txs: Tx[] | undefined,
-    payloadSigner: (payload: Buffer32) => Promise<Signature>,
+    payloadSigner: (payload: Buffer32, context: SigningContext) => Promise<Signature>,
   ): Promise<BlockProposal> {
     // Create a temporary proposal to get the payload to sign
     const tempProposal = new BlockProposal(
@@ -137,13 +138,23 @@ export class BlockProposal extends Gossipable {
       Signature.empty(),
     );
 
+    // Create the block signing context
+    const blockContext: SigningContext = {
+      slot: blockHeader.globalVariables.slotNumber,
+      blockNumber: blockHeader.globalVariables.blockNumber,
+      blockIndexWithinCheckpoint: indexWithinCheckpoint,
+      dutyType: DutyType.BLOCK_PROPOSAL,
+    };
+
     const hashed = getHashedSignaturePayload(tempProposal, SignatureDomainSeparator.blockProposal);
-    const sig = await payloadSigner(hashed);
+    const sig = await payloadSigner(hashed, blockContext);
 
     // If txs are provided, sign them as well
     let signedTxs: SignedTxs | undefined;
     if (txs) {
-      signedTxs = await SignedTxs.createFromSigner(txs, payloadSigner);
+      const txsSigningContext: SigningContext = { dutyType: DutyType.TXS };
+      const txsSigner = (payload: Buffer32) => payloadSigner(payload, txsSigningContext);
+      signedTxs = await SignedTxs.createFromSigner(txs, txsSigner);
     }
 
     return new BlockProposal(blockHeader, indexWithinCheckpoint, inHash, archiveRoot, txHashes, sig, signedTxs);
diff --git a/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts b/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts
index bf2f1ac80581..e712bd105606 100644
--- a/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts
+++ b/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts
@@ -6,6 +6,7 @@ import { Fr } from '@aztec/foundation/curves/bn254';
 import type { EthAddress } from '@aztec/foundation/eth-address';
 import { Signature } from '@aztec/foundation/eth-signature';
 import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types';
 
 import type { L2BlockInfo } from '../block/l2_block_info.js';
 import { MAX_TXS_PER_BLOCK } from '../deserialization/index.js';
@@ -152,13 +153,18 @@ export class CheckpointProposal extends Gossipable {
     checkpointHeader: CheckpointHeader,
     archiveRoot: Fr,
     lastBlockInfo: CheckpointLastBlockData | undefined,
-    payloadSigner: (payload: Buffer32) => Promise<Signature>,
+    payloadSigner: (payload: Buffer32, context: SigningContext) => Promise<Signature>,
   ): Promise<CheckpointProposal> {
-    // Sign the checkpoint payload
+    // Sign the checkpoint payload with CHECKPOINT_PROPOSAL duty type
     const tempProposal = new CheckpointProposal(checkpointHeader, archiveRoot, Signature.empty(), undefined);
-    const checkpointHash = getHashedSignaturePayload(tempProposal, SignatureDomainSeparator.checkpointProposal);
-    const checkpointSignature = await payloadSigner(checkpointHash);
+
+    const checkpointContext: SigningContext = {
+      slot: checkpointHeader.slotNumber,
+      blockNumber: lastBlockInfo?.blockHeader?.globalVariables.blockNumber ??
BlockNumber(0), + dutyType: DutyType.CHECKPOINT_PROPOSAL, + }; + const checkpointSignature = await payloadSigner(checkpointHash, checkpointContext); if (!lastBlockInfo) { return new CheckpointProposal(checkpointHeader, archiveRoot, checkpointSignature); diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index f51522b5555c..13a32ce71958 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -569,7 +569,7 @@ export const makeBlockProposal = (options?: MakeBlockProposalOptions): Promise Promise.resolve(signer.signMessage(payload)), + (_payload, _context) => Promise.resolve(signer.signMessage(_payload)), ); }; diff --git a/yarn-project/stdlib/tsconfig.json b/yarn-project/stdlib/tsconfig.json index 96ad9c145b41..37a8739eb6b2 100644 --- a/yarn-project/stdlib/tsconfig.json +++ b/yarn-project/stdlib/tsconfig.json @@ -20,6 +20,9 @@ }, { "path": "../l1-artifacts" + }, + { + "path": "../validator-ha-signer" } ], "include": ["src"] diff --git a/yarn-project/validator-client/README.md b/yarn-project/validator-client/README.md index 678fa86a7ab0..52d0cb93af90 100644 --- a/yarn-project/validator-client/README.md +++ b/yarn-project/validator-client/README.md @@ -155,19 +155,42 @@ Time | Proposer | Validator ## Configuration -| Flag | Purpose | -|------|---------| -| `validatorReexecute` | Re-execute transactions to verify proposals | -| `fishermanMode` | Validate proposals but don't broadcast attestations (monitoring only) | -| `alwaysReexecuteBlockProposals` | Force re-execution even when not in committee | -| `slashBroadcastedInvalidBlockPenalty` | Penalty amount for invalid proposals (0 = disabled) | -| `validatorReexecuteDeadlineMs` | Time reserved at end of slot for propagation/publishing | -| `attestationPollingIntervalMs` | How often to poll for attestations when collecting | -| `disabledValidators` | Validator addresses to exclude from duties | +| Flag | Purpose | +| ------------------------------------- | --------------------------------------------------------------------- | +| `validatorReexecute` | Re-execute transactions to verify proposals | +| `fishermanMode` | Validate proposals but don't broadcast attestations (monitoring only) | +| `alwaysReexecuteBlockProposals` | Force re-execution even when not in committee | +| `slashBroadcastedInvalidBlockPenalty` | Penalty amount for invalid proposals (0 = disabled) | +| `validatorReexecuteDeadlineMs` | Time reserved at end of slot for propagation/publishing | +| `attestationPollingIntervalMs` | How often to poll for attestations when collecting | +| `disabledValidators` | Validator addresses to exclude from duties | + +### High Availability (HA) Keystore + +When running multiple validator nodes with the same validator keys in a high-availability setup, enable HA signing to prevent double-signing: + +| Environment Variable | Purpose | +| -------------------------------------- | ---------------------------------------------------------------------- | +| `VALIDATOR_HA_SIGNING_ENABLED` | Enable HA signing / slashing protection (default: false) | +| `VALIDATOR_HA_DATABASE_URL` | PostgreSQL connection string for coordination (required when enabled) | +| `VALIDATOR_HA_NODE_ID` | Unique identifier for this validator node (required when enabled) | +| `VALIDATOR_HA_POLLING_INTERVAL_MS` | How often to check duty status (default: 100) | +| `VALIDATOR_HA_SIGNING_TIMEOUT_MS` | Max wait for in-progress signing (default: 3000) | +| `VALIDATOR_HA_MAX_STUCK_DUTIES_AGE_MS` | Max 
age of stuck duties before cleanup (default: 2\*aztecSlotDuration) | + +When `VALIDATOR_HA_SIGNING_ENABLED=true`, the validator client automatically: + +- Creates an HA signer using the provided configuration +- Wraps the base keystore with `HAKeyStore` for HA-protected signing +- Coordinates signing across nodes via PostgreSQL to prevent double-signing +- Provides slashing protection to block conflicting signatures + +See [`@aztec/validator-ha-signer`](../validator-ha-signer/README.md) for more details. ### Fisherman Mode When `fishermanMode: true`, the validator: + - Validates all proposals (block and checkpoint) - Re-executes transactions - Creates attestations internally for validation @@ -179,6 +202,7 @@ This is useful for monitoring network health without participating in consensus. ### Key Methods **ValidatorClient** (`validator.ts`): + - `validateBlockProposal(proposal, sender)` → `boolean`: Validates block, optionally re-executes, emits slash events - `attestToCheckpointProposal(proposal, sender)` → `CheckpointAttestation[]?`: Validates checkpoint and creates attestations - `collectAttestations(proposal, required, deadline)` → `CheckpointAttestation[]`: Waits for attestations from other validators @@ -186,10 +210,12 @@ This is useful for monitoring network health without participating in consensus. - `createCheckpointProposal(...)` → `CheckpointProposal`: Creates and signs a checkpoint proposal **BlockProposalHandler** (`block_proposal_handler.ts`): + - `handleBlockProposal(proposal, sender, shouldReexecute)` → `ValidationResult`: Full block validation pipeline - `reexecuteTransactions(proposal, blockNumber, txs, messages)` → `ReexecutionResult`: Re-runs transactions and compares state **ValidationService** (`duties/validation_service.ts`): + - `createBlockProposal(...)` → `BlockProposal`: Signs block proposal with validator key - `createCheckpointProposal(...)` → `CheckpointProposal`: Signs checkpoint proposal - `attestToCheckpointProposal(proposal, attestors)` → `CheckpointAttestation[]`: Creates attestations for given addresses diff --git a/yarn-project/validator-client/package.json b/yarn-project/validator-client/package.json index 4b7519b2a060..20581871ad91 100644 --- a/yarn-project/validator-client/package.json +++ b/yarn-project/validator-client/package.json @@ -79,12 +79,14 @@ "@aztec/slasher": "workspace:^", "@aztec/stdlib": "workspace:^", "@aztec/telemetry-client": "workspace:^", + "@aztec/validator-ha-signer": "workspace:^", "koa": "^2.16.1", "koa-router": "^13.1.1", "tslib": "^2.4.0", "viem": "npm:@aztec/viem@2.38.2" }, "devDependencies": { + "@electric-sql/pglite": "^0.3.14", "@jest/globals": "^30.0.0", "@types/jest": "^30.0.0", "@types/node": "^22.15.17", diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index 71772402ff80..ce8356f86137 100644 --- a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -7,6 +7,7 @@ import { } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; import type { ValidatorClientConfig } from '@aztec/stdlib/interfaces/server'; +import { validatorHASignerConfigMappings } from '@aztec/validator-ha-signer/config'; export type { ValidatorClientConfig }; @@ -75,6 +76,7 @@ export const validatorClientConfigMappings: ConfigMappingsType { expect(attestations[0].getSender()).toEqual(addresses[0]); expect(attestations[1].getSender()).toEqual(addresses[1]); }); + + it('creates checkpoint proposal with different duty 
types for checkpoint and block', async () => { + // This test verifies the fix for HA double-signing issue where both checkpoint + // and block were incorrectly using the same CHECKPOINT_PROPOSAL duty type. + // Now they should use CHECKPOINT_PROPOSAL and BLOCK_PROPOSAL respectively. + + const txs = await Promise.all([Tx.random(), Tx.random()]); + const l2BlockHeader = makeL2BlockHeader(1, 2, 3); + const blockHeader = l2BlockHeader.toBlockHeader(); + const indexWithinCheckpoint = 0; + const archive = Fr.random(); + + // Create a spy keystore to capture signing contexts + const capturedContexts: Array<{ dutyType: DutyType; blockIndexWithinCheckpoint?: number }> = []; + const spyStore = { + ...store, + signMessageWithAddress: (address: EthAddress, message: Buffer32, context: any) => { + capturedContexts.push({ + dutyType: context.dutyType, + blockIndexWithinCheckpoint: context.blockIndexWithinCheckpoint, + }); + return store.signMessageWithAddress(address, message, context); + }, + getAddress: (index: number) => store.getAddress(index), + getAddresses: () => store.getAddresses(), + }; + const spyService = new ValidationService(spyStore as any); + + // Create checkpoint header + const checkpointHeader = l2BlockHeader.toCheckpointHeader(); + + // Create checkpoint proposal with lastBlock + const proposal = await spyService.createCheckpointProposal( + checkpointHeader, + archive, + { + blockHeader, + indexWithinCheckpoint, + txs, + }, + addresses[0], + { publishFullTxs: true }, + ); + + // Verify proposal was created successfully + expect(proposal.getSender()).toEqual(addresses[0]); + expect(proposal.lastBlock).toBeDefined(); + + // Verify we captured signing operations: + // 1. CHECKPOINT_PROPOSAL for the checkpoint itself + // 2. BLOCK_PROPOSAL for the block itself + // 3. 
TXS for the SignedTxs + expect(capturedContexts.length).toBe(3); + + // Find the checkpoint and block signatures + const checkpointSigs = capturedContexts.filter(c => c.dutyType === DutyType.CHECKPOINT_PROPOSAL); + const blockSigs = capturedContexts.filter(c => c.dutyType === DutyType.BLOCK_PROPOSAL); + const txsSigs = capturedContexts.filter(c => c.dutyType === DutyType.TXS); + + // Should have exactly 1 checkpoint signature (no blockIndexWithinCheckpoint) + expect(checkpointSigs.length).toBe(1); + expect(checkpointSigs[0].blockIndexWithinCheckpoint).toBeUndefined(); + + // Should have exactly 1 block signature (with blockIndexWithinCheckpoint); + // the SignedTxs signature is tracked separately under the TXS duty type + expect(blockSigs.length).toBe(1); + expect(blockSigs[0].blockIndexWithinCheckpoint).toBe(indexWithinCheckpoint); + + // Should have exactly 1 txs signature + expect(txsSigs.length).toBe(1); + expect(txsSigs[0].blockIndexWithinCheckpoint).toBeUndefined(); + }); }); diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 4a1ab7c90cc4..bdfc48bc7d3f 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -1,3 +1,4 @@ +import { BlockNumber, type CheckpointNumber, type SlotNumber } from '@aztec/foundation/branded-types'; import { Buffer32 } from '@aztec/foundation/buffer'; import { keccak256 } from '@aztec/foundation/crypto/keccak'; import { Fr } from '@aztec/foundation/curves/bn254'; @@ -18,6 +19,8 @@ import { } from '@aztec/stdlib/p2p'; import type { CheckpointHeader } from '@aztec/stdlib/rollup'; import type { BlockHeader, Tx } from '@aztec/stdlib/tx'; +import { DutyAlreadySignedError, SlashingProtectionError } from '@aztec/validator-ha-signer/errors'; +import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types'; import type { ValidatorKeyStore } from '../key_store/interface.js'; @@ -31,34 +34,40 @@ export class ValidationService { * Create a block proposal with the given header, archive, and transactions * * @param blockHeader - The block header - * @param indexWithinCheckpoint - Index of this block within the checkpoint (0-indexed) + * @param blockIndexWithinCheckpoint - Index of this block within the checkpoint (0-indexed), also used for the HA signing context * @param inHash - Hash of L1 to L2 messages for this checkpoint * @param archive - The archive of the current block - * @param txs - TxHash[] ordered list of transactions + * @param txs - Ordered list of transactions (Tx[]) + * @param proposerAttesterAddress - The address of the proposer/attester, or undefined + * @param options - Block proposal options (including broadcastInvalidBlockProposal for testing) * * @returns A block proposal signing the above information + * @throws DutyAlreadySignedError if the HA signer indicates the duty was already signed by another node + * @throws SlashingProtectionError if attempting to sign different data for the same slot */ public createBlockProposal( blockHeader: BlockHeader, - indexWithinCheckpoint: number, + blockIndexWithinCheckpoint: number, inHash: Fr, archive: Fr, txs: Tx[], proposerAttesterAddress: EthAddress | undefined, options: BlockProposalOptions, ): Promise<BlockProposal> { - const payloadSigner = this.getPayloadSigner(proposerAttesterAddress); - // For testing: change the new archive to trigger state_mismatch validation failure if (options.broadcastInvalidBlockProposal) { archive = Fr.random(); this.log.warn(`Creating INVALID block proposal for 
slot ${blockHeader.globalVariables.slotNumber}`); } + // Create a signer that uses the appropriate address + const address = proposerAttesterAddress ?? this.keyStore.getAddress(0); + const payloadSigner = (payload: Buffer32, context: SigningContext) => + this.keyStore.signMessageWithAddress(address, payload, context); + return BlockProposal.createProposalFromSigner( blockHeader, - indexWithinCheckpoint, + blockIndexWithinCheckpoint, inHash, archive, txs.map(tx => tx.getTxHash()), @@ -85,14 +94,18 @@ export class ValidationService { proposerAttesterAddress: EthAddress | undefined, options: CheckpointProposalOptions, ): Promise { - const payloadSigner = this.getPayloadSigner(proposerAttesterAddress); - // For testing: change the archive to trigger state_mismatch validation failure if (options.broadcastInvalidCheckpointProposal) { archive = Fr.random(); this.log.warn(`Creating INVALID checkpoint proposal for slot ${checkpointHeader.slotNumber}`); } + // Create a signer that takes payload and context, and uses the appropriate address + const payloadSigner = (payload: Buffer32, context: SigningContext) => { + const address = proposerAttesterAddress ?? this.keyStore.getAddress(0); + return this.keyStore.signMessageWithAddress(address, payload, context); + }; + // Last block to include in the proposal const lastBlock = lastBlockInfo && { blockHeader: lastBlockInfo.blockHeader, @@ -104,16 +117,6 @@ export class ValidationService { return CheckpointProposal.createProposalFromSigner(checkpointHeader, archive, lastBlock, payloadSigner); } - private getPayloadSigner(proposerAttesterAddress: EthAddress | undefined): (payload: Buffer32) => Promise { - if (proposerAttesterAddress !== undefined) { - return (payload: Buffer32) => this.keyStore.signMessageWithAddress(proposerAttesterAddress, payload); - } else { - // if there is no proposer attester address, just use the first signer - const signer = this.keyStore.getAddress(0); - return (payload: Buffer32) => this.keyStore.signMessageWithAddress(signer, payload); - } - } - /** * Attest with selection of validators to the given checkpoint proposal * @@ -133,19 +136,79 @@ export class ValidationService { const buf = Buffer32.fromBuffer( keccak256(payload.getPayloadToSign(SignatureDomainSeparator.checkpointAttestation)), ); - const signatures = await Promise.all( - attestors.map(attestor => this.keyStore.signMessageWithAddress(attestor, buf)), + + // TODO(spy/ha): Use checkpointNumber instead of blockNumber once CheckpointHeader includes it. + // Currently using lastBlock.blockNumber as a proxy for checkpoint identification in HA signing. + // blockNumber is NOT used for the primary key so it's safe to use here. + // See CheckpointHeader TODO and SigningContext types documentation. 
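+ // Note: proposal.blockNumber is expected to throw when the proposal carries no lastBlock, hence the try/catch fallback to BlockNumber(0) below.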
+ let blockNumber: BlockNumber; + try { + blockNumber = proposal.blockNumber; + } catch { + // Checkpoint proposal may not have lastBlock, use 0 as fallback + blockNumber = BlockNumber(0); + } + const context: SigningContext = { + slot: proposal.slotNumber, + blockNumber, + dutyType: DutyType.ATTESTATION, + }; + + // Sign each attestor in parallel, catching HA errors per-attestor + const results = await Promise.allSettled( + attestors.map(async attestor => { + const sig = await this.keyStore.signMessageWithAddress(attestor, buf, context); + // return new BlockAttestation(proposal.payload, sig, proposal.signature); + return new CheckpointAttestation(payload, sig, proposal.signature); + }), ); - return signatures.map(sig => new CheckpointAttestation(payload, sig, proposal.signature)); + + const attestations: CheckpointAttestation[] = []; + for (let i = 0; i < results.length; i++) { + const result = results[i]; + if (result.status === 'fulfilled') { + attestations.push(result.value); + } else { + const error = result.reason; + if (error instanceof DutyAlreadySignedError || error instanceof SlashingProtectionError) { + this.log.info( + `Attestation for slot ${proposal.slotNumber} by ${attestors[i]} already signed by another High-Availability node`, + ); + // Continue with remaining attestors + } else { + throw error; + } + } + } + + return attestations; } - async signAttestationsAndSigners( + /** + * Sign attestations and signers payload + * @param attestationsAndSigners - The attestations and signers to sign + * @param proposer - The proposer address to sign with + * @param slot - The slot number for HA signing context + * @param blockNumber - The block or checkpoint number for HA signing context + * @returns signature + * @throws DutyAlreadySignedError if already signed by another HA node + * @throws SlashingProtectionError if attempting to sign different data for same slot + */ + signAttestationsAndSigners( attestationsAndSigners: CommitteeAttestationsAndSigners, proposer: EthAddress, + slot: SlotNumber, + blockNumber: BlockNumber | CheckpointNumber, ): Promise { + const context: SigningContext = { + slot, + blockNumber, + dutyType: DutyType.ATTESTATIONS_AND_SIGNERS, + }; + const buf = Buffer32.fromBuffer( keccak256(attestationsAndSigners.getPayloadToSign(SignatureDomainSeparator.attestationsAndSigners)), ); - return await this.keyStore.signMessageWithAddress(proposer, buf); + return this.keyStore.signMessageWithAddress(proposer, buf, context); } } diff --git a/yarn-project/validator-client/src/key_store/ha_key_store.test.ts b/yarn-project/validator-client/src/key_store/ha_key_store.test.ts new file mode 100644 index 000000000000..b0233f4bce01 --- /dev/null +++ b/yarn-project/validator-client/src/key_store/ha_key_store.test.ts @@ -0,0 +1,442 @@ +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { Buffer32 } from '@aztec/foundation/buffer'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import type { Signature } from '@aztec/foundation/eth-signature'; +import type { EthRemoteSignerConfig } from '@aztec/node-keystore'; +import type { AztecAddress } from '@aztec/stdlib/aztec-address'; +import { DutyAlreadySignedError, SlashingProtectionError } from '@aztec/validator-ha-signer/errors'; +import { DutyType, type SigningContext, isHAProtectedContext } from '@aztec/validator-ha-signer/types'; +import type { ValidatorHASigner } from '@aztec/validator-ha-signer/validator-ha-signer'; + +import { beforeEach, describe, expect, it, jest } from 
'@jest/globals'; +import type { TypedDataDefinition } from 'viem'; + +import { HAKeyStore } from './ha_key_store.js'; +import type { ExtendedValidatorKeyStore } from './interface.js'; + +// Test data constants +const VALIDATOR_ADDRESS = EthAddress.random(); +const VALIDATOR_ADDRESS_2 = EthAddress.random(); +const SIGNING_ROOT = Buffer32.random(); +const NODE_ID = 'test-node-1'; +const SIGNATURE_STRING = '0xsignature123'; + +// Mock signature +const mockSignature = { + toString: () => SIGNATURE_STRING, + r: Buffer32.random(), + s: Buffer32.random(), + v: 27, + isEmpty: false, +} as unknown as Signature; + +// Mock typed data +const mockTypedData: TypedDataDefinition = { + domain: { + name: 'Test', + version: '1', + chainId: 1, + }, + types: { + Message: [{ name: 'content', type: 'string' }], + }, + primaryType: 'Message', + message: { + content: 'test message', + }, +}; + +describe('HAKeyStore', () => { + let mockBaseKeyStore: jest.Mocked; + let mockHASigner: jest.Mocked; + + beforeEach(() => { + // Create mock base key store + mockBaseKeyStore = { + getAddress: jest.fn<(index: number) => EthAddress>(), + getAddresses: jest.fn<() => EthAddress[]>(), + signTypedData: jest.fn<(typedData: TypedDataDefinition, context: SigningContext) => Promise>(), + signTypedDataWithAddress: + jest.fn<(address: EthAddress, typedData: TypedDataDefinition, context: SigningContext) => Promise>(), + signMessage: jest.fn<(message: Buffer32, context: SigningContext) => Promise>(), + signMessageWithAddress: + jest.fn<(address: EthAddress, message: Buffer32, context: SigningContext) => Promise>(), + getAttesterAddresses: jest.fn<() => EthAddress[]>(), + getCoinbaseAddress: jest.fn<(attesterAddress: EthAddress) => EthAddress>(), + getPublisherAddresses: jest.fn<(attesterAddress: EthAddress) => EthAddress[]>(), + getFeeRecipient: jest.fn<(attesterAddress: EthAddress) => AztecAddress>(), + getRemoteSignerConfig: jest.fn<(attesterAddress: EthAddress) => EthRemoteSignerConfig | undefined>(), + start: jest.fn<() => Promise>(), + stop: jest.fn<() => Promise>(), + }; + + // Create mock HA signer + mockHASigner = { + isEnabled: true, + nodeId: NODE_ID, + signWithProtection: jest.fn(), + start: jest.fn(), + stop: jest.fn(), + } as unknown as jest.Mocked; + + // Default implementations + mockBaseKeyStore.getAddress.mockReturnValue(VALIDATOR_ADDRESS); + mockBaseKeyStore.getAddresses.mockReturnValue([VALIDATOR_ADDRESS, VALIDATOR_ADDRESS_2]); + mockBaseKeyStore.signMessageWithAddress.mockResolvedValue(mockSignature); + mockBaseKeyStore.signTypedDataWithAddress.mockResolvedValue(mockSignature); + mockBaseKeyStore.signMessage.mockResolvedValue([mockSignature]); + mockBaseKeyStore.signTypedData.mockResolvedValue([mockSignature]); + mockHASigner.signWithProtection.mockResolvedValue(mockSignature); + }); + + describe('ValidatorKeyStore interface delegation (non-HA duties bypass HA)', () => { + let haKeyStore: HAKeyStore; + + beforeEach(() => { + haKeyStore = new HAKeyStore(mockBaseKeyStore, mockHASigner); + }); + + it('should delegate getAddress to base key store', () => { + const result = haKeyStore.getAddress(0); + expect(result).toBe(VALIDATOR_ADDRESS); + expect(mockBaseKeyStore.getAddress).toHaveBeenCalledWith(0); + }); + + it('should delegate getAddresses to base key store', () => { + const result = haKeyStore.getAddresses(); + expect(result).toEqual([VALIDATOR_ADDRESS, VALIDATOR_ADDRESS_2]); + expect(mockBaseKeyStore.getAddresses).toHaveBeenCalled(); + }); + + describe('AUTH_REQUEST duties bypass HA', () => { + const 
authRequestContext: SigningContext = { dutyType: DutyType.AUTH_REQUEST }; + + it('should delegate signTypedData to base key store for AUTH_REQUEST', async () => { + const result = await haKeyStore.signTypedData(mockTypedData, authRequestContext); + expect(result).toEqual([mockSignature]); + expect(mockBaseKeyStore.signTypedData).toHaveBeenCalledWith(mockTypedData, authRequestContext); + expect(mockHASigner.signWithProtection).not.toHaveBeenCalled(); + }); + + it('should delegate signMessage to base key store for AUTH_REQUEST', async () => { + const result = await haKeyStore.signMessage(SIGNING_ROOT, authRequestContext); + expect(result).toEqual([mockSignature]); + expect(mockBaseKeyStore.signMessage).toHaveBeenCalledWith(SIGNING_ROOT, authRequestContext); + expect(mockHASigner.signWithProtection).not.toHaveBeenCalled(); + }); + + it('should delegate signTypedDataWithAddress without HA for AUTH_REQUEST', async () => { + const result = await haKeyStore.signTypedDataWithAddress(VALIDATOR_ADDRESS, mockTypedData, authRequestContext); + expect(result).toBe(mockSignature); + expect(mockBaseKeyStore.signTypedDataWithAddress).toHaveBeenCalledWith( + VALIDATOR_ADDRESS, + mockTypedData, + authRequestContext, + ); + expect(mockHASigner.signWithProtection).not.toHaveBeenCalled(); + }); + + it('should delegate signMessageWithAddress without HA for AUTH_REQUEST', async () => { + const result = await haKeyStore.signMessageWithAddress(VALIDATOR_ADDRESS, SIGNING_ROOT, authRequestContext); + expect(result).toBe(mockSignature); + expect(mockBaseKeyStore.signMessageWithAddress).toHaveBeenCalledWith( + VALIDATOR_ADDRESS, + SIGNING_ROOT, + authRequestContext, + ); + expect(mockHASigner.signWithProtection).not.toHaveBeenCalled(); + }); + }); + + describe('TXS duties bypass HA', () => { + const txsContext: SigningContext = { dutyType: DutyType.TXS }; + + it('should delegate signTypedData to base key store for TXS', async () => { + const result = await haKeyStore.signTypedData(mockTypedData, txsContext); + expect(result).toEqual([mockSignature]); + expect(mockBaseKeyStore.signTypedData).toHaveBeenCalledWith(mockTypedData, txsContext); + expect(mockHASigner.signWithProtection).not.toHaveBeenCalled(); + }); + + it('should delegate signMessage to base key store for TXS', async () => { + const result = await haKeyStore.signMessage(SIGNING_ROOT, txsContext); + expect(result).toEqual([mockSignature]); + expect(mockBaseKeyStore.signMessage).toHaveBeenCalledWith(SIGNING_ROOT, txsContext); + expect(mockHASigner.signWithProtection).not.toHaveBeenCalled(); + }); + + it('should delegate signTypedDataWithAddress without HA for TXS', async () => { + const result = await haKeyStore.signTypedDataWithAddress(VALIDATOR_ADDRESS, mockTypedData, txsContext); + expect(result).toBe(mockSignature); + expect(mockBaseKeyStore.signTypedDataWithAddress).toHaveBeenCalledWith( + VALIDATOR_ADDRESS, + mockTypedData, + txsContext, + ); + expect(mockHASigner.signWithProtection).not.toHaveBeenCalled(); + }); + + it('should delegate signMessageWithAddress without HA for TXS', async () => { + const result = await haKeyStore.signMessageWithAddress(VALIDATOR_ADDRESS, SIGNING_ROOT, txsContext); + expect(result).toBe(mockSignature); + expect(mockBaseKeyStore.signMessageWithAddress).toHaveBeenCalledWith( + VALIDATOR_ADDRESS, + SIGNING_ROOT, + txsContext, + ); + expect(mockHASigner.signWithProtection).not.toHaveBeenCalled(); + }); + }); + }); + + describe('signMessageWithAddress with context (HA enabled)', () => { + let haKeyStore: HAKeyStore; + const 
context: SigningContext = { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }; + + beforeEach(() => { + haKeyStore = new HAKeyStore(mockBaseKeyStore, mockHASigner); + }); + + it('should return signature on successful signing', async () => { + const result = await haKeyStore.signMessageWithAddress(VALIDATOR_ADDRESS, SIGNING_ROOT, context); + + expect(result).toBe(mockSignature); + expect(mockHASigner.signWithProtection).toHaveBeenCalledWith( + VALIDATOR_ADDRESS, + SIGNING_ROOT, + { + slot: context.slot, + blockNumber: context.blockNumber, + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + expect.any(Function), + ); + }); + + it('should throw DutyAlreadySignedError when duty was already signed', async () => { + const error = new DutyAlreadySignedError(SlotNumber(100), DutyType.BLOCK_PROPOSAL, 0, 'other-node'); + mockHASigner.signWithProtection.mockRejectedValue(error); + + await expect(haKeyStore.signMessageWithAddress(VALIDATOR_ADDRESS, SIGNING_ROOT, context)).rejects.toThrow( + DutyAlreadySignedError, + ); + }); + + it('should throw SlashingProtectionError when slashing protection triggers', async () => { + const error = new SlashingProtectionError( + SlotNumber(100), + DutyType.BLOCK_PROPOSAL, + 0, + '0xexisting', + '0xattempted', + 'other-node', + ); + mockHASigner.signWithProtection.mockRejectedValue(error); + + await expect(haKeyStore.signMessageWithAddress(VALIDATOR_ADDRESS, SIGNING_ROOT, context)).rejects.toThrow( + SlashingProtectionError, + ); + }); + + it('should re-throw unexpected errors', async () => { + const unexpectedError = new Error('Unexpected error'); + mockHASigner.signWithProtection.mockRejectedValue(unexpectedError); + + await expect(haKeyStore.signMessageWithAddress(VALIDATOR_ADDRESS, SIGNING_ROOT, context)).rejects.toThrow( + 'Unexpected error', + ); + }); + + it('should call base key store through signWithProtection callback', async () => { + mockHASigner.signWithProtection.mockImplementation((_addr, _root, _ctx, signFn) => { + return signFn(SIGNING_ROOT); + }); + + await haKeyStore.signMessageWithAddress(VALIDATOR_ADDRESS, SIGNING_ROOT, context); + + expect(mockBaseKeyStore.signMessageWithAddress).toHaveBeenCalledWith(VALIDATOR_ADDRESS, SIGNING_ROOT, context); + }); + }); + + describe('signTypedDataWithAddress with context', () => { + let haKeyStore: HAKeyStore; + const context: SigningContext = { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.ATTESTATION, + }; + + beforeEach(() => { + haKeyStore = new HAKeyStore(mockBaseKeyStore, mockHASigner); + }); + + it('should return signature on successful signing', async () => { + const result = await haKeyStore.signTypedDataWithAddress(VALIDATOR_ADDRESS, mockTypedData, context); + + expect(result).toBe(mockSignature); + expect(mockHASigner.signWithProtection).toHaveBeenCalledWith( + VALIDATOR_ADDRESS, + expect.any(Buffer32), + { + slot: context.slot, + blockNumber: context.blockNumber, + dutyType: DutyType.ATTESTATION, + }, + expect.any(Function), + ); + }); + + it('should throw DutyAlreadySignedError when duty was already signed', async () => { + const error = new DutyAlreadySignedError(SlotNumber(100), DutyType.ATTESTATION, -1, 'other-node'); + mockHASigner.signWithProtection.mockRejectedValue(error); + + await expect(haKeyStore.signTypedDataWithAddress(VALIDATOR_ADDRESS, mockTypedData, context)).rejects.toThrow( + DutyAlreadySignedError, + ); + }); + + it('should throw 
SlashingProtectionError when slashing protection triggers', async () => { + const error = new SlashingProtectionError( + SlotNumber(100), + DutyType.ATTESTATION, + -1, + '0xexisting', + '0xattempted', + 'other-node', + ); + mockHASigner.signWithProtection.mockRejectedValue(error); + + await expect(haKeyStore.signTypedDataWithAddress(VALIDATOR_ADDRESS, mockTypedData, context)).rejects.toThrow( + SlashingProtectionError, + ); + }); + + it('should call base key store signTypedDataWithAddress through callback', async () => { + mockHASigner.signWithProtection.mockImplementation((_addr, _root, _ctx, signFn) => { + return signFn(Buffer32.random()); + }); + + await haKeyStore.signTypedDataWithAddress(VALIDATOR_ADDRESS, mockTypedData, context); + + expect(mockBaseKeyStore.signTypedDataWithAddress).toHaveBeenCalledWith(VALIDATOR_ADDRESS, mockTypedData, context); + }); + }); + + describe('all duty types', () => { + it('should handle all HA-protected duty types', async () => { + const haKeyStore = new HAKeyStore(mockBaseKeyStore, mockHASigner); + + // Build contexts for each HA-protected duty type + const haProtectedContexts: SigningContext[] = [ + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + { slot: SlotNumber(100), blockNumber: BlockNumber(50), dutyType: DutyType.ATTESTATION }, + { slot: SlotNumber(100), blockNumber: BlockNumber(50), dutyType: DutyType.ATTESTATIONS_AND_SIGNERS }, + { slot: SlotNumber(100), blockNumber: BlockNumber(50), dutyType: DutyType.CHECKPOINT_PROPOSAL }, + // Vote duties only need slot (no blockNumber) + { slot: SlotNumber(100), dutyType: DutyType.GOVERNANCE_VOTE }, + { slot: SlotNumber(100), dutyType: DutyType.SLASHING_VOTE }, + ]; + + for (const context of haProtectedContexts) { + const result = await haKeyStore.signMessageWithAddress(VALIDATOR_ADDRESS, SIGNING_ROOT, context); + expect(result).toBe(mockSignature); + } + + expect(mockHASigner.signWithProtection).toHaveBeenCalledTimes(haProtectedContexts.length); + }); + }); + + describe('lifecycle methods', () => { + let haKeyStore: HAKeyStore; + + beforeEach(() => { + haKeyStore = new HAKeyStore(mockBaseKeyStore, mockHASigner); + }); + + it('should run start() on the HA signer', async () => { + await haKeyStore.start(); + expect(mockHASigner.start).toHaveBeenCalledTimes(1); + }); + + it('should run stop() on the HA signer', async () => { + await haKeyStore.stop(); + expect(mockHASigner.stop).toHaveBeenCalledTimes(1); + }); + }); + + describe('isHAProtectedContext type guard', () => { + it('should return false for AUTH_REQUEST', () => { + const context: SigningContext = { dutyType: DutyType.AUTH_REQUEST }; + expect(isHAProtectedContext(context)).toBe(false); + }); + + it('should return false for TXS', () => { + const context: SigningContext = { dutyType: DutyType.TXS }; + expect(isHAProtectedContext(context)).toBe(false); + }); + + it('should return true for BLOCK_PROPOSAL', () => { + const context: SigningContext = { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }; + expect(isHAProtectedContext(context)).toBe(true); + }); + + it('should return true for ATTESTATION', () => { + const context: SigningContext = { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.ATTESTATION, + }; + expect(isHAProtectedContext(context)).toBe(true); + }); + + it('should return true for CHECKPOINT_PROPOSAL', () => { + const context: SigningContext = 
{ + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.CHECKPOINT_PROPOSAL, + }; + expect(isHAProtectedContext(context)).toBe(true); + }); + + it('should return true for ATTESTATIONS_AND_SIGNERS', () => { + const context: SigningContext = { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.ATTESTATIONS_AND_SIGNERS, + }; + expect(isHAProtectedContext(context)).toBe(true); + }); + + it('should return true for GOVERNANCE_VOTE', () => { + const context: SigningContext = { + slot: SlotNumber(100), + dutyType: DutyType.GOVERNANCE_VOTE, + }; + expect(isHAProtectedContext(context)).toBe(true); + }); + + it('should return true for SLASHING_VOTE', () => { + const context: SigningContext = { + slot: SlotNumber(100), + dutyType: DutyType.SLASHING_VOTE, + }; + expect(isHAProtectedContext(context)).toBe(true); + }); + }); +}); diff --git a/yarn-project/validator-client/src/key_store/ha_key_store.ts b/yarn-project/validator-client/src/key_store/ha_key_store.ts new file mode 100644 index 000000000000..1327ee980155 --- /dev/null +++ b/yarn-project/validator-client/src/key_store/ha_key_store.ts @@ -0,0 +1,269 @@ +/** + * High Availability Key Store + * + * A ValidatorKeyStore wrapper that adds slashing protection for HA validator setups. + * When multiple validator nodes are running, only one node will sign for a given duty. + */ +import { Buffer32 } from '@aztec/foundation/buffer'; +import type { EthAddress } from '@aztec/foundation/eth-address'; +import type { Signature } from '@aztec/foundation/eth-signature'; +import { createLogger } from '@aztec/foundation/log'; +import type { EthRemoteSignerConfig } from '@aztec/node-keystore'; +import type { AztecAddress } from '@aztec/stdlib/aztec-address'; +import { DutyAlreadySignedError, SlashingProtectionError } from '@aztec/validator-ha-signer/errors'; +import { + type HAProtectedSigningContext, + type SigningContext, + isHAProtectedContext, +} from '@aztec/validator-ha-signer/types'; +import type { ValidatorHASigner } from '@aztec/validator-ha-signer/validator-ha-signer'; + +import { type TypedDataDefinition, hashTypedData } from 'viem'; + +import type { ExtendedValidatorKeyStore } from './interface.js'; + +/** + * High Availability Key Store + * + * Wraps a base ExtendedValidatorKeyStore and ValidatorHASigner to provide + * HA-protected signing operations (when context is provided). + * + * The extended interface methods (getAttesterAddresses, getCoinbaseAddress, etc.) + * are pure pass-through since they don't require HA coordination. 
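+ * + * HA protection applies to the duty types covered by isHAProtectedContext (block and checkpoint proposals, + * attestations, attestations-and-signers, and governance/slashing votes); AUTH_REQUEST and TXS duties bypass + * it, since signing those more than once is safe.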
+ * + * Usage: + * ```typescript + * const baseKeyStore = NodeKeystoreAdapter.fromPrivateKeys(privateKeys); + * const haSigner = new ValidatorHASigner(db, config); + * const haKeyStore = new HAKeyStore(baseKeyStore, haSigner); + * + * // Without context - signs directly (no HA protection) + * const sig = await haKeyStore.signMessageWithAddress(addr, msg); + * + * // With context - HA protected, throws DutyAlreadySignedError if already signed + * const result = await haKeyStore.signMessageWithAddress(addr, msg, { + * slot: 100n, + * blockNumber: 50n, + * dutyType: DutyType.BLOCK_PROPOSAL, + * }); + * ``` + */ +export class HAKeyStore implements ExtendedValidatorKeyStore { + private readonly log = createLogger('ha-key-store'); + + constructor( + private readonly baseKeyStore: ExtendedValidatorKeyStore, + private readonly haSigner: ValidatorHASigner, + ) { + this.log.info('HAKeyStore initialized', { + nodeId: haSigner.nodeId, + }); + } + + /** + * Sign typed data with all addresses. + * Coordinates across nodes to prevent double-signing for most duty types. + * AUTH_REQUEST and TXS duties bypass HA protection since signing multiple times is safe. + * Returns only signatures that were successfully claimed by this node. + */ + async signTypedData(typedData: TypedDataDefinition, context: SigningContext): Promise { + // no need for HA protection on auth request and txs signatures + if (!isHAProtectedContext(context)) { + return this.baseKeyStore.signTypedData(typedData, context); + } + + // Sign each address with HA protection + const addresses = this.getAddresses(); + const results = await Promise.allSettled( + addresses.map(addr => this.signTypedDataWithAddress(addr, typedData, context)), + ); + + // Filter out failures (already signed by other nodes or other errors) + return results + .filter((result): result is PromiseFulfilledResult => { + if (result.status === 'fulfilled') { + return true; + } + // Log expected HA errors (already signed) at debug level + if (result.reason instanceof DutyAlreadySignedError) { + this.log.debug(`Duty already signed by another node`, { + dutyType: context.dutyType, + slot: context.slot, + signedByNode: result.reason.signedByNode, + }); + return false; + } + // Re-throw unexpected errors + throw result.reason; + }) + .map(result => result.value); + } + + /** + * Sign a message with all addresses. + * Coordinates across nodes to prevent double-signing for most duty types. + * AUTH_REQUEST and TXS duties bypass HA protection since signing multiple times is safe. + * Returns only signatures that were successfully claimed by this node. 
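+ * Any other per-address failure (including SlashingProtectionError) is re-thrown.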
+ */ + async signMessage(message: Buffer32, context: SigningContext): Promise { + // no need for HA protection on auth request and txs signatures + if (!isHAProtectedContext(context)) { + return this.baseKeyStore.signMessage(message, context); + } + + // Sign each address with HA protection + const addresses = this.getAddresses(); + const results = await Promise.allSettled( + addresses.map(addr => this.signMessageWithAddress(addr, message, context)), + ); + + // Filter out failures (already signed by other nodes or other errors) + return results + .filter((result): result is PromiseFulfilledResult => { + if (result.status === 'fulfilled') { + return true; + } + // Log expected HA errors (already signed) at debug level + if (result.reason instanceof DutyAlreadySignedError) { + this.log.debug(`Duty already signed by another node`, { + dutyType: context.dutyType, + slot: context.slot, + signedByNode: result.reason.signedByNode, + }); + return false; + } + // Re-throw unexpected errors + throw result.reason; + }) + .map(result => result.value); + } + + /** + * Sign typed data with a specific address. + * Coordinates across nodes to prevent double-signing for most duty types. + * AUTH_REQUEST and TXS duties bypass HA protection since signing multiple times is safe. + * @throws DutyAlreadySignedError if the duty was already signed by another node + * @throws SlashingProtectionError if attempting to sign different data for the same slot + */ + async signTypedDataWithAddress( + address: EthAddress, + typedData: TypedDataDefinition, + context: SigningContext, + ): Promise { + // AUTH_REQUEST and TXS bypass HA protection - multiple signatures are safe + if (!isHAProtectedContext(context)) { + return this.baseKeyStore.signTypedDataWithAddress(address, typedData, context); + } + + // Compute signing root from typed data for HA tracking + const digest = hashTypedData(typedData); + const messageHash = Buffer32.fromString(digest); + + try { + return await this.haSigner.signWithProtection(address, messageHash, context, () => + this.baseKeyStore.signTypedDataWithAddress(address, typedData, context), + ); + } catch (error) { + this.processSigningError(error, context); + throw error; + } + } + + /** + * Sign a message with a specific address. + * Coordinates across nodes to prevent double-signing for most duty types. + * AUTH_REQUEST and TXS duties bypass HA protection since signing multiple times is safe. 
+ * @throws DutyAlreadySignedError if the duty was already signed by another node + * @throws SlashingProtectionError if attempting to sign different data for the same slot + */ + async signMessageWithAddress(address: EthAddress, message: Buffer32, context: SigningContext): Promise { + // no need for HA protection on auth request and txs signatures + if (!isHAProtectedContext(context)) { + return this.baseKeyStore.signMessageWithAddress(address, message, context); + } + + try { + return await this.haSigner.signWithProtection(address, message, context, messageHash => + this.baseKeyStore.signMessageWithAddress(address, messageHash, context), + ); + } catch (error) { + this.processSigningError(error, context); + throw error; + } + } + + // ───────────────────────────────────────────────────────────────────────────── + // pass-through methods (no HA logic needed) + // ───────────────────────────────────────────────────────────────────────────── + + getAddress(index: number): EthAddress { + return this.baseKeyStore.getAddress(index); + } + + getAddresses(): EthAddress[] { + return this.baseKeyStore.getAddresses(); + } + + getAttesterAddresses(): EthAddress[] { + return this.baseKeyStore.getAttesterAddresses(); + } + + getCoinbaseAddress(attesterAddress: EthAddress): EthAddress { + return this.baseKeyStore.getCoinbaseAddress(attesterAddress); + } + + getPublisherAddresses(attesterAddress: EthAddress): EthAddress[] { + return this.baseKeyStore.getPublisherAddresses(attesterAddress); + } + + getFeeRecipient(attesterAddress: EthAddress): AztecAddress { + return this.baseKeyStore.getFeeRecipient(attesterAddress); + } + + getRemoteSignerConfig(attesterAddress: EthAddress): EthRemoteSignerConfig | undefined { + return this.baseKeyStore.getRemoteSignerConfig(attesterAddress); + } + + /** + * Process signing errors from the HA signer. + * Logs expected HA errors (already signed) at appropriate levels. + * Re-throws unexpected errors. 
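+ * Callers re-throw the original error after invoking this method, so known HA errors still propagate to them.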
+ */ + private processSigningError(error: unknown, context: HAProtectedSigningContext) { + if (error instanceof DutyAlreadySignedError) { + this.log.debug(`Duty already signed by another node with the same payload`, { + dutyType: context.dutyType, + slot: context.slot, + signedByNode: error.signedByNode, + }); + return; + } + + if (error instanceof SlashingProtectionError) { + this.log.warn(`Duty already signed by another node with different payload`, { + dutyType: context.dutyType, + slot: context.slot, + existingMessageHash: error.existingMessageHash, + attemptedMessageHash: error.attemptedMessageHash, + }); + return; + } + + // Re-throw errors + throw error; + } + + /** + * Start the high-availability key store + */ + public start(): Promise { + return Promise.resolve(this.haSigner.start()); + } + + /** + * Stop the high-availability key store + */ + public async stop() { + await this.haSigner.stop(); + } +} diff --git a/yarn-project/validator-client/src/key_store/index.ts b/yarn-project/validator-client/src/key_store/index.ts index 7f50fd483024..189838c41f16 100644 --- a/yarn-project/validator-client/src/key_store/index.ts +++ b/yarn-project/validator-client/src/key_store/index.ts @@ -2,3 +2,4 @@ export * from './interface.js'; export * from './local_key_store.js'; export * from './node_keystore_adapter.js'; export * from './web3signer_key_store.js'; +export * from './ha_key_store.js'; diff --git a/yarn-project/validator-client/src/key_store/interface.ts b/yarn-project/validator-client/src/key_store/interface.ts index 5c9900e1ddb2..b4f4a3f692c7 100644 --- a/yarn-project/validator-client/src/key_store/interface.ts +++ b/yarn-project/validator-client/src/key_store/interface.ts @@ -3,6 +3,7 @@ import type { EthAddress } from '@aztec/foundation/eth-address'; import type { Signature } from '@aztec/foundation/eth-signature'; import type { EthRemoteSignerConfig } from '@aztec/node-keystore'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; +import type { SigningContext } from '@aztec/validator-ha-signer/types'; import type { TypedDataDefinition } from 'viem'; @@ -26,17 +27,45 @@ export interface ValidatorKeyStore { */ getAddresses(): EthAddress[]; - signTypedData(typedData: TypedDataDefinition): Promise; - signTypedDataWithAddress(address: EthAddress, typedData: TypedDataDefinition): Promise; + /** + * Sign typed data with all keystore private keys + * @param typedData - The complete EIP-712 typed data structure + * @param context - Signing context for HA slashing protection + * @returns signatures (when context provided with HA, only successfully claimed signatures are returned) + */ + signTypedData(typedData: TypedDataDefinition, context: SigningContext): Promise; + + /** + * Sign typed data with a specific address's private key + * @param address - The address of the signer to use + * @param typedData - The complete EIP-712 typed data structure + * @param context - Signing context for HA slashing protection + * @returns signature + */ + signTypedDataWithAddress( + address: EthAddress, + typedData: TypedDataDefinition, + context: SigningContext, + ): Promise; + /** * Flavor of sign message that followed EIP-712 eth signed message prefix * Note: this is only required when we are using ecdsa signatures over secp256k1 * * @param message - The message to sign. - * @returns The signatures. + * @param context - Signing context for HA slashing protection + * @returns The signatures (when context provided with HA, only successfully claimed signatures are returned). 
*/ - signMessage(message: Buffer32): Promise; - signMessageWithAddress(address: EthAddress, message: Buffer32): Promise; + signMessage(message: Buffer32, context: SigningContext): Promise; + + /** + * Sign a message with a specific address's private key + * @param address - The address of the signer to use + * @param message - The message to sign + * @param context - Signing context for HA slashing protection + * @returns signature + */ + signMessageWithAddress(address: EthAddress, message: Buffer32, context: SigningContext): Promise; } /** @@ -79,4 +108,14 @@ export interface ExtendedValidatorKeyStore extends ValidatorKeyStore { * @returns the remote signer configuration or undefined */ getRemoteSignerConfig(attesterAddress: EthAddress): EthRemoteSignerConfig | undefined; + + /** + * Start the key store + */ + start(): Promise; + + /** + * Stop the key store + */ + stop(): Promise; } diff --git a/yarn-project/validator-client/src/key_store/local_key_store.ts b/yarn-project/validator-client/src/key_store/local_key_store.ts index 39f7dc276b8c..250718c16b80 100644 --- a/yarn-project/validator-client/src/key_store/local_key_store.ts +++ b/yarn-project/validator-client/src/key_store/local_key_store.ts @@ -2,6 +2,7 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import type { EthAddress } from '@aztec/foundation/eth-address'; import type { Signature } from '@aztec/foundation/eth-signature'; +import type { SigningContext } from '@aztec/validator-ha-signer/types'; import { type TypedDataDefinition, hashTypedData } from 'viem'; @@ -46,9 +47,10 @@ export class LocalKeyStore implements ValidatorKeyStore { /** * Sign a message with all keystore private keys * @param typedData - The complete EIP-712 typed data structure (domain, types, primaryType, message) + * @param _context - Signing context (ignored by LocalKeyStore, used for HA protection) * @return signature */ - public signTypedData(typedData: TypedDataDefinition): Promise { + public signTypedData(typedData: TypedDataDefinition, _context: SigningContext): Promise { const digest = hashTypedData(typedData); return Promise.all(this.signers.map(signer => signer.sign(Buffer32.fromString(digest)))); } @@ -57,10 +59,15 @@ export class LocalKeyStore implements ValidatorKeyStore { * Sign a message with a specific address's private key * @param address - The address of the signer to use * @param typedData - The complete EIP-712 typed data structure (domain, types, primaryType, message) + * @param _context - Signing context (ignored by LocalKeyStore, used for HA protection) * @returns signature for the specified address * @throws Error if the address is not found in the keystore */ - public signTypedDataWithAddress(address: EthAddress, typedData: TypedDataDefinition): Promise { + public signTypedDataWithAddress( + address: EthAddress, + typedData: TypedDataDefinition, + _context: SigningContext, + ): Promise { const signer = this.signersByAddress.get(address.toString()); if (!signer) { throw new Error(`No signer found for address ${address.toString()}`); @@ -73,9 +80,10 @@ export class LocalKeyStore implements ValidatorKeyStore { * Sign a message using eth_sign with all keystore private keys * * @param message - The message to sign + * @param _context - Signing context (ignored by LocalKeyStore, used for HA protection) * @return signatures */ - public signMessage(message: Buffer32): Promise { + public signMessage(message: Buffer32, _context: SigningContext): Promise { 
return Promise.all(this.signers.map(signer => signer.signMessage(message))); } @@ -83,10 +91,11 @@ export class LocalKeyStore implements ValidatorKeyStore { * Sign a message using eth_sign with a specific address's private key * @param address - The address of the signer to use * @param message - The message to sign + * @param _context - Signing context (ignored by LocalKeyStore, used for HA protection) * @returns signature for the specified address * @throws Error if the address is not found in the keystore */ - public signMessageWithAddress(address: EthAddress, message: Buffer32): Promise { + public signMessageWithAddress(address: EthAddress, message: Buffer32, _context: SigningContext): Promise { const signer = this.signersByAddress.get(address.toString()); if (!signer) { throw new Error(`No signer found for address ${address.toString()}`); diff --git a/yarn-project/validator-client/src/key_store/node_keystore_adapter.test.ts b/yarn-project/validator-client/src/key_store/node_keystore_adapter.test.ts index f84ba6972076..c1faf4cc8259 100644 --- a/yarn-project/validator-client/src/key_store/node_keystore_adapter.test.ts +++ b/yarn-project/validator-client/src/key_store/node_keystore_adapter.test.ts @@ -1,12 +1,23 @@ +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Buffer32 } from '@aztec/foundation/buffer'; import { EthAddress } from '@aztec/foundation/eth-address'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; +import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types'; import { beforeEach, describe, expect, it } from '@jest/globals'; import type { TypedDataDefinition } from 'viem'; import { NodeKeystoreAdapter } from './node_keystore_adapter.js'; +// Helper to create a mock signing context for testing +function createMockContext(): SigningContext { + return { + slot: SlotNumber(1), + blockNumber: BlockNumber(1), + dutyType: DutyType.ATTESTATION, + }; +} + // ---- Test data -------------------------------------------------------------- const K = { @@ -277,7 +288,7 @@ describe('NodeKeystoreAdapter', () => { it('signTypedData across attesters', async () => { const ad = NodeKeystoreAdapter.fromKeystoreConfig(pkOnlyCfg); - const sigs = await ad.signTypedData(mkTypedData()); + const sigs = await ad.signTypedData(mkTypedData(), createMockContext()); expect(sigs).toHaveLength(2); expect(new Set(sigs.map(s => s.toString())).size).toBe(2); }); @@ -285,26 +296,29 @@ describe('NodeKeystoreAdapter', () => { it('signTypedDataWithAddress (different keys -> different sigs)', async () => { const adaptor = NodeKeystoreAdapter.fromKeystoreConfig(pkOnlyCfg); const td = mkTypedData(); - const s1 = await adaptor.signTypedDataWithAddress(addr(A.ATTESTER_1), td); - const s3 = await adaptor.signTypedDataWithAddress(addr(A.ATTESTER_3), td); + const context = createMockContext(); + const s1 = await adaptor.signTypedDataWithAddress(addr(A.ATTESTER_1), td, context); + const s3 = await adaptor.signTypedDataWithAddress(addr(A.ATTESTER_3), td, context); expect(s1.toString()).not.toBe(s3.toString()); }); it('signMessage + signMessageWithAddress', async () => { const adaptor = NodeKeystoreAdapter.fromKeystoreConfig(pkOnlyCfg); const msg = fixedBuffer32(); - const sigs = await adaptor.signMessage(msg); + const context = createMockContext(); + const sigs = await adaptor.signMessage(msg, context); expect(sigs).toHaveLength(2); - const s1 = await adaptor.signMessageWithAddress(addr(A.ATTESTER_1), msg); + const s1 = await 
adaptor.signMessageWithAddress(addr(A.ATTESTER_1), msg, context); expect(s1).toBeDefined(); }); it('unknown address -> rejects', async () => { const adaptor = NodeKeystoreAdapter.fromKeystoreConfig(pkOnlyCfg); - await expect(adaptor.signTypedDataWithAddress(addr(A.UNKNOWN), mkTypedData())).rejects.toThrow( + const context = createMockContext(); + await expect(adaptor.signTypedDataWithAddress(addr(A.UNKNOWN), mkTypedData(), context)).rejects.toThrow( `No signer found for address ${lower(A.UNKNOWN)}`, ); - await expect(adaptor.signMessageWithAddress(addr(A.UNKNOWN), fixedBuffer32())).rejects.toThrow( + await expect(adaptor.signMessageWithAddress(addr(A.UNKNOWN), fixedBuffer32(), context)).rejects.toThrow( `No signer found for address ${lower(A.UNKNOWN)}`, ); }); diff --git a/yarn-project/validator-client/src/key_store/node_keystore_adapter.ts b/yarn-project/validator-client/src/key_store/node_keystore_adapter.ts index 7a53be46ccee..415463bb3889 100644 --- a/yarn-project/validator-client/src/key_store/node_keystore_adapter.ts +++ b/yarn-project/validator-client/src/key_store/node_keystore_adapter.ts @@ -6,6 +6,7 @@ import { KeystoreManager, loadKeystoreFile } from '@aztec/node-keystore'; import type { EthRemoteSignerConfig } from '@aztec/node-keystore'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { InvalidValidatorPrivateKeyError } from '@aztec/stdlib/validators'; +import type { SigningContext } from '@aztec/validator-ha-signer/types'; import type { TypedDataDefinition } from 'viem'; import { privateKeyToAccount } from 'viem/accounts'; @@ -230,9 +231,10 @@ export class NodeKeystoreAdapter implements ExtendedValidatorKeyStore { /** * Sign typed data with all attester signers across validators. * @param typedData EIP-712 typed data + * @param _context Signing context (ignored by NodeKeystoreAdapter, used for HA protection) * @returns Array of signatures in validator order, flattened */ - async signTypedData(typedData: TypedDataDefinition): Promise { + async signTypedData(typedData: TypedDataDefinition, _context: SigningContext): Promise { const jobs: Promise[] = []; for (const i of this.validatorIndices()) { const v = this.ensureValidator(i); @@ -246,9 +248,10 @@ export class NodeKeystoreAdapter implements ExtendedValidatorKeyStore { /** * Sign a message with all attester signers across validators. * @param message 32-byte message (already hashed/padded as needed) + * @param _context Signing context (ignored by NodeKeystoreAdapter, used for HA protection) * @returns Array of signatures in validator order, flattened */ - async signMessage(message: Buffer32): Promise { + async signMessage(message: Buffer32, _context: SigningContext): Promise { const jobs: Promise[] = []; for (const i of this.validatorIndices()) { const v = this.ensureValidator(i); @@ -264,10 +267,15 @@ export class NodeKeystoreAdapter implements ExtendedValidatorKeyStore { * Hydrates caches on-demand when the address is first seen. 
* @param address Address to sign with * @param typedData EIP-712 typed data + * @param _context Signing context (ignored by NodeKeystoreAdapter, used for HA protection) * @returns Signature from the signer matching the address * @throws Error when no signer exists for the address */ - async signTypedDataWithAddress(address: EthAddress, typedData: TypedDataDefinition): Promise { + async signTypedDataWithAddress( + address: EthAddress, + typedData: TypedDataDefinition, + _context: SigningContext, + ): Promise { const entry = this.addressIndex.get(NodeKeystoreAdapter.key(address)); if (entry) { return await this.keystoreManager.signTypedData(entry.signer, typedData); @@ -290,10 +298,11 @@ export class NodeKeystoreAdapter implements ExtendedValidatorKeyStore { * Hydrates caches on-demand when the address is first seen. * @param address Address to sign with * @param message 32-byte message + * @param _context Signing context (ignored by NodeKeystoreAdapter, used for HA protection) * @returns Signature from the signer matching the address * @throws Error when no signer exists for the address */ - async signMessageWithAddress(address: EthAddress, message: Buffer32): Promise { + async signMessageWithAddress(address: EthAddress, message: Buffer32, _context: SigningContext): Promise { const entry = this.addressIndex.get(NodeKeystoreAdapter.key(address)); if (entry) { return await this.keystoreManager.signMessage(entry.signer, message); @@ -372,4 +381,18 @@ export class NodeKeystoreAdapter implements ExtendedValidatorKeyStore { const validatorIndex = this.findValidatorIndexForAttester(attesterAddress); return this.keystoreManager.getEffectiveRemoteSignerConfig(validatorIndex, attesterAddress); } + + /** + * Start the key store - no-op + */ + start(): Promise { + return Promise.resolve(); + } + + /** + * Stop the key store - no-op + */ + stop(): Promise { + return Promise.resolve(); + } } diff --git a/yarn-project/validator-client/src/key_store/web3signer_key_store.integration.test.ts b/yarn-project/validator-client/src/key_store/web3signer_key_store.integration.test.ts index 7a17a06a3bb6..3408d0036a94 100644 --- a/yarn-project/validator-client/src/key_store/web3signer_key_store.integration.test.ts +++ b/yarn-project/validator-client/src/key_store/web3signer_key_store.integration.test.ts @@ -1,7 +1,9 @@ +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Buffer32 } from '@aztec/foundation/buffer'; import { makeEthSignDigest, recoverAddress } from '@aztec/foundation/crypto/secp256k1-signer'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; +import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types'; import { afterEach, beforeEach, describe, expect, it, jest } from '@jest/globals'; import { type TypedDataDefinition, hashTypedData } from 'viem'; @@ -9,6 +11,15 @@ import { type TypedDataDefinition, hashTypedData } from 'viem'; import { LocalKeyStore } from './local_key_store.js'; import { Web3SignerKeyStore } from './web3signer_key_store.js'; +// Helper to create a mock signing context for testing +function createMockContext(): SigningContext { + return { + slot: SlotNumber(1), + blockNumber: BlockNumber(1), + dutyType: DutyType.ATTESTATION, + }; +} + describe('Web3SignerKeyStore Integration Tests', () => { const WEB3_SIGNER_URL = 'http://localhost:9000'; // use private keys below to test the integration @@ -113,7 +124,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { 
}), } as Response); - const signatures = await keyStore.signMessage(digest); + const signatures = await keyStore.signMessage(digest, createMockContext()); expect(signatures).toHaveLength(TEST_ADDRESSES.length); // Each address requires 1 JSON-RPC call @@ -136,7 +147,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - const signature = await keyStore.signMessageWithAddress(address, digest); + const signature = await keyStore.signMessageWithAddress(address, digest, createMockContext()); expect(signature).toBeInstanceOf(Signature); expect(mockFetch).toHaveBeenCalledWith(WEB3_SIGNER_URL, { @@ -157,7 +168,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { const digest = Buffer32.random(); const unknownAddress = EthAddress.fromString('0x9999999999999999999999999999999999999999'); - await expect(keyStore.signMessageWithAddress(unknownAddress, digest)).rejects.toThrow( + await expect(keyStore.signMessageWithAddress(unknownAddress, digest, createMockContext())).rejects.toThrow( `Address ${unknownAddress.toString()} not found in keystore`, ); @@ -179,7 +190,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - const signatures = await keyStore.signMessage(message); + const signatures = await keyStore.signMessage(message, createMockContext()); expect(signatures).toHaveLength(TEST_ADDRESSES.length); expect(mockFetch).toHaveBeenCalledTimes(TEST_ADDRESSES.length); @@ -201,7 +212,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - const signature = await keyStore.signMessageWithAddress(address, message); + const signature = await keyStore.signMessageWithAddress(address, message, createMockContext()); expect(signature).toBeInstanceOf(Signature); expect(mockFetch).toHaveBeenCalledWith(WEB3_SIGNER_URL, { @@ -232,7 +243,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - const signatures = await keyStore.signTypedData(TEST_TYPED_DATA); + const signatures = await keyStore.signTypedData(TEST_TYPED_DATA, createMockContext()); expect(signatures).toHaveLength(TEST_ADDRESSES.length); expect(mockFetch).toHaveBeenCalledTimes(TEST_ADDRESSES.length); @@ -269,7 +280,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - const signature = await keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA); + const signature = await keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA, createMockContext()); expect(signature).toBeInstanceOf(Signature); expect(mockFetch).toHaveBeenCalledWith(WEB3_SIGNER_URL, { @@ -289,9 +300,9 @@ describe('Web3SignerKeyStore Integration Tests', () => { it('should throw error for unknown address when signing typed data', async () => { const unknownAddress = EthAddress.fromString('0x9999999999999999999999999999999999999999'); - await expect(keyStore.signTypedDataWithAddress(unknownAddress, TEST_TYPED_DATA)).rejects.toThrow( - `Address ${unknownAddress.toString()} not found in keystore`, - ); + await expect( + keyStore.signTypedDataWithAddress(unknownAddress, TEST_TYPED_DATA, createMockContext()), + ).rejects.toThrow(`Address ${unknownAddress.toString()} not found in keystore`); expect(mockFetch).not.toHaveBeenCalled(); }); @@ -312,7 +323,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - const signature = await keyStore.signMessageWithAddress(address, digest); + const signature = await keyStore.signMessageWithAddress(address, digest, createMockContext()); 
expect(signature).toBeInstanceOf(Signature); }); @@ -333,7 +344,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - const signature = await keyStore.signMessageWithAddress(address, digest); + const signature = await keyStore.signMessageWithAddress(address, digest, createMockContext()); expect(signature).toBeInstanceOf(Signature); }); @@ -353,7 +364,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - const signature = await keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA); + const signature = await keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA, createMockContext()); expect(signature).toBeInstanceOf(Signature); }); @@ -375,7 +386,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - await expect(keyStore.signMessageWithAddress(address, digest)).rejects.toThrow( + await expect(keyStore.signMessageWithAddress(address, digest, createMockContext())).rejects.toThrow( 'Web3Signer JSON-RPC error: -32602 - Invalid params', ); }); @@ -396,7 +407,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - await expect(keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA)).rejects.toThrow( + await expect(keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA, createMockContext())).rejects.toThrow( 'Web3Signer JSON-RPC error: -32602 - Invalid params', ); }); @@ -412,7 +423,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { text: () => Promise.resolve('Invalid signing request'), } as Response); - await expect(keyStore.signMessageWithAddress(address, digest)).rejects.toThrow( + await expect(keyStore.signMessageWithAddress(address, digest, createMockContext())).rejects.toThrow( 'Web3Signer request failed: 400 Bad Request - Invalid signing request', ); }); @@ -427,7 +438,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { text: () => Promise.resolve('Invalid signing request'), } as Response); - await expect(keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA)).rejects.toThrow( + await expect(keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA, createMockContext())).rejects.toThrow( 'Web3Signer request failed: 400 Bad Request - Invalid signing request', ); }); @@ -438,7 +449,9 @@ describe('Web3SignerKeyStore Integration Tests', () => { mockFetch.mockRejectedValue(new Error('Network error')); - await expect(keyStore.signMessageWithAddress(address, digest)).rejects.toThrow('Network error'); + await expect(keyStore.signMessageWithAddress(address, digest, createMockContext())).rejects.toThrow( + 'Network error', + ); }); it('should handle network errors for typed data', async () => { @@ -446,7 +459,9 @@ describe('Web3SignerKeyStore Integration Tests', () => { mockFetch.mockRejectedValue(new Error('Network error')); - await expect(keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA)).rejects.toThrow('Network error'); + await expect(keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA, createMockContext())).rejects.toThrow( + 'Network error', + ); }); it('should handle empty response', async () => { @@ -463,7 +478,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } as Response); - await expect(keyStore.signMessageWithAddress(address, digest)).rejects.toThrow( + await expect(keyStore.signMessageWithAddress(address, digest, createMockContext())).rejects.toThrow( 'Invalid response from Web3Signer: no result found', ); }); @@ -481,7 +496,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { }), } 
as Response); - await expect(keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA)).rejects.toThrow( + await expect(keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA, createMockContext())).rejects.toThrow( 'Invalid response from Web3Signer: no result found', ); }); @@ -526,7 +541,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { expect(address.toString()).toBe(localAddress.toString()); - const signature = await keyStore.signMessageWithAddress(address, digest); + const signature = await keyStore.signMessageWithAddress(address, digest, createMockContext()); // For Web3Signer (eth_sign), we use makeEthSignDigest on recovery // because eth_sign automatically applies Ethereum message prefixing @@ -546,7 +561,7 @@ describe('Web3SignerKeyStore Integration Tests', () => { const address = keyStore.getAddress(0); - const signature = await keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA); + const signature = await keyStore.signTypedDataWithAddress(address, TEST_TYPED_DATA, createMockContext()); expect(signature).toBeInstanceOf(Signature); diff --git a/yarn-project/validator-client/src/key_store/web3signer_key_store.ts b/yarn-project/validator-client/src/key_store/web3signer_key_store.ts index 04e503132559..6f958b23fc26 100644 --- a/yarn-project/validator-client/src/key_store/web3signer_key_store.ts +++ b/yarn-project/validator-client/src/key_store/web3signer_key_store.ts @@ -2,6 +2,7 @@ import type { Buffer32 } from '@aztec/foundation/buffer'; import { normalizeSignature } from '@aztec/foundation/crypto/secp256k1-signer'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; +import type { SigningContext } from '@aztec/validator-ha-signer/types'; import type { TypedDataDefinition } from 'viem'; @@ -44,9 +45,10 @@ export class Web3SignerKeyStore implements ValidatorKeyStore { /** * Sign EIP-712 typed data with all keystore addresses * @param typedData - The complete EIP-712 typed data structure (domain, types, primaryType, message) + * @param _context - Signing context (ignored by Web3SignerKeyStore, used for HA protection) * @return signatures */ - public signTypedData(typedData: TypedDataDefinition): Promise { + public signTypedData(typedData: TypedDataDefinition, _context: SigningContext): Promise { return Promise.all(this.addresses.map(address => this.makeJsonRpcSignTypedDataRequest(address, typedData))); } @@ -54,10 +56,15 @@ export class Web3SignerKeyStore implements ValidatorKeyStore { * Sign EIP-712 typed data with a specific address * @param address - The address of the signer to use * @param typedData - The complete EIP-712 typed data structure (domain, types, primaryType, message) + * @param _context - Signing context (ignored by Web3SignerKeyStore, used for HA protection) * @returns signature for the specified address * @throws Error if the address is not found in the keystore or signing fails */ - public async signTypedDataWithAddress(address: EthAddress, typedData: TypedDataDefinition): Promise { + public async signTypedDataWithAddress( + address: EthAddress, + typedData: TypedDataDefinition, + _context: SigningContext, + ): Promise { if (!this.addresses.some(addr => addr.equals(address))) { throw new Error(`Address ${address.toString()} not found in keystore`); } @@ -69,9 +76,10 @@ export class Web3SignerKeyStore implements ValidatorKeyStore { * Sign a message with all keystore addresses using EIP-191 prefix * * @param message - The message to sign + * @param _context - Signing 
context (ignored by Web3SignerKeyStore, used for HA protection) * @return signatures */ - public signMessage(message: Buffer32): Promise { + public signMessage(message: Buffer32, _context: SigningContext): Promise { return Promise.all(this.addresses.map(address => this.makeJsonRpcSignRequest(address, message))); } @@ -79,10 +87,15 @@ export class Web3SignerKeyStore implements ValidatorKeyStore { * Sign a message with a specific address using EIP-191 prefix * @param address - The address of the signer to use * @param message - The message to sign + * @param _context - Signing context (ignored by Web3SignerKeyStore, used for HA protection) * @returns signature for the specified address * @throws Error if the address is not found in the keystore or signing fails */ - public async signMessageWithAddress(address: EthAddress, message: Buffer32): Promise { + public async signMessageWithAddress( + address: EthAddress, + message: Buffer32, + _context: SigningContext, + ): Promise { if (!this.addresses.some(addr => addr.equals(address))) { throw new Error(`Address ${address.toString()} not found in keystore`); } diff --git a/yarn-project/validator-client/src/validator.ha.integration.test.ts b/yarn-project/validator-client/src/validator.ha.integration.test.ts new file mode 100644 index 000000000000..94a7cf46875c --- /dev/null +++ b/yarn-project/validator-client/src/validator.ha.integration.test.ts @@ -0,0 +1,316 @@ +/** + * Integration tests for High-Availability validator setup + * + * These tests use real services (pglite database, HA signer, HA key store) + * rather than mocks to verify the HA coordination works correctly. + */ +import type { BlobClientInterface } from '@aztec/blob-client/client'; +import type { EpochCache } from '@aztec/epoch-cache'; +import { SecretValue } from '@aztec/foundation/config'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import type { Hex } from '@aztec/foundation/string'; +import { TestDateProvider } from '@aztec/foundation/timer'; +import { type KeyStore, KeystoreManager } from '@aztec/node-keystore'; +import type { P2P, TxProvider } from '@aztec/p2p'; +import { BlockProposalValidator } from '@aztec/p2p'; +import { AztecAddress } from '@aztec/stdlib/aztec-address'; +import type { L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; +import type { SlasherConfig, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; +import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; +import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; +import { makeCheckpointProposal, makeL2BlockHeader, mockTx } from '@aztec/stdlib/testing'; +import { TxHash } from '@aztec/stdlib/tx'; +import { getTelemetryClient } from '@aztec/telemetry-client'; +import { INSERT_SCHEMA_VERSION, SCHEMA_SETUP, SCHEMA_VERSION } from '@aztec/validator-ha-signer/db'; +import { DutyAlreadySignedError } from '@aztec/validator-ha-signer/errors'; +import { createHASigner } from '@aztec/validator-ha-signer/factory'; +import { Pool } from '@aztec/validator-ha-signer/test'; + +import { PGlite } from '@electric-sql/pglite'; +import { afterEach, beforeEach, describe, expect, it } from '@jest/globals'; +import { type MockProxy, mock } from 'jest-mock-extended'; +import { type PrivateKeyAccount, generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; + +import { BlockProposalHandler } from './block_proposal_handler.js'; +import type { FullNodeCheckpointsBuilder } from './checkpoint_builder.js'; 
+import type { ValidatorClientConfig } from './config.js'; +import { HAKeyStore } from './key_store/ha_key_store.js'; +import { NodeKeystoreAdapter } from './key_store/node_keystore_adapter.js'; +import { ValidatorMetrics } from './metrics.js'; +import { ValidatorClient } from './validator.js'; + +describe('ValidatorClient HA Integration', () => { + let pglite: PGlite; + let validatorPrivateKeys: `0x${string}`[]; + let validatorAccounts: PrivateKeyAccount[]; + let keyStoreManager: KeystoreManager; + + // Mock dependencies + let p2pClient: MockProxy; + let blockSource: MockProxy; + let l1ToL2MessageSource: MockProxy; + let epochCache: MockProxy; + let checkpointsBuilder: MockProxy; + let worldState: MockProxy; + let txProvider: MockProxy; + let blobClient: MockProxy; + let dateProvider: TestDateProvider; + + // Track resources for cleanup + let pools: Pool[]; + let validators: ValidatorClient[]; + + beforeEach(async () => { + // Initialize cleanup tracking arrays + pools = []; + validators = []; + // Create a fresh pglite database for each test to ensure complete isolation + pglite = new PGlite(); + // Set up the database schema for testing + for (const statement of SCHEMA_SETUP) { + await pglite.query(statement); + } + await pglite.query(INSERT_SCHEMA_VERSION, [SCHEMA_VERSION]); + + // Generate validator keys - using 5 validators to test HA coordination at scale + validatorPrivateKeys = Array.from({ length: 5 }, () => generatePrivateKey()); + validatorAccounts = validatorPrivateKeys.map(privateKey => privateKeyToAccount(privateKey)); + + // Set up mocks + p2pClient = mock(); + p2pClient.getCheckpointAttestationsForSlot.mockImplementation(() => Promise.resolve([])); + p2pClient.addCheckpointAttestations.mockResolvedValue(); + p2pClient.broadcastCheckpointAttestations.mockResolvedValue(); + checkpointsBuilder = mock(); + checkpointsBuilder.getConfig.mockReturnValue({ + l1GenesisTime: 1n, + slotDuration: 24, + l1ChainId: 1, + rollupVersion: 1, + }); + worldState = mock(); + epochCache = mock(); + // Default mock: return all addresses passed (all are in committee) + epochCache.filterInCommittee.mockImplementation((_slot, addresses) => Promise.resolve(addresses)); + blockSource = mock(); + l1ToL2MessageSource = mock(); + txProvider = mock(); + l1ToL2MessageSource.getL1ToL2Messages.mockResolvedValue([]); + dateProvider = new TestDateProvider(); + blobClient = mock(); + blobClient.canUpload.mockReturnValue(false); + blobClient.sendBlobsToFilestore.mockResolvedValue(true); + + // Create keystore + const keyStore: KeyStore = { + schemaVersion: 1, + validators: [ + { + attester: validatorPrivateKeys.map(key => key as Hex<32>), + feeRecipient: AztecAddress.ZERO, + }, + ], + }; + keyStoreManager = new KeystoreManager(keyStore); + + // Create 5 HA validator instances for use across all tests + const baseConfig: ValidatorClientConfig & Pick = { + validatorPrivateKeys: new SecretValue(validatorPrivateKeys), + attestationPollingIntervalMs: 1000, + disableValidator: false, + disabledValidators: [], + validatorReexecute: false, + slashBroadcastedInvalidBlockPenalty: 1n, + haSigningEnabled: true, + nodeId: 'ha-node-1', // temporary + pollingIntervalMs: 100, + signingTimeoutMs: 3000, + maxStuckDutiesAgeMs: 72000, + databaseUrl: 'postgresql://test', + }; + + // Create 5 validator nodes with unique node IDs + for (let i = 0; i < 5; i++) { + const pool = new Pool({ pglite }); + pools.push(pool); + const config = { ...baseConfig, nodeId: `ha-node-${i + 1}` }; + const validator = await createHAValidator(pool, 
config); + await validator.start(); + validators.push(validator); + } + }); + + afterEach(async () => { + // Stop all HA validators + for (const validator of validators) { + await validator.stop(); + } + + // Close pglite instance + if (pglite) { + await pglite.close(); + pglite = undefined as any; + } + }); + + /** + * Helper to create a ValidatorClient with HA signing enabled using pglite + * Tracks resources for cleanup + */ + async function createHAValidator( + pool: Pool, + config: ValidatorClientConfig & Pick, + ): Promise { + // Track pool for cleanup + pools.push(pool); + // Create HA signer with pglite pool + const { signer: haSigner } = await createHASigner(config, { pool: pool as any }); + + // Create base keystore + const baseKeyStore = NodeKeystoreAdapter.fromKeyStoreManager(keyStoreManager); + + // Wrap with HA key store + const haKeyStore = new HAKeyStore(baseKeyStore, haSigner); + + // Create block proposal handler + const metrics = new ValidatorMetrics(getTelemetryClient()); + const blockProposalValidator = new BlockProposalValidator(epochCache, { + txsPermitted: true, + }); + const blockProposalHandler = new BlockProposalHandler( + checkpointsBuilder, + worldState, + blockSource, + l1ToL2MessageSource, + txProvider, + blockProposalValidator, + epochCache, + config, + metrics, + dateProvider, + getTelemetryClient(), + ); + + // Create validator using protected constructor via type assertion + // This is necessary to test HA coordination with real services + const validator = new (ValidatorClient as any)( + haKeyStore, + epochCache, + p2pClient, + blockProposalHandler, + blockSource, + checkpointsBuilder, + worldState, + l1ToL2MessageSource, + config, + blobClient, + dateProvider, + getTelemetryClient(), + ) as ValidatorClient; + + // Note: Validator is tracked in the calling code (beforeEach) + return validator; + } + + describe('High-Availability signing coordination', () => { + it('should allow only one validator instance to create a block proposal for the same slot', async () => { + // Use all 5 validators - all try to create the same block proposal + const blockHeader = makeL2BlockHeader(1, 100, 100).toBlockHeader(); + const indexWithinCheckpoint = 0; + const inHash = computeInHashFromL1ToL2Messages([]); + const archive = Fr.random(); + const txs = await Promise.all([1, 2, 3].map(() => mockTx())); + const proposerAddress = EthAddress.fromString(validatorAccounts[0].address); + + // All 5 validators try to create a block proposal for the same slot simultaneously + const results = await Promise.allSettled( + validators.map(v => + v.createBlockProposal(blockHeader, indexWithinCheckpoint, inHash, archive, txs, proposerAddress, { + publishFullTxs: false, + }), + ), + ); + + // Exactly one should succeed, others should fail with DutyAlreadySignedError + const successCount = results.filter(r => r.status === 'fulfilled').length; + const failureCount = results.filter( + r => r.status === 'rejected' && r.reason instanceof DutyAlreadySignedError, + ).length; + + expect(successCount).toBe(1); + expect(failureCount).toBe(4); + + // Verify the successful proposal is valid + const successfulResult = results.find(r => r.status === 'fulfilled') as PromiseFulfilledResult | undefined; + expect(successfulResult).toBeDefined(); + expect(successfulResult?.value).toBeDefined(); + expect(successfulResult?.value?.getSender()).toEqual(proposerAddress); + }); + + it('should allow different validators to create proposals for different slots', async () => { + const proposerAddress = 
EthAddress.fromString(validatorAccounts[0].address); + const txs = await Promise.all([1, 2, 3].map(() => mockTx())); + const inHash = computeInHashFromL1ToL2Messages([]); + + // Each of the 5 validators creates a proposal for a different slot + const proposals = await Promise.all( + validators.map((v, i) => { + const blockHeader = makeL2BlockHeader(1, 100 + i, 100 + i).toBlockHeader(); + const archive = Fr.random(); + return v.createBlockProposal(blockHeader, 0, inHash, archive, txs, proposerAddress, { + publishFullTxs: false, + }); + }), + ); + + // All 5 should succeed since they're for different slots + expect(proposals).toHaveLength(5); + proposals.forEach(proposal => { + expect(proposal).toBeDefined(); + expect(proposal.getSender()).toEqual(proposerAddress); + }); + }); + + it('should coordinate attestations when multiple validators attest to the same checkpoint proposal', async () => { + // Create checkpoint proposal using the test helper (without HA signing) + // This bypasses HA signing for proposal creation - we only want to test attestation HA coordination + const testSlot = 200; + const txHashes = [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); + const checkpointProposal = await makeCheckpointProposal({ + checkpointHeader: makeL2BlockHeader(1, 100, testSlot).toCheckpointHeader(), + lastBlock: { + blockHeader: makeL2BlockHeader(1, 100, testSlot), + txHashes, + }, + }); + + // All 5 validators try to attest to the same checkpoint proposal simultaneously + const results = await Promise.allSettled(validators.map(v => v.collectOwnAttestations(checkpointProposal))); + + // Check for errors - if all fail, at least one should have a meaningful error + const allFailed = results.every(r => r.status === 'rejected'); + if (allFailed) { + const hasExpectedError = results.some( + r => r.status === 'rejected' && r.reason instanceof DutyAlreadySignedError, + ); + if (!hasExpectedError) { + // Unexpected error - rethrow to see what went wrong + throw new Error( + `All validators failed with unexpected errors: ${results.map(r => (r.status === 'rejected' ? 
r.reason : '')).join(', ')}`, + ); + } + } + + // Count total attestations across all validators + // HA coordination will ensure only one validator succeeds per address + const totalAttestations = results + .filter((r): r is PromiseFulfilledResult => r.status === 'fulfilled') + .reduce((sum, r) => sum + r.value.length, 0); + + // We have 5 validator addresses, so total attestations should be 5 + // HA coordination ensures only one node signs per address + expect(totalAttestations).toBe(5); + }); + }); +}); diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 0c8ed8c83455..e36c7c464cbe 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -53,6 +53,7 @@ import type { FullNodeCheckpointsBuilder, } from './checkpoint_builder.js'; import { type ValidatorClientConfig, validatorClientConfigMappings } from './config.js'; +import type { HAKeyStore } from './key_store/ha_key_store.js'; import { ValidatorClient } from './validator.js'; describe('ValidatorClient', () => { @@ -70,8 +71,9 @@ describe('ValidatorClient', () => { let txProvider: MockProxy; let keyStoreManager: KeystoreManager; let blobClient: MockProxy; + let haKeyStore: MockProxy; - beforeEach(() => { + beforeEach(async () => { p2pClient = mock(); p2pClient.getCheckpointAttestationsForSlot.mockImplementation(() => Promise.resolve([])); p2pClient.handleAuthRequestFromPeer.mockResolvedValue(StatusMessage.random()); @@ -98,10 +100,15 @@ describe('ValidatorClient', () => { blobClient = mock(); blobClient.canUpload.mockReturnValue(false); blobClient.sendBlobsToFilestore.mockResolvedValue(true); + haKeyStore = mock(); + haKeyStore.start.mockImplementation(() => Promise.resolve()); + haKeyStore.stop.mockImplementation(() => Promise.resolve()); const validatorPrivateKeys = [generatePrivateKey(), generatePrivateKey()]; validatorAccounts = validatorPrivateKeys.map(privateKey => privateKeyToAccount(privateKey)); + haKeyStore.getAddresses.mockReturnValue(validatorAccounts.map(account => EthAddress.fromString(account.address))); + config = { validatorPrivateKeys: new SecretValue(validatorPrivateKeys), attestationPollingIntervalMs: 1000, @@ -110,6 +117,11 @@ describe('ValidatorClient', () => { validatorReexecute: false, slashBroadcastedInvalidBlockPenalty: 1n, disableTransactions: false, + haSigningEnabled: false, + nodeId: 'test-node-id', + pollingIntervalMs: 1000, + signingTimeoutMs: 1000, + maxStuckDutiesAgeMs: 72000, }; const keyStore: KeyStore = { @@ -129,7 +141,7 @@ describe('ValidatorClient', () => { }; keyStoreManager = new KeystoreManager(keyStore); - validatorClient = ValidatorClient.new( + validatorClient = await ValidatorClient.new( config, checkpointsBuilder, worldState, @@ -173,7 +185,7 @@ describe('ValidatorClient', () => { describe('collectAttestations', () => { it('should timeout if we do not collect enough attestations in time', async () => { - const proposal = await makeCheckpointProposal(); + const proposal = await makeCheckpointProposal({ lastBlock: {} }); await expect( validatorClient.collectAttestations(proposal, 2, new Date(dateProvider.now() + 100)), @@ -219,7 +231,7 @@ describe('ValidatorClient', () => { validatorAccounts.map(account => EthAddress.fromString(account.address)), ); const addCheckpointAttestationsSpy = jest.spyOn(p2pClient, 'addCheckpointAttestations'); - const proposal = await makeCheckpointProposal(); + const proposal = await makeCheckpointProposal({ lastBlock: {} }); 
// collectAttestations still throws as we don't have a real p2pClient await expect( validatorClient.collectAttestations(proposal, 3, new Date(dateProvider.now() + 100)), @@ -750,4 +762,15 @@ describe('ValidatorClient', () => { expect(validatorClient.getValidatorAddresses()).toEqual(addresses.slice(1)); }); }); + + describe('lifecycle methods', () => { + it('should run start() / stop() on the HA key store', async () => { + (validatorClient as any).config.haSigningEnabled = true; + (validatorClient as any).keyStore = haKeyStore; + await validatorClient.start(); + expect(haKeyStore.start).toHaveBeenCalledTimes(1); + await validatorClient.stop(); + expect(haKeyStore.stop).toHaveBeenCalledTimes(1); + }); + }); }); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 42444d0bbe27..6152d1ec0055 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -1,7 +1,7 @@ import type { BlobClientInterface } from '@aztec/blob-client/client'; import { type Blob, getBlobsPerL1Block } from '@aztec/blob-lib'; import type { EpochCache } from '@aztec/epoch-cache'; -import { BlockNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { TimeoutError } from '@aztec/foundation/error'; import type { EthAddress } from '@aztec/foundation/eth-address'; @@ -37,6 +37,8 @@ import type { CheckpointHeader } from '@aztec/stdlib/rollup'; import type { BlockHeader, CheckpointGlobalVariables, Tx } from '@aztec/stdlib/tx'; import { AttestationTimeoutError } from '@aztec/stdlib/validators'; import { type TelemetryClient, type Tracer, getTelemetryClient } from '@aztec/telemetry-client'; +import { createHASigner } from '@aztec/validator-ha-signer/factory'; +import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types'; import { EventEmitter } from 'events'; import type { TypedDataDefinition } from 'viem'; @@ -44,6 +46,8 @@ import type { TypedDataDefinition } from 'viem'; import { BlockProposalHandler, type BlockProposalValidationFailureReason } from './block_proposal_handler.js'; import type { FullNodeCheckpointsBuilder } from './checkpoint_builder.js'; import { ValidationService } from './duties/validation_service.js'; +import { HAKeyStore } from './key_store/ha_key_store.js'; +import type { ExtendedValidatorKeyStore } from './key_store/interface.js'; import { NodeKeystoreAdapter } from './key_store/node_keystore_adapter.js'; import { ValidatorMetrics } from './metrics.js'; @@ -83,7 +87,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) private validatedBlockSlots: Set = new Set(); protected constructor( - private keyStore: NodeKeystoreAdapter, + private keyStore: ExtendedValidatorKeyStore, private epochCache: EpochCache, private p2pClient: P2P, private blockProposalHandler: BlockProposalHandler, @@ -166,7 +170,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) } } - static new( + static async new( config: ValidatorClientFullConfig, checkpointsBuilder: FullNodeCheckpointsBuilder, worldState: WorldStateSynchronizer, @@ -198,8 +202,19 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) telemetry, ); + let validatorKeyStore: ExtendedValidatorKeyStore = NodeKeystoreAdapter.fromKeyStoreManager(keyStoreManager); + if 
(config.haSigningEnabled) { + // If maxStuckDutiesAgeMs is not explicitly set, compute it from Aztec slot duration + const haConfig = { + ...config, + maxStuckDutiesAgeMs: config.maxStuckDutiesAgeMs ?? epochCache.getL1Constants().slotDuration * 2 * 1000, + }; + const { signer } = await createHASigner(haConfig); + validatorKeyStore = new HAKeyStore(validatorKeyStore, signer); + } + const validator = new ValidatorClient( - NodeKeystoreAdapter.fromKeyStoreManager(keyStoreManager), + validatorKeyStore, epochCache, p2pClient, blockProposalHandler, @@ -226,8 +241,8 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) return this.blockProposalHandler; } - public signWithAddress(addr: EthAddress, msg: TypedDataDefinition) { - return this.keyStore.signTypedDataWithAddress(addr, msg); + public signWithAddress(addr: EthAddress, msg: TypedDataDefinition, context: SigningContext) { + return this.keyStore.signTypedDataWithAddress(addr, msg, context); } public getCoinbaseForAttestor(attestor: EthAddress): EthAddress { @@ -252,6 +267,8 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) return; } + await this.keyStore.start(); + await this.registerHandlers(); const myAddresses = this.getValidatorAddresses(); @@ -267,6 +284,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) public async stop() { await this.epochCacheUpdateLoop.stop(); + await this.keyStore.stop(); } /** Register handlers on the p2p client */ @@ -748,8 +766,10 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) async signAttestationsAndSigners( attestationsAndSigners: CommitteeAttestationsAndSigners, proposer: EthAddress, + slot: SlotNumber, + blockNumber: BlockNumber | CheckpointNumber, ): Promise { - return await this.validationService.signAttestationsAndSigners(attestationsAndSigners, proposer); + return await this.validationService.signAttestationsAndSigners(attestationsAndSigners, proposer, slot, blockNumber); } async collectOwnAttestations(proposal: CheckpointProposal): Promise { @@ -856,7 +876,9 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) } const payloadToSign = authRequest.getPayloadToSign(); - const signature = await this.keyStore.signMessageWithAddress(addressToUse, payloadToSign); + // AUTH_REQUEST doesn't require HA protection - multiple signatures are safe + const context: SigningContext = { dutyType: DutyType.AUTH_REQUEST }; + const signature = await this.keyStore.signMessageWithAddress(addressToUse, payloadToSign, context); const authResponse = new AuthResponse(statusMessage, signature); return authResponse.toBuffer(); } diff --git a/yarn-project/validator-client/tsconfig.json b/yarn-project/validator-client/tsconfig.json index 9da8b98b8766..bb8de0d64b48 100644 --- a/yarn-project/validator-client/tsconfig.json +++ b/yarn-project/validator-client/tsconfig.json @@ -50,6 +50,9 @@ }, { "path": "../telemetry-client" + }, + { + "path": "../validator-ha-signer" } ], "include": ["src"] diff --git a/yarn-project/validator-ha-signer/README.md b/yarn-project/validator-ha-signer/README.md index f2ff6aee3d96..fa69ded55bc5 100644 --- a/yarn-project/validator-ha-signer/README.md +++ b/yarn-project/validator-ha-signer/README.md @@ -9,39 +9,21 @@ Distributed locking and slashing protection for Aztec validators running in high - **Automatic Retry**: Failed signing attempts are cleared, allowing other nodes to retry - **PostgreSQL Backend**: Shared database for coordination across 
nodes -## Quick Start +## Integration with Validator Client -### Option 1: Automatic Migrations (Simplest) +The HA signer is automatically integrated into the validator client when `VALIDATOR_HA_SIGNING_ENABLED=true` is set. The validator client will: -```typescript -import { createHASigner } from '@aztec/validator-ha-signer/factory'; +1. Create the HA signer using `createHASigner()` from the factory +2. Wrap the base keystore with `HAKeyStore` to provide HA-protected signing +3. Automatically start/stop the signer lifecycle -// Migrations run automatically on startup -const { signer, db } = await createHASigner({ - databaseUrl: process.env.DATABASE_URL, - enabled: true, - nodeId: 'validator-node-1', - pollingIntervalMs: 100, - signingTimeoutMs: 3000, -}); +No manual integration is required when using the validator client. -// Start background cleanup tasks -signer.start(); +## Manual Usage -// Sign with protection -const signature = await signer.signWithProtection( - validatorAddress, - messageHash, - { slot: 100n, blockNumber: 50n, dutyType: 'BLOCK_PROPOSAL' }, - async root => localSigner.signMessage(root), -); +For advanced use cases or testing, you can use the HA signer directly. **Note**: Database migrations must be run separately before creating the signer (see [Database Migrations](#database-migrations) below). -// Cleanup on shutdown -await signer.stop(); -await db.close(); -``` - -### Option 2: Manual Migrations (Recommended for Production) +### Basic Usage ```bash # 1. Run migrations separately (once per deployment) @@ -54,7 +36,7 @@ import { createHASigner } from '@aztec/validator-ha-signer/factory'; const { signer, db } = await createHASigner({ databaseUrl: process.env.DATABASE_URL, - enabled: true, + haSigningEnabled: true, nodeId: 'validator-node-1', pollingIntervalMs: 100, signingTimeoutMs: 3000, @@ -63,6 +45,14 @@ const { signer, db } = await createHASigner({ // Start background cleanup tasks signer.start(); +// Sign with protection +const signature = await signer.signWithProtection( + validatorAddress, + messageHash, + { slot: 100n, blockNumber: 50n, blockIndexWithinCheckpoint: 0, dutyType: 'BLOCK_PROPOSAL' }, + async root => localSigner.signMessage(root), +); + // On shutdown await signer.stop(); await db.close(); @@ -73,7 +63,7 @@ await db.close(); If you need custom pool configuration (e.g., max connections, idle timeout) or want to share a connection pool across multiple components: > **Note**: You still need to run migrations separately before using this approach. -> See [Option 2](#option-2-manual-migrations-recommended-for-production) above. +> See [Database Migrations](#database-migrations) below. 
```typescript import { PostgresSlashingProtectionDatabase } from '@aztec/validator-ha-signer/db'; @@ -91,11 +81,11 @@ const db = new PostgresSlashingProtectionDatabase(pool); await db.initialize(); const signer = new ValidatorHASigner(db, { - enabled: true, + haSigningEnabled: true, nodeId: 'validator-node-1', pollingIntervalMs: 100, signingTimeoutMs: 3000, - maxStuckDutiesAgeMs: 72000, + maxStuckDutiesAgeMs: 144000, }); // Start background cleanup tasks @@ -111,11 +101,15 @@ await pool.end(); // You manage the pool lifecycle Set via environment variables or config object: - `VALIDATOR_HA_DATABASE_URL`: PostgreSQL connection string (e.g., `postgresql://user:pass@host:port/db`) -- `SLASHING_PROTECTION_ENABLED`: Whether slashing protection is enabled (default: true) -- `SLASHING_PROTECTION_NODE_ID`: Unique identifier for this validator node -- `SLASHING_PROTECTION_POLLING_INTERVAL_MS`: How often to check duty status (default: 100) -- `SLASHING_PROTECTION_SIGNING_TIMEOUT_MS`: Max wait for in-progress signing (default: 3000) -- `SLASHING_PROTECTION_MAX_STUCK_DUTIES_AGE_MS`: Max age of stuck duties before cleanup (default: 72000) +- `VALIDATOR_HA_SIGNING_ENABLED`: Whether HA signing / slashing protection is enabled (default: false) +- `VALIDATOR_HA_NODE_ID`: Unique identifier for this validator node (required when enabled) +- `VALIDATOR_HA_POLLING_INTERVAL_MS`: How often to check duty status (default: 100) +- `VALIDATOR_HA_SIGNING_TIMEOUT_MS`: Max wait for in-progress signing (default: 3000) +- `VALIDATOR_HA_MAX_STUCK_DUTIES_AGE_MS`: Max age of stuck duties before cleanup (default: 2 \* aztecSlotDuration) +- `VALIDATOR_HA_POOL_MAX`: Maximum number of connections in the pool (default: 10) +- `VALIDATOR_HA_POOL_MIN`: Minimum number of connections in the pool (default: 0) +- `VALIDATOR_HA_POOL_IDLE_TIMEOUT_MS`: Idle timeout for pool connections (default: 10000) +- `VALIDATOR_HA_POOL_CONNECTION_TIMEOUT_MS`: Connection timeout (default: 0, no timeout) ## Database Migrations @@ -170,9 +164,20 @@ When multiple validator nodes attempt to sign: 1. First node acquires lock and signs 2. Other nodes receive `DutyAlreadySignedError` (expected) -3. If different data detected: `SlashingProtectionError` (likely for block builder signing) +3. If different data detected: `SlashingProtectionError` (prevents slashing) 4. Failed attempts are auto-cleaned, allowing retry +### Signing Context + +All signing operations require a `SigningContext` that includes: + +- `slot`: The slot number +- `blockNumber`: The block number within the checkpoint +- `blockIndexWithinCheckpoint`: The index of the block within the checkpoint (use `-1` for N/A contexts) +- `dutyType`: The type of duty (e.g., `BLOCK_PROPOSAL`, `CHECKPOINT_ATTESTATION`, `AUTH_REQUEST`) + +Note: `AUTH_REQUEST` duties bypass HA protection since signing multiple times is safe for authentication requests. 
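As a reference, the sketch below (illustrative only; the slot, block number, and index values are placeholders) shows how a `SigningContext` might be built for a slashable duty versus an auth request, using the branded number helpers and duty types exported by this package:

```typescript
import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types';
import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types';

// Slashable duty: populate every field so the database can key the duty on
// (validator, slot, duty type, block index) and detect conflicting payloads.
const proposalContext: SigningContext = {
  slot: SlotNumber(100),
  blockNumber: BlockNumber(50),
  blockIndexWithinCheckpoint: 0,
  dutyType: DutyType.BLOCK_PROPOSAL,
};

// Non-slashable duty: AUTH_REQUEST bypasses HA protection, so only the duty
// type is supplied (this mirrors how the validator client signs auth requests).
const authContext: SigningContext = { dutyType: DutyType.AUTH_REQUEST };
```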
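Likewise, in an HA deployment a caller would normally treat `DutyAlreadySignedError` as a benign outcome (another node won the race) while still surfacing `SlashingProtectionError` and any other failure. A minimal, hypothetical wrapper:

```typescript
import { DutyAlreadySignedError } from '@aztec/validator-ha-signer/errors';

// Wrap any HA-protected signing call; losing the race is expected and safe.
async function signOrSkip<T>(sign: () => Promise<T>): Promise<T | undefined> {
  try {
    return await sign();
  } catch (err) {
    if (err instanceof DutyAlreadySignedError) {
      return undefined; // another node already signed this duty
    }
    throw err; // SlashingProtectionError and other failures must not be swallowed
  }
}
```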
+ ## Development ```bash diff --git a/yarn-project/validator-ha-signer/migrate.ts b/yarn-project/validator-ha-signer/migrate.ts new file mode 100644 index 000000000000..3bad5c8add8a --- /dev/null +++ b/yarn-project/validator-ha-signer/migrate.ts @@ -0,0 +1,52 @@ +#!/usr/bin/env node +/** + * Database migration runner + * + * Usage: + * ts-node migrate.ts up # Run pending migrations + * ts-node migrate.ts down # Rollback last migration + */ +import { runner } from 'node-pg-migrate'; +import { dirname, join } from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +async function runMigrations() { + const databaseUrl = process.env.DATABASE_URL; + + if (!databaseUrl) { + console.error('Error: DATABASE_URL environment variable is required'); + process.exit(1); + } + + const direction = process.argv[2] as 'up' | 'down'; + + if (!direction || !['up', 'down'].includes(direction)) { + console.error('Usage: migrate.ts [up|down]'); + process.exit(1); + } + + try { + console.log(`Running migrations ${direction}...`); + + await runner({ + databaseUrl, + dir: join(__dirname, 'migrations'), + direction, + migrationsTable: 'pgmigrations', + count: direction === 'down' ? 1 : Infinity, + verbose: true, + log: msg => console.log(msg), + }); + + console.log(`Migrations ${direction} completed successfully`); + process.exit(0); + } catch (error) { + console.error('Migration failed:', error); + process.exit(1); + } +} + +runMigrations(); diff --git a/yarn-project/validator-ha-signer/package.json b/yarn-project/validator-ha-signer/package.json index fa4d8a18ef82..21413994e94b 100644 --- a/yarn-project/validator-ha-signer/package.json +++ b/yarn-project/validator-ha-signer/package.json @@ -10,7 +10,8 @@ "./migrations": "./dest/migrations.js", "./slashing-protection-service": "./dest/slashing_protection_service.js", "./types": "./dest/types.js", - "./validator-ha-signer": "./dest/validator_ha_signer.js" + "./validator-ha-signer": "./dest/validator_ha_signer.js", + "./test": "./dest/test/pglite_pool.js" }, "typedocOptions": { "entryPoints": [ @@ -74,10 +75,10 @@ }, "dependencies": { "@aztec/foundation": "workspace:^", - "@aztec/node-keystore": "workspace:^", "node-pg-migrate": "^8.0.4", "pg": "^8.11.3", - "tslib": "^2.4.0" + "tslib": "^2.4.0", + "zod": "^3.23.8" }, "devDependencies": { "@electric-sql/pglite": "^0.3.14", diff --git a/yarn-project/validator-ha-signer/src/config.ts b/yarn-project/validator-ha-signer/src/config.ts index 1a1719773db0..b21e4e2b8bf2 100644 --- a/yarn-project/validator-ha-signer/src/config.ts +++ b/yarn-project/validator-ha-signer/src/config.ts @@ -4,66 +4,34 @@ import { getConfigFromMappings, getDefaultConfig, numberConfigHelper, + optionalNumberConfigHelper, } from '@aztec/foundation/config'; +import type { ZodFor } from '@aztec/foundation/schemas'; + +import { z } from 'zod'; /** - * Configuration for the slashing protection service + * Configuration for the Validator HA Signer + * + * This config is used for distributed locking and slashing protection + * when running multiple validator nodes in a high-availability setup. 
*/ -export interface SlashingProtectionConfig { - /** Whether slashing protection is enabled */ - enabled: boolean; +export interface ValidatorHASignerConfig { + /** Whether HA signing / slashing protection is enabled */ + haSigningEnabled: boolean; /** Unique identifier for this node */ nodeId: string; /** How long to wait between polls when a duty is being signed (ms) */ pollingIntervalMs: number; /** Maximum time to wait for a duty being signed to complete (ms) */ signingTimeoutMs: number; - /** Maximum age of a stuck duty in ms */ - maxStuckDutiesAgeMs: number; -} - -export const slashingProtectionConfigMappings: ConfigMappingsType = { - enabled: { - env: 'SLASHING_PROTECTION_ENABLED', - description: 'Whether slashing protection is enabled', - ...booleanConfigHelper(true), - }, - nodeId: { - env: 'SLASHING_PROTECTION_NODE_ID', - description: 'The unique identifier for this node', - defaultValue: '', - }, - pollingIntervalMs: { - env: 'SLASHING_PROTECTION_POLLING_INTERVAL_MS', - description: 'The number of ms to wait between polls when a duty is being signed', - ...numberConfigHelper(100), - }, - signingTimeoutMs: { - env: 'SLASHING_PROTECTION_SIGNING_TIMEOUT_MS', - description: 'The maximum time to wait for a duty being signed to complete', - ...numberConfigHelper(3_000), - }, - maxStuckDutiesAgeMs: { - env: 'SLASHING_PROTECTION_MAX_STUCK_DUTIES_AGE_MS', - description: 'The maximum age of a stuck duty in ms', - // hard-coding at current 2 slot duration. This should be set by the validator on init - ...numberConfigHelper(72_000), - }, -}; - -export const defaultSlashingProtectionConfig: SlashingProtectionConfig = getDefaultConfig( - slashingProtectionConfigMappings, -); - -/** - * Configuration for creating an HA signer with PostgreSQL backend - */ -export interface CreateHASignerConfig extends SlashingProtectionConfig { + /** Maximum age of a stuck duty in ms (defaults to 2x hardcoded Aztec slot duration if not set) */ + maxStuckDutiesAgeMs?: number; /** * PostgreSQL connection string * Format: postgresql://user:password@host:port/database */ - databaseUrl: string; + databaseUrl?: string; /** * PostgreSQL connection pool configuration */ @@ -77,8 +45,32 @@ export interface CreateHASignerConfig extends SlashingProtectionConfig { poolConnectionTimeoutMs?: number; } -export const createHASignerConfigMappings: ConfigMappingsType = { - ...slashingProtectionConfigMappings, +export const validatorHASignerConfigMappings: ConfigMappingsType = { + haSigningEnabled: { + env: 'VALIDATOR_HA_SIGNING_ENABLED', + description: 'Whether HA signing / slashing protection is enabled', + ...booleanConfigHelper(false), + }, + nodeId: { + env: 'VALIDATOR_HA_NODE_ID', + description: 'The unique identifier for this node', + defaultValue: '', + }, + pollingIntervalMs: { + env: 'VALIDATOR_HA_POLLING_INTERVAL_MS', + description: 'The number of ms to wait between polls when a duty is being signed', + ...numberConfigHelper(100), + }, + signingTimeoutMs: { + env: 'VALIDATOR_HA_SIGNING_TIMEOUT_MS', + description: 'The maximum time to wait for a duty being signed to complete', + ...numberConfigHelper(3_000), + }, + maxStuckDutiesAgeMs: { + env: 'VALIDATOR_HA_MAX_STUCK_DUTIES_AGE_MS', + description: 'The maximum age of a stuck duty in ms (defaults to 2x Aztec slot duration)', + ...optionalNumberConfigHelper(), + }, databaseUrl: { env: 'VALIDATOR_HA_DATABASE_URL', description: @@ -106,11 +98,28 @@ export const createHASignerConfigMappings: ConfigMappingsType(createHASignerConfigMappings); +export function 
getConfigEnvVars(): ValidatorHASignerConfig { + return getConfigFromMappings(validatorHASignerConfigMappings); } + +export const ValidatorHASignerConfigSchema = z.object({ + haSigningEnabled: z.boolean(), + nodeId: z.string(), + pollingIntervalMs: z.number().min(0), + signingTimeoutMs: z.number().min(0), + maxStuckDutiesAgeMs: z.number().min(0).optional(), + databaseUrl: z.string().optional(), + poolMaxCount: z.number().min(0).optional(), + poolMinCount: z.number().min(0).optional(), + poolIdleTimeoutMs: z.number().min(0).optional(), + poolConnectionTimeoutMs: z.number().min(0).optional(), +}) satisfies ZodFor; diff --git a/yarn-project/validator-ha-signer/src/db/postgres.test.ts b/yarn-project/validator-ha-signer/src/db/postgres.test.ts index 66c895483325..c3fae060461c 100644 --- a/yarn-project/validator-ha-signer/src/db/postgres.test.ts +++ b/yarn-project/validator-ha-signer/src/db/postgres.test.ts @@ -1,8 +1,10 @@ +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Buffer32 } from '@aztec/foundation/buffer'; import { EthAddress } from '@aztec/foundation/eth-address'; import { PGlite } from '@electric-sql/pglite'; import { afterEach, beforeEach, describe, expect, it, jest } from '@jest/globals'; +import type { QueryResult } from 'pg'; import { Pool } from '../test/pglite_pool.js'; import { PostgresSlashingProtectionDatabase } from './postgres.js'; @@ -26,6 +28,7 @@ describe('PostgreSQL Queries', () => { const VALIDATOR_ADDRESS = EthAddress.random().toString(); const SLOT = 100n; const BLOCK_NUMBER = 50n; + const BLOCK_INDEX_WITHIN_CHECKPOINT = 0; const DUTY_TYPE = DutyType.BLOCK_PROPOSAL; const MESSAGE_HASH = Buffer32.random().toString(); const NODE_ID = 'node-1'; @@ -48,6 +51,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -70,6 +74,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -81,6 +86,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, 'node-2', // Different node trying to acquire @@ -99,6 +105,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -112,6 +119,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, 'competing-node', @@ -127,6 +135,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DutyType.BLOCK_PROPOSAL, MESSAGE_HASH, NODE_ID, @@ -138,6 +147,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + -1, DutyType.ATTESTATION, MESSAGE_HASH, NODE_ID, @@ -153,6 +163,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), '100', BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -163,6 +174,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), '101', BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -181,6 +193,7 @@ 
describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -193,6 +206,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE, + BLOCK_INDEX_WITHIN_CHECKPOINT, LOCK_TOKEN, ]); @@ -201,8 +215,8 @@ describe('PostgreSQL Queries', () => { // Verify the update const selectResult = await db.query( `SELECT status, signature, completed_at FROM validator_duties - WHERE validator_address = $1 AND slot = $2 AND duty_type = $3`, - [VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE], + WHERE validator_address = $1 AND slot = $2 AND duty_type = $3 AND block_index_within_checkpoint = $4`, + [VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE, BLOCK_INDEX_WITHIN_CHECKPOINT], ); const row = selectResult.rows[0]; @@ -216,6 +230,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -228,6 +243,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE, + BLOCK_INDEX_WITHIN_CHECKPOINT, 'wrong-token', ]); @@ -247,6 +263,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -257,6 +274,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE, + BLOCK_INDEX_WITHIN_CHECKPOINT, LOCK_TOKEN, ]); @@ -266,6 +284,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE, + BLOCK_INDEX_WITHIN_CHECKPOINT, LOCK_TOKEN, ]); @@ -286,6 +305,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -296,6 +316,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE, + BLOCK_INDEX_WITHIN_CHECKPOINT, LOCK_TOKEN, ]); @@ -314,6 +335,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -324,6 +346,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE, + BLOCK_INDEX_WITHIN_CHECKPOINT, 'wrong-token', ]); @@ -342,6 +365,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -352,6 +376,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE, + BLOCK_INDEX_WITHIN_CHECKPOINT, LOCK_TOKEN, ]); @@ -360,6 +385,7 @@ describe('PostgreSQL Queries', () => { VALIDATOR_ADDRESS.toString(), SLOT.toString(), DUTY_TYPE, + BLOCK_INDEX_WITHIN_CHECKPOINT, LOCK_TOKEN, ]); @@ -375,11 +401,12 @@ describe('PostgreSQL Queries', () => { }); describe('constraints', () => { - it('should enforce primary key constraint (validator_address, slot, duty_type)', async () => { + it('should enforce primary key constraint (validator_address, slot, duty_type, block_index_within_checkpoint)', async () => { await db.query(INSERT_OR_GET_DUTY, [ VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -389,12 
+416,13 @@ describe('PostgreSQL Queries', () => { // Direct insert should fail due to primary key constraint await expect( db.query( - `INSERT INTO validator_duties (validator_address, slot, block_number, duty_type, status, message_hash, node_id, lock_token) - VALUES ($1, $2, $3, $4, 'signing', $5, $6, $7)`, + `INSERT INTO validator_duties (validator_address, slot, block_number, block_index_within_checkpoint, duty_type, status, message_hash, node_id, lock_token) + VALUES ($1, $2, $3, $4, $5, 'signing', $6, $7, $8)`, [ VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, 'node-2', @@ -407,9 +435,17 @@ describe('PostgreSQL Queries', () => { it('should enforce duty_type check constraint', async () => { await expect( db.query( - `INSERT INTO validator_duties (validator_address, slot, block_number, duty_type, status, message_hash, node_id, lock_token) - VALUES ($1, $2, $3, 'INVALID_TYPE', 'signing', $4, $5, $6)`, - [VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), MESSAGE_HASH, NODE_ID, LOCK_TOKEN], + `INSERT INTO validator_duties (validator_address, slot, block_number, block_index_within_checkpoint, duty_type, status, message_hash, node_id, lock_token) + VALUES ($1, $2, $3, $4, 'INVALID_TYPE', 'signing', $5, $6, $7)`, + [ + VALIDATOR_ADDRESS.toString(), + SLOT.toString(), + BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, + MESSAGE_HASH, + NODE_ID, + LOCK_TOKEN, + ], ), ).rejects.toThrow(); }); @@ -417,12 +453,13 @@ describe('PostgreSQL Queries', () => { it('should enforce status check constraint', async () => { await expect( db.query( - `INSERT INTO validator_duties (validator_address, slot, block_number, duty_type, status, message_hash, node_id, lock_token) - VALUES ($1, $2, $3, $4, 'invalid_status', $5, $6, $7)`, + `INSERT INTO validator_duties (validator_address, slot, block_number, block_index_within_checkpoint, duty_type, status, message_hash, node_id, lock_token) + VALUES ($1, $2, $3, $4, $5, 'invalid_status', $6, $7, $8)`, [ VALIDATOR_ADDRESS.toString(), SLOT.toString(), BLOCK_NUMBER.toString(), + BLOCK_INDEX_WITHIN_CHECKPOINT, DUTY_TYPE, MESSAGE_HASH, NODE_ID, @@ -455,16 +492,16 @@ describe('PostgresSlashingProtectionDatabase', () => { } await pglite.query(INSERT_SCHEMA_VERSION, [SCHEMA_VERSION]); - const db = new PostgresSlashingProtectionDatabase(pool); + const spDb = new PostgresSlashingProtectionDatabase(pool); - await expect(db.initialize()).resolves.not.toThrow(); + await expect(spDb.initialize()).resolves.not.toThrow(); }); it('should throw when schema_version table does not exist', async () => { - const db = new PostgresSlashingProtectionDatabase(pool); + const spDb = new PostgresSlashingProtectionDatabase(pool); - await expect(db.initialize()).rejects.toThrow( - 'Database schema not initialized. Please run migrations first: aztec migrate up --database-url ', + await expect(spDb.initialize()).rejects.toThrow( + 'Database schema not initialized. Please run migrations first: aztec migrate-ha-db up --database-url ', ); }); @@ -477,10 +514,10 @@ describe('PostgresSlashingProtectionDatabase', () => { ) `); - const db = new PostgresSlashingProtectionDatabase(pool); + const spDb = new PostgresSlashingProtectionDatabase(pool); - await expect(db.initialize()).rejects.toThrow( - 'Database schema not initialized. Please run migrations first: aztec migrate up --database-url ', + await expect(spDb.initialize()).rejects.toThrow( + 'Database schema not initialized. 
Please run migrations first: aztec migrate-ha-db up --database-url ', ); }); @@ -491,10 +528,10 @@ describe('PostgresSlashingProtectionDatabase', () => { } await pglite.query(INSERT_SCHEMA_VERSION, [SCHEMA_VERSION - 1]); - const db = new PostgresSlashingProtectionDatabase(pool); + const spDb = new PostgresSlashingProtectionDatabase(pool); - await expect(db.initialize()).rejects.toThrow( - `Database schema version ${SCHEMA_VERSION - 1} is outdated (expected ${SCHEMA_VERSION}). Please run migrations: aztec migrate up --database-url `, + await expect(spDb.initialize()).rejects.toThrow( + `Database schema version ${SCHEMA_VERSION - 1} is outdated (expected ${SCHEMA_VERSION}). Please run migrations: aztec migrate-ha-db up --database-url `, ); }); @@ -505,26 +542,166 @@ describe('PostgresSlashingProtectionDatabase', () => { } await pglite.query(INSERT_SCHEMA_VERSION, [SCHEMA_VERSION + 1]); - const db = new PostgresSlashingProtectionDatabase(pool); + const spDb = new PostgresSlashingProtectionDatabase(pool); - await expect(db.initialize()).rejects.toThrow( + await expect(spDb.initialize()).rejects.toThrow( `Database schema version ${SCHEMA_VERSION + 1} is newer than expected (${SCHEMA_VERSION}). Please update your application.`, ); }); it('should allow closing the database connection', async () => { - const db = new PostgresSlashingProtectionDatabase(pool); + const spDb = new PostgresSlashingProtectionDatabase(pool); const endSpy = jest.spyOn(pool, 'end'); - await db.close(); + await spDb.close(); expect(endSpy).toHaveBeenCalled(); }); }); - describe('bigint handling', () => { + describe('tryInsertOrGetExisting retry logic', () => { const VALIDATOR_ADDRESS = EthAddress.random(); - const SLOT = 100n; - const BLOCK_NUMBER = 50n; + const SLOT = SlotNumber(100); + const BLOCK_NUMBER = BlockNumber(50); + const MESSAGE_HASH = Buffer32.random().toString(); + const NODE_ID = 'node-1'; + + beforeEach(async () => { + for (const statement of SCHEMA_SETUP) { + await pglite.query(statement); + } + await pglite.query(INSERT_SCHEMA_VERSION, [SCHEMA_VERSION]); + }); + + it.each([1, 2, 3])('should retry %i time(s) and eventually succeed', async numRetries => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + let callCount = 0; + + // Mock pool.query to return no rows on first numRetries calls, then rows on subsequent call + const originalQuery = pool.query.bind(pool); + jest.spyOn(pool, 'query').mockImplementation(async (text: string, values?: any[]) => { + callCount++; + if (callCount <= numRetries) { + // First numRetries calls: simulate race condition - no rows returned + return { rows: [], command: 'SELECT', rowCount: 0, oid: 0, fields: [] } as QueryResult; + } + // Subsequent call: return actual result + return await originalQuery(text, values); + }); + + const result = await spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + // Should have retried numRetries times (initial attempt + numRetries retries) + expect(callCount).toBe(numRetries + 1); + // Should eventually succeed + expect(result.isNew).toBe(true); + expect(result.record.validatorAddress).toEqual(VALIDATOR_ADDRESS); + expect(result.record.slot).toBe(SLOT); + + // Restore original query + jest.restoreAllMocks(); + }); + + it('should throw error after all 3 retries are exhausted', async () => { + const spDb = new 
PostgresSlashingProtectionDatabase(pool); + + // Mock pool.query to always return no rows + jest + .spyOn(pool, 'query') + .mockResolvedValue({ rows: [], command: 'SELECT', rowCount: 0, oid: 0, fields: [] } as QueryResult); + + await expect( + spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }), + ).rejects.toThrow('INSERT_OR_GET_DUTY returned no rows'); + + // Restore original query + jest.restoreAllMocks(); + }); + + it('should throw error if query returns more than one row (database constraint violation)', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + // Mock pool.query to return multiple rows (simulating a database constraint violation) + // These use snake_case to match database column names + /* eslint-disable camelcase */ + const mockRow1 = { + validator_address: VALIDATOR_ADDRESS.toString(), + slot: SLOT.toString(), + block_number: BLOCK_NUMBER.toString(), + block_index_within_checkpoint: 0, + duty_type: DutyType.BLOCK_PROPOSAL, + status: DutyStatus.SIGNING, + message_hash: MESSAGE_HASH, + signature: null, + node_id: NODE_ID, + lock_token: 'token-1', + started_at: new Date(), + completed_at: null, + error_message: null, + is_new: true, + } as InsertOrGetRow; + const mockRow2 = { + validator_address: VALIDATOR_ADDRESS.toString(), + slot: SLOT.toString(), + block_number: BLOCK_NUMBER.toString(), + block_index_within_checkpoint: 0, + duty_type: DutyType.BLOCK_PROPOSAL, + status: DutyStatus.SIGNING, + message_hash: MESSAGE_HASH, + signature: null, + node_id: 'node-2', + lock_token: 'token-2', + started_at: new Date(), + completed_at: null, + error_message: null, + is_new: false, + } as InsertOrGetRow; + /* eslint-enable camelcase */ + + jest.spyOn(pool, 'query').mockResolvedValue({ + rows: [mockRow1, mockRow2], + command: 'SELECT', + rowCount: 2, + oid: 0, + fields: [], + } as QueryResult); + + await expect( + spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }), + ).rejects.toThrow('INSERT_OR_GET_DUTY returned 2 rows (expected exactly 1).'); + + // Restore original query + jest.restoreAllMocks(); + }); + }); + + describe('large numbers handling', () => { + const VALIDATOR_ADDRESS = EthAddress.random(); + const SLOT = SlotNumber(100); + const BLOCK_NUMBER = BlockNumber(50); const MESSAGE_HASH = Buffer32.random().toString(); const NODE_ID = 'node-1'; @@ -536,13 +713,14 @@ describe('PostgresSlashingProtectionDatabase', () => { }); it('should handle large slot numbers correctly', async () => { - const largeSlot = 9007199254740991n; // Max safe integer + const largeSlot = SlotNumber(Number.MAX_SAFE_INTEGER); // Max safe integer - const db = new PostgresSlashingProtectionDatabase(pool); - const result = await db.tryInsertOrGetExisting({ + const spDb = new PostgresSlashingProtectionDatabase(pool); + const result = await spDb.tryInsertOrGetExisting({ validatorAddress: VALIDATOR_ADDRESS, slot: largeSlot, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, dutyType: DutyType.BLOCK_PROPOSAL, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -553,13 +731,14 @@ describe('PostgresSlashingProtectionDatabase', () => { }); it('should handle large block numbers correctly', async () => { - const 
largeBlockNumber = 9007199254740991n; + const largeBlockNumber = BlockNumber(Number.MAX_SAFE_INTEGER); - const db = new PostgresSlashingProtectionDatabase(pool); - const result = await db.tryInsertOrGetExisting({ + const spDb = new PostgresSlashingProtectionDatabase(pool); + const result = await spDb.tryInsertOrGetExisting({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: largeBlockNumber, + blockIndexWithinCheckpoint: 0, dutyType: DutyType.BLOCK_PROPOSAL, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -569,4 +748,331 @@ describe('PostgresSlashingProtectionDatabase', () => { expect(result.record.blockNumber).toBe(largeBlockNumber); }); }); + + describe('updateDutySigned', () => { + const VALIDATOR_ADDRESS = EthAddress.random(); + const SLOT = SlotNumber(100); + const BLOCK_NUMBER = BlockNumber(50); + const MESSAGE_HASH = Buffer32.random().toString(); + const NODE_ID = 'node-1'; + const SIGNATURE = '0xsignature123'; + + beforeEach(async () => { + for (const statement of SCHEMA_SETUP) { + await pglite.query(statement); + } + await pglite.query(INSERT_SCHEMA_VERSION, [SCHEMA_VERSION]); + }); + + it('should return true and update duty to signed status with correct token', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + // Insert a duty first + const insertResult = await spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + expect(insertResult.isNew).toBe(true); + const lockToken = insertResult.record.lockToken; + + // Update to signed + const success = await spDb.updateDutySigned( + VALIDATOR_ADDRESS, + SLOT, + DutyType.BLOCK_PROPOSAL, + SIGNATURE, + lockToken, + 0, + ); + + expect(success).toBe(true); + + // Verify the update + const selectResult = await pglite.query( + `SELECT status, signature, completed_at FROM validator_duties + WHERE validator_address = $1 AND slot = $2 AND duty_type = $3 AND block_index_within_checkpoint = $4`, + [VALIDATOR_ADDRESS.toString(), SLOT.toString(), DutyType.BLOCK_PROPOSAL, 0], + ); + + const row = selectResult.rows[0]; + expect(row.status).toBe(DutyStatus.SIGNED); + expect(row.signature).toBe(SIGNATURE); + expect(row.completed_at).toBeTruthy(); + }); + + it('should return false with wrong token', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + // Insert a duty first + const insertResult = await spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + expect(insertResult.isNew).toBe(true); + + // Try to update with wrong token + const success = await spDb.updateDutySigned( + VALIDATOR_ADDRESS, + SLOT, + DutyType.BLOCK_PROPOSAL, + SIGNATURE, + 'wrong-token', + 0, + ); + + expect(success).toBe(false); + + // Verify still in signing state + const selectResult = await pglite.query( + `SELECT status FROM validator_duties WHERE validator_address = $1 AND slot = $2`, + [VALIDATOR_ADDRESS.toString(), SLOT.toString()], + ); + expect(selectResult.rows[0].status).toBe(DutyStatus.SIGNING); + }); + + it('should return false if duty not found', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + const success = await spDb.updateDutySigned( + VALIDATOR_ADDRESS, + SLOT, + DutyType.BLOCK_PROPOSAL, + SIGNATURE, + 
'some-token', + 0, + ); + + expect(success).toBe(false); + }); + + it('should return false if status is not signing', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + // Insert and mark as signed + const insertResult = await spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + const lockToken = insertResult.record.lockToken; + await spDb.updateDutySigned(VALIDATOR_ADDRESS, SLOT, DutyType.BLOCK_PROPOSAL, SIGNATURE, lockToken, 0); + + // Try to update again with correct token + const success = await spDb.updateDutySigned( + VALIDATOR_ADDRESS, + SLOT, + DutyType.BLOCK_PROPOSAL, + 'new-signature', + lockToken, + 0, + ); + + expect(success).toBe(false); + + // Verify signature unchanged + const selectResult = await pglite.query( + `SELECT signature FROM validator_duties WHERE validator_address = $1 AND slot = $2`, + [VALIDATOR_ADDRESS.toString(), SLOT.toString()], + ); + expect(selectResult.rows[0].signature).toBe(SIGNATURE); + }); + + it('should handle non-block-proposal duties (uses -1 for block index)', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + // Insert an ATTESTATION duty (non-block-proposal, so no blockIndexWithinCheckpoint) + const insertResult = await spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + dutyType: DutyType.ATTESTATION, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + expect(insertResult.isNew).toBe(true); + const lockToken = insertResult.record.lockToken; + + // Update to signed with -1 for block index (non-block-proposal duty) + const success = await spDb.updateDutySigned( + VALIDATOR_ADDRESS, + SLOT, + DutyType.ATTESTATION, + SIGNATURE, + lockToken, + -1, + ); + + expect(success).toBe(true); + + // Verify the update (block_index_within_checkpoint should be -1) + const selectResult = await pglite.query( + `SELECT status, signature, completed_at, block_index_within_checkpoint FROM validator_duties + WHERE validator_address = $1 AND slot = $2 AND duty_type = $3`, + [VALIDATOR_ADDRESS.toString(), SLOT.toString(), DutyType.ATTESTATION], + ); + + const row = selectResult.rows[0]; + expect(row.status).toBe(DutyStatus.SIGNED); + expect(row.signature).toBe(SIGNATURE); + expect(row.completed_at).toBeTruthy(); + expect(row.block_index_within_checkpoint).toBe(-1); + }); + }); + + describe('deleteDuty', () => { + const VALIDATOR_ADDRESS = EthAddress.random(); + const SLOT = SlotNumber(100); + const BLOCK_NUMBER = BlockNumber(50); + const MESSAGE_HASH = Buffer32.random().toString(); + const NODE_ID = 'node-1'; + + beforeEach(async () => { + for (const statement of SCHEMA_SETUP) { + await pglite.query(statement); + } + await pglite.query(INSERT_SCHEMA_VERSION, [SCHEMA_VERSION]); + }); + + it('should return true and delete a signing duty with correct token', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + // Insert a duty first + const insertResult = await spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + expect(insertResult.isNew).toBe(true); + const lockToken = insertResult.record.lockToken; + + // Delete the duty + const success = await 
spDb.deleteDuty(VALIDATOR_ADDRESS, SLOT, DutyType.BLOCK_PROPOSAL, lockToken, 0); + + expect(success).toBe(true); + + // Verify deleted + const selectResult = await pglite.query( + `SELECT * FROM validator_duties WHERE validator_address = $1 AND slot = $2`, + [VALIDATOR_ADDRESS.toString(), SLOT.toString()], + ); + expect(selectResult.rows.length).toBe(0); + }); + + it('should return false with wrong token', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + // Insert a duty first + await spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + // Try to delete with wrong token + const success = await spDb.deleteDuty(VALIDATOR_ADDRESS, SLOT, DutyType.BLOCK_PROPOSAL, 'wrong-token', 0); + + expect(success).toBe(false); + + // Verify still exists + const selectResult = await pglite.query( + `SELECT * FROM validator_duties WHERE validator_address = $1 AND slot = $2`, + [VALIDATOR_ADDRESS.toString(), SLOT.toString()], + ); + expect(selectResult.rows.length).toBe(1); + }); + + it('should return false if duty not found', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + const success = await spDb.deleteDuty(VALIDATOR_ADDRESS, SLOT, DutyType.BLOCK_PROPOSAL, 'some-token', 0); + + expect(success).toBe(false); + }); + + it('should return false if duty is already signed', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + // Insert and mark as signed + const insertResult = await spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + const lockToken = insertResult.record.lockToken; + await spDb.updateDutySigned(VALIDATOR_ADDRESS, SLOT, DutyType.BLOCK_PROPOSAL, '0xsignature', lockToken, 0); + + // Try to delete with correct token (should fail because duty is signed) + const success = await spDb.deleteDuty(VALIDATOR_ADDRESS, SLOT, DutyType.BLOCK_PROPOSAL, lockToken, 0); + + expect(success).toBe(false); + + // Verify still exists + const selectResult = await pglite.query( + `SELECT * FROM validator_duties WHERE validator_address = $1 AND slot = $2`, + [VALIDATOR_ADDRESS.toString(), SLOT.toString()], + ); + expect(selectResult.rows.length).toBe(1); + }); + + it('should handle non-block-proposal duties (uses -1 for block index)', async () => { + const spDb = new PostgresSlashingProtectionDatabase(pool); + + // Insert an ATTESTATION duty (non-block-proposal, so no blockIndexWithinCheckpoint) + const insertResult = await spDb.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + dutyType: DutyType.ATTESTATION, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + expect(insertResult.isNew).toBe(true); + const lockToken = insertResult.record.lockToken; + + // Delete with -1 for block index (non-block-proposal duty) + const success = await spDb.deleteDuty(VALIDATOR_ADDRESS, SLOT, DutyType.ATTESTATION, lockToken, -1); + + expect(success).toBe(true); + + // Verify deleted + const selectResult = await pglite.query( + `SELECT * FROM validator_duties WHERE validator_address = $1 AND slot = $2 AND duty_type = $3`, + [VALIDATOR_ADDRESS.toString(), SLOT.toString(), DutyType.ATTESTATION], + ); + 
expect(selectResult.rows.length).toBe(0); + }); + }); }); diff --git a/yarn-project/validator-ha-signer/src/db/postgres.ts b/yarn-project/validator-ha-signer/src/db/postgres.ts index d4001172ff3c..225a35a65526 100644 --- a/yarn-project/validator-ha-signer/src/db/postgres.ts +++ b/yarn-project/validator-ha-signer/src/db/postgres.ts @@ -1,9 +1,11 @@ /** * PostgreSQL implementation of SlashingProtectionDatabase */ +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { randomBytes } from '@aztec/foundation/crypto/random'; import { EthAddress } from '@aztec/foundation/eth-address'; import { type Logger, createLogger } from '@aztec/foundation/log'; +import { makeBackoff, retry } from '@aztec/foundation/retry'; import type { QueryResult, QueryResultRow } from 'pg'; @@ -16,6 +18,7 @@ import { UPDATE_DUTY_SIGNED, } from './schema.js'; import type { CheckAndRecordParams, DutyRow, DutyType, InsertOrGetRow, ValidatorDutyRecord } from './types.js'; +import { getBlockIndexFromDutyIdentifier } from './types.js'; /** * Minimal pool interface for database operations. @@ -57,13 +60,13 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat dbVersion = result.rows[0].version; } catch { throw new Error( - 'Database schema not initialized. Please run migrations first: aztec migrate up --database-url ', + 'Database schema not initialized. Please run migrations first: aztec migrate-ha-db up --database-url ', ); } if (dbVersion < SCHEMA_VERSION) { throw new Error( - `Database schema version ${dbVersion} is outdated (expected ${SCHEMA_VERSION}). Please run migrations: aztec migrate up --database-url `, + `Database schema version ${dbVersion} is outdated (expected ${SCHEMA_VERSION}). Please run migrations: aztec migrate-ha-db up --database-url `, ); } @@ -81,24 +84,54 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat * * @returns { isNew: true, record } if we successfully inserted and acquired the lock * @returns { isNew: false, record } if a record already exists. lock_token is empty if the record already exists. + * + * Retries if no rows are returned, which can happen under high concurrency + * when another transaction just committed the row but it's not yet visible. 
*/ async tryInsertOrGetExisting(params: CheckAndRecordParams): Promise { // create a token for ownership verification const lockToken = randomBytes(16).toString('hex'); - const result: QueryResult = await this.pool.query(INSERT_OR_GET_DUTY, [ - params.validatorAddress.toString(), - params.slot.toString(), - params.blockNumber.toString(), - params.dutyType, - params.messageHash, - params.nodeId, - lockToken, - ]); + // Use fast retries with custom backoff: 10ms, 20ms, 30ms (then stop) + const fastBackoff = makeBackoff([0.01, 0.02, 0.03]); + + // Get the normalized block index using type-safe helper + const blockIndexWithinCheckpoint = getBlockIndexFromDutyIdentifier(params); + + const result = await retry>( + async () => { + const queryResult: QueryResult = await this.pool.query(INSERT_OR_GET_DUTY, [ + params.validatorAddress.toString(), + params.slot.toString(), + params.blockNumber.toString(), + blockIndexWithinCheckpoint, + params.dutyType, + params.messageHash, + params.nodeId, + lockToken, + ]); + + // Throw error if no rows to trigger retry + if (queryResult.rows.length === 0) { + throw new Error('INSERT_OR_GET_DUTY returned no rows'); + } + + return queryResult; + }, + `INSERT_OR_GET_DUTY for node ${params.nodeId}`, + fastBackoff, + this.log, + true, + ); if (result.rows.length === 0) { - // This shouldn't happen - the query always returns either the inserted or existing row - throw new Error('INSERT_OR_GET_DUTY returned no rows'); + // this should never happen as the retry function should throw if it still fails after retries + throw new Error('INSERT_OR_GET_DUTY returned no rows after retries'); + } + + if (result.rows.length > 1) { + // this should never happen if database constraints are correct (PRIMARY KEY should prevent duplicates) + throw new Error(`INSERT_OR_GET_DUTY returned ${result.rows.length} rows (expected exactly 1).`); } const row = result.rows[0]; @@ -116,16 +149,18 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat */ async updateDutySigned( validatorAddress: EthAddress, - slot: bigint, + slot: SlotNumber, dutyType: DutyType, signature: string, lockToken: string, + blockIndexWithinCheckpoint: number, ): Promise { const result = await this.pool.query(UPDATE_DUTY_SIGNED, [ signature, validatorAddress.toString(), slot.toString(), dutyType, + blockIndexWithinCheckpoint, lockToken, ]); @@ -134,6 +169,7 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat validatorAddress: validatorAddress.toString(), slot: slot.toString(), dutyType, + blockIndexWithinCheckpoint, }); return false; } @@ -149,14 +185,16 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat */ async deleteDuty( validatorAddress: EthAddress, - slot: bigint, + slot: SlotNumber, dutyType: DutyType, lockToken: string, + blockIndexWithinCheckpoint: number, ): Promise { const result = await this.pool.query(DELETE_DUTY, [ validatorAddress.toString(), slot.toString(), dutyType, + blockIndexWithinCheckpoint, lockToken, ]); @@ -165,6 +203,7 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat validatorAddress: validatorAddress.toString(), slot: slot.toString(), dutyType, + blockIndexWithinCheckpoint, }); return false; } @@ -177,8 +216,9 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat private rowToRecord(row: DutyRow): ValidatorDutyRecord { return { validatorAddress: EthAddress.fromString(row.validator_address), - slot: BigInt(row.slot), - blockNumber: 
BigInt(row.block_number), + slot: SlotNumber.fromString(row.slot), + blockNumber: BlockNumber.fromString(row.block_number), + blockIndexWithinCheckpoint: row.block_index_within_checkpoint, dutyType: row.duty_type, status: row.status, messageHash: row.message_hash, diff --git a/yarn-project/validator-ha-signer/src/db/schema.ts b/yarn-project/validator-ha-signer/src/db/schema.ts index 5bd132c7ec88..c72734a51d95 100644 --- a/yarn-project/validator-ha-signer/src/db/schema.ts +++ b/yarn-project/validator-ha-signer/src/db/schema.ts @@ -19,7 +19,8 @@ CREATE TABLE IF NOT EXISTS validator_duties ( validator_address VARCHAR(42) NOT NULL, slot BIGINT NOT NULL, block_number BIGINT NOT NULL, - duty_type VARCHAR(30) NOT NULL CHECK (duty_type IN ('BLOCK_PROPOSAL', 'ATTESTATION', 'ATTESTATIONS_AND_SIGNERS')), + block_index_within_checkpoint INTEGER NOT NULL DEFAULT 0, + duty_type VARCHAR(30) NOT NULL CHECK (duty_type IN ('BLOCK_PROPOSAL', 'CHECKPOINT_PROPOSAL', 'ATTESTATION', 'ATTESTATIONS_AND_SIGNERS', 'GOVERNANCE_VOTE', 'SLASHING_VOTE')), status VARCHAR(20) NOT NULL CHECK (status IN ('signing', 'signed', 'failed')), message_hash VARCHAR(66) NOT NULL, signature VARCHAR(132), @@ -29,7 +30,7 @@ CREATE TABLE IF NOT EXISTS validator_duties ( completed_at TIMESTAMP, error_message TEXT, - PRIMARY KEY (validator_address, slot, duty_type), + PRIMARY KEY (validator_address, slot, duty_type, block_index_within_checkpoint), CHECK (completed_at IS NULL OR completed_at >= started_at) ); `; @@ -92,6 +93,10 @@ SELECT version FROM schema_version ORDER BY version DESC LIMIT 1; * returns the existing record instead. * * Returns the record with an `is_new` flag indicating whether we inserted or got existing. + * + * Note: In high concurrency scenarios, if the INSERT conflicts and another transaction + * just committed the row, there's a small window where the SELECT might not see it yet. + * The application layer should retry if no rows are returned. 
*/ export const INSERT_OR_GET_DUTY = ` WITH inserted AS ( @@ -99,18 +104,20 @@ WITH inserted AS ( validator_address, slot, block_number, + block_index_within_checkpoint, duty_type, status, message_hash, node_id, lock_token, started_at - ) VALUES ($1, $2, $3, $4, 'signing', $5, $6, $7, CURRENT_TIMESTAMP) - ON CONFLICT (validator_address, slot, duty_type) DO NOTHING + ) VALUES ($1, $2, $3, $4, $5, 'signing', $6, $7, $8, CURRENT_TIMESTAMP) + ON CONFLICT (validator_address, slot, duty_type, block_index_within_checkpoint) DO NOTHING RETURNING validator_address, slot, block_number, + block_index_within_checkpoint, duty_type, status, message_hash, @@ -128,6 +135,7 @@ SELECT validator_address, slot, block_number, + block_index_within_checkpoint, duty_type, status, message_hash, @@ -141,7 +149,8 @@ SELECT FROM validator_duties WHERE validator_address = $1 AND slot = $2 - AND duty_type = $4 + AND duty_type = $5 + AND block_index_within_checkpoint = $4 AND NOT EXISTS (SELECT 1 FROM inserted); `; @@ -156,8 +165,9 @@ SET status = 'signed', WHERE validator_address = $2 AND slot = $3 AND duty_type = $4 + AND block_index_within_checkpoint = $5 AND status = 'signing' - AND lock_token = $5; + AND lock_token = $6; `; /** @@ -169,8 +179,9 @@ DELETE FROM validator_duties WHERE validator_address = $1 AND slot = $2 AND duty_type = $3 + AND block_index_within_checkpoint = $4 AND status = 'signing' - AND lock_token = $4; + AND lock_token = $5; `; /** @@ -223,6 +234,7 @@ SELECT validator_address, slot, block_number, + block_index_within_checkpoint, duty_type, status, message_hash, diff --git a/yarn-project/validator-ha-signer/src/db/types.test.ts b/yarn-project/validator-ha-signer/src/db/types.test.ts new file mode 100644 index 000000000000..3efc9b4ef6b0 --- /dev/null +++ b/yarn-project/validator-ha-signer/src/db/types.test.ts @@ -0,0 +1,131 @@ +import { SlotNumber } from '@aztec/foundation/branded-types'; +import { EthAddress } from '@aztec/foundation/eth-address'; + +import { describe, expect, it } from '@jest/globals'; + +import { + type BlockProposalDutyIdentifier, + DutyType, + type OtherDutyIdentifier, + getBlockIndexFromDutyIdentifier, + normalizeBlockIndex, +} from './types.js'; + +describe('normalizeBlockIndex', () => { + describe('BLOCK_PROPOSAL duties', () => { + it('should return the blockIndexWithinCheckpoint when valid (>= 0)', () => { + expect(normalizeBlockIndex(DutyType.BLOCK_PROPOSAL, 0)).toBe(0); + expect(normalizeBlockIndex(DutyType.BLOCK_PROPOSAL, 1)).toBe(1); + expect(normalizeBlockIndex(DutyType.BLOCK_PROPOSAL, 2)).toBe(2); + expect(normalizeBlockIndex(DutyType.BLOCK_PROPOSAL, 100)).toBe(100); + }); + + it('should throw when blockIndexWithinCheckpoint is undefined', () => { + expect(() => normalizeBlockIndex(DutyType.BLOCK_PROPOSAL, undefined)).toThrow( + 'BLOCK_PROPOSAL duties require blockIndexWithinCheckpoint to be specified', + ); + }); + + it('should throw when blockIndexWithinCheckpoint is negative', () => { + expect(() => normalizeBlockIndex(DutyType.BLOCK_PROPOSAL, -1)).toThrow( + 'BLOCK_PROPOSAL duties require blockIndexWithinCheckpoint >= 0, got -1', + ); + expect(() => normalizeBlockIndex(DutyType.BLOCK_PROPOSAL, -100)).toThrow( + 'BLOCK_PROPOSAL duties require blockIndexWithinCheckpoint >= 0, got -100', + ); + }); + }); + + describe('non-BLOCK_PROPOSAL duties', () => { + const nonBlockProposalTypes = [ + DutyType.CHECKPOINT_PROPOSAL, + DutyType.ATTESTATION, + DutyType.ATTESTATIONS_AND_SIGNERS, + DutyType.GOVERNANCE_VOTE, + DutyType.SLASHING_VOTE, + ]; + + 
it.each(nonBlockProposalTypes)('should return -1 for %s regardless of input', dutyType => { + // Should return -1 even when undefined + expect(normalizeBlockIndex(dutyType, undefined)).toBe(-1); + + // Should return -1 even when a value is passed (ignores the value) + expect(normalizeBlockIndex(dutyType, 0)).toBe(-1); + expect(normalizeBlockIndex(dutyType, 1)).toBe(-1); + expect(normalizeBlockIndex(dutyType, 100)).toBe(-1); + expect(normalizeBlockIndex(dutyType, -1)).toBe(-1); + }); + }); +}); + +describe('getBlockIndexFromDutyIdentifier', () => { + const validatorAddress = EthAddress.random(); + const slot = SlotNumber(100); + + describe('BLOCK_PROPOSAL duties', () => { + it('should return the blockIndexWithinCheckpoint', () => { + const duty: BlockProposalDutyIdentifier = { + validatorAddress, + slot, + blockIndexWithinCheckpoint: 0, + dutyType: DutyType.BLOCK_PROPOSAL, + }; + expect(getBlockIndexFromDutyIdentifier(duty)).toBe(0); + + const duty2: BlockProposalDutyIdentifier = { + validatorAddress, + slot, + blockIndexWithinCheckpoint: 5, + dutyType: DutyType.BLOCK_PROPOSAL, + }; + expect(getBlockIndexFromDutyIdentifier(duty2)).toBe(5); + }); + }); + + describe('non-BLOCK_PROPOSAL duties', () => { + it('should return -1 for CHECKPOINT_PROPOSAL', () => { + const duty: OtherDutyIdentifier = { + validatorAddress, + slot, + dutyType: DutyType.CHECKPOINT_PROPOSAL, + }; + expect(getBlockIndexFromDutyIdentifier(duty)).toBe(-1); + }); + + it('should return -1 for ATTESTATION', () => { + const duty: OtherDutyIdentifier = { + validatorAddress, + slot, + dutyType: DutyType.ATTESTATION, + }; + expect(getBlockIndexFromDutyIdentifier(duty)).toBe(-1); + }); + + it('should return -1 for ATTESTATIONS_AND_SIGNERS', () => { + const duty: OtherDutyIdentifier = { + validatorAddress, + slot, + dutyType: DutyType.ATTESTATIONS_AND_SIGNERS, + }; + expect(getBlockIndexFromDutyIdentifier(duty)).toBe(-1); + }); + + it('should return -1 for GOVERNANCE_VOTE', () => { + const duty: OtherDutyIdentifier = { + validatorAddress, + slot, + dutyType: DutyType.GOVERNANCE_VOTE, + }; + expect(getBlockIndexFromDutyIdentifier(duty)).toBe(-1); + }); + + it('should return -1 for SLASHING_VOTE', () => { + const duty: OtherDutyIdentifier = { + validatorAddress, + slot, + dutyType: DutyType.SLASHING_VOTE, + }; + expect(getBlockIndexFromDutyIdentifier(duty)).toBe(-1); + }); + }); +}); diff --git a/yarn-project/validator-ha-signer/src/db/types.ts b/yarn-project/validator-ha-signer/src/db/types.ts index af56e6d782bc..9f1a8e1b5546 100644 --- a/yarn-project/validator-ha-signer/src/db/types.ts +++ b/yarn-project/validator-ha-signer/src/db/types.ts @@ -1,3 +1,4 @@ +import type { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; import type { EthAddress } from '@aztec/foundation/eth-address'; import type { Signature } from '@aztec/foundation/eth-signature'; @@ -8,6 +9,7 @@ export interface DutyRow { validator_address: string; slot: string; block_number: string; + block_index_within_checkpoint: number; duty_type: DutyType; status: DutyStatus; message_hash: string; @@ -31,8 +33,13 @@ export interface InsertOrGetRow extends DutyRow { */ export enum DutyType { BLOCK_PROPOSAL = 'BLOCK_PROPOSAL', + CHECKPOINT_PROPOSAL = 'CHECKPOINT_PROPOSAL', ATTESTATION = 'ATTESTATION', ATTESTATIONS_AND_SIGNERS = 'ATTESTATIONS_AND_SIGNERS', + GOVERNANCE_VOTE = 'GOVERNANCE_VOTE', + SLASHING_VOTE = 'SLASHING_VOTE', + AUTH_REQUEST = 'AUTH_REQUEST', + TXS = 'TXS', } /** @@ -50,9 +57,11 @@ export interface ValidatorDutyRecord { /** 
Ethereum address of the validator */ validatorAddress: EthAddress; /** Slot number for this duty */ - slot: bigint; + slot: SlotNumber; /** Block number for this duty */ - blockNumber: bigint; + blockNumber: BlockNumber; + /** Block index within checkpoint (0, 1, 2... for block proposals, -1 for other duty types) */ + blockIndexWithinCheckpoint: number; /** Type of duty being performed */ dutyType: DutyType; /** Current status of the duty */ @@ -74,44 +83,119 @@ export interface ValidatorDutyRecord { } /** - * Minimal info needed to identify a unique duty + * Duty identifier for block proposals. + * blockIndexWithinCheckpoint is REQUIRED and must be >= 0. */ -export interface DutyIdentifier { +export interface BlockProposalDutyIdentifier { validatorAddress: EthAddress; - slot: bigint; - dutyType: DutyType; + slot: SlotNumber; + /** Block index within checkpoint (0, 1, 2...). Required for block proposals. */ + blockIndexWithinCheckpoint: number; + dutyType: DutyType.BLOCK_PROPOSAL; } /** - * Parameters for checking and recording a new duty + * Duty identifier for non-block-proposal duties. + * blockIndexWithinCheckpoint is not applicable (internally stored as -1). */ -export interface CheckAndRecordParams { +export interface OtherDutyIdentifier { validatorAddress: EthAddress; - slot: bigint; - blockNumber: bigint; - dutyType: DutyType; + slot: SlotNumber; + dutyType: + | DutyType.CHECKPOINT_PROPOSAL + | DutyType.ATTESTATION + | DutyType.ATTESTATIONS_AND_SIGNERS + | DutyType.GOVERNANCE_VOTE + | DutyType.SLASHING_VOTE + | DutyType.AUTH_REQUEST + | DutyType.TXS; +} + +/** + * Minimal info needed to identify a unique duty. + * Uses discriminated union to enforce type safety: + * - BLOCK_PROPOSAL duties MUST have blockIndexWithinCheckpoint >= 0 + * - Other duty types do NOT have blockIndexWithinCheckpoint (internally -1) + */ +export type DutyIdentifier = BlockProposalDutyIdentifier | OtherDutyIdentifier; + +/** + * Validates and normalizes the block index for a duty. + * - BLOCK_PROPOSAL: validates blockIndexWithinCheckpoint is provided and >= 0 + * - Other duty types: always returns -1 + * + * @throws Error if BLOCK_PROPOSAL is missing blockIndexWithinCheckpoint or has invalid value + */ +export function normalizeBlockIndex(dutyType: DutyType, blockIndexWithinCheckpoint: number | undefined): number { + if (dutyType === DutyType.BLOCK_PROPOSAL) { + if (blockIndexWithinCheckpoint === undefined) { + throw new Error('BLOCK_PROPOSAL duties require blockIndexWithinCheckpoint to be specified'); + } + if (blockIndexWithinCheckpoint < 0) { + throw new Error( + `BLOCK_PROPOSAL duties require blockIndexWithinCheckpoint >= 0, got ${blockIndexWithinCheckpoint}`, + ); + } + return blockIndexWithinCheckpoint; + } + // For all other duty types, always use -1 + return -1; +} + +/** + * Gets the block index from a DutyIdentifier. 
+ * - BLOCK_PROPOSAL: returns the blockIndexWithinCheckpoint + * - Other duty types: returns -1 + */ +export function getBlockIndexFromDutyIdentifier(duty: DutyIdentifier): number { + if (duty.dutyType === DutyType.BLOCK_PROPOSAL) { + return duty.blockIndexWithinCheckpoint; + } + return -1; +} + +/** + * Additional parameters for checking and recording a new duty + */ +interface CheckAndRecordExtra { + /** Block number for this duty */ + blockNumber: BlockNumber | CheckpointNumber; + /** The signing root (hash) for this duty */ messageHash: string; + /** Identifier for the node that acquired the lock */ nodeId: string; } /** - * Parameters for recording a successful signing + * Parameters for checking and recording a new duty. + * Uses intersection with DutyIdentifier to preserve the discriminated union. */ -export interface RecordSuccessParams { - validatorAddress: EthAddress; - slot: bigint; - dutyType: DutyType; +export type CheckAndRecordParams = DutyIdentifier & CheckAndRecordExtra; + +/** + * Additional parameters for recording a successful signing + */ +interface RecordSuccessExtra { signature: Signature; nodeId: string; lockToken: string; } /** - * Parameters for deleting a duty + * Parameters for recording a successful signing. + * Uses intersection with DutyIdentifier to preserve the discriminated union. */ -export interface DeleteDutyParams { - validatorAddress: EthAddress; - slot: bigint; - dutyType: DutyType; +export type RecordSuccessParams = DutyIdentifier & RecordSuccessExtra; + +/** + * Additional parameters for deleting a duty + */ +interface DeleteDutyExtra { lockToken: string; } + +/** + * Parameters for deleting a duty. + * Uses intersection with DutyIdentifier to preserve the discriminated union. + */ +export type DeleteDutyParams = DutyIdentifier & DeleteDutyExtra; diff --git a/yarn-project/validator-ha-signer/src/errors.test.ts b/yarn-project/validator-ha-signer/src/errors.test.ts index fb40e1017771..fee4d34f70f9 100644 --- a/yarn-project/validator-ha-signer/src/errors.test.ts +++ b/yarn-project/validator-ha-signer/src/errors.test.ts @@ -1,3 +1,5 @@ +import { SlotNumber } from '@aztec/foundation/branded-types'; + import { describe, expect, it } from '@jest/globals'; import { DutyAlreadySignedError, SlashingProtectionError } from './errors.js'; @@ -5,33 +7,35 @@ import { DutyType } from './types.js'; describe('DutyAlreadySignedError', () => { it('should create error with correct properties', () => { - const slot = 100n; + const slot = SlotNumber(100); const dutyType = DutyType.BLOCK_PROPOSAL; + const blockIndexWithinCheckpoint = 0; const signedByNode = 'node-1'; - const error = new DutyAlreadySignedError(slot, dutyType, signedByNode); + const error = new DutyAlreadySignedError(slot, dutyType, blockIndexWithinCheckpoint, signedByNode); expect(error).toBeInstanceOf(Error); expect(error.name).toBe('DutyAlreadySignedError'); expect(error.slot).toBe(slot); expect(error.dutyType).toBe(dutyType); + expect(error.blockIndexWithinCheckpoint).toBe(blockIndexWithinCheckpoint); expect(error.signedByNode).toBe(signedByNode); expect(error.message).toBe('Duty BLOCK_PROPOSAL for slot 100 already signed by node node-1'); }); it('should work with ATTESTATION duty type', () => { - const error = new DutyAlreadySignedError(200n, DutyType.ATTESTATION, 'node-2'); + const error = new DutyAlreadySignedError(SlotNumber(200), DutyType.ATTESTATION, -1, 'node-2'); expect(error.message).toBe('Duty ATTESTATION for slot 200 already signed by node node-2'); }); it('should work with 
ATTESTATIONS_AND_SIGNERS duty type', () => { - const error = new DutyAlreadySignedError(300n, DutyType.ATTESTATIONS_AND_SIGNERS, 'node-3'); + const error = new DutyAlreadySignedError(SlotNumber(300), DutyType.ATTESTATIONS_AND_SIGNERS, -1, 'node-3'); expect(error.message).toBe('Duty ATTESTATIONS_AND_SIGNERS for slot 300 already signed by node node-3'); }); it('should work with large slot numbers', () => { - const largeSlot = 9007199254740991n; - const error = new DutyAlreadySignedError(largeSlot, DutyType.BLOCK_PROPOSAL, 'node-large'); + const largeSlot = SlotNumber(Number.MAX_SAFE_INTEGER); + const error = new DutyAlreadySignedError(largeSlot, DutyType.BLOCK_PROPOSAL, 0, 'node-large'); expect(error.slot).toBe(largeSlot); expect(error.message).toContain(largeSlot.toString()); }); @@ -42,21 +46,39 @@ describe('SlashingProtectionError', () => { const attemptedRoot = '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890'; it('should create error with correct properties', () => { - const slot = 100n; + const slot = SlotNumber(100); const dutyType = DutyType.BLOCK_PROPOSAL; + const blockIndexWithinCheckpoint = 0; + const signedByNode = 'node-1'; - const error = new SlashingProtectionError(slot, dutyType, existingRoot, attemptedRoot); + const error = new SlashingProtectionError( + slot, + dutyType, + blockIndexWithinCheckpoint, + existingRoot, + attemptedRoot, + signedByNode, + ); expect(error).toBeInstanceOf(Error); expect(error.name).toBe('SlashingProtectionError'); expect(error.slot).toBe(slot); expect(error.dutyType).toBe(dutyType); + expect(error.blockIndexWithinCheckpoint).toBe(blockIndexWithinCheckpoint); expect(error.existingMessageHash).toBe(existingRoot); expect(error.attemptedMessageHash).toBe(attemptedRoot); + expect(error.signedByNode).toBe(signedByNode); }); it('should include truncated signing roots in message', () => { - const error = new SlashingProtectionError(100n, DutyType.BLOCK_PROPOSAL, existingRoot, attemptedRoot); + const error = new SlashingProtectionError( + SlotNumber(100), + DutyType.BLOCK_PROPOSAL, + 0, + existingRoot, + attemptedRoot, + 'node-1', + ); expect(error.message).toContain('Slashing protection'); expect(error.message).toContain(DutyType.BLOCK_PROPOSAL.toString()); @@ -68,19 +90,40 @@ describe('SlashingProtectionError', () => { }); it('should work with ATTESTATION duty type', () => { - const error = new SlashingProtectionError(200n, DutyType.ATTESTATION, existingRoot, attemptedRoot); + const error = new SlashingProtectionError( + SlotNumber(200), + DutyType.ATTESTATION, + -1, + existingRoot, + attemptedRoot, + 'node-2', + ); expect(error.message).toContain(DutyType.ATTESTATION.toString()); expect(error.message).toContain('slot 200'); }); it('should work with ATTESTATIONS_AND_SIGNERS duty type', () => { - const error = new SlashingProtectionError(300n, DutyType.ATTESTATIONS_AND_SIGNERS, existingRoot, attemptedRoot); + const error = new SlashingProtectionError( + SlotNumber(300), + DutyType.ATTESTATIONS_AND_SIGNERS, + -1, + existingRoot, + attemptedRoot, + 'node-3', + ); expect(error.message).toContain(DutyType.ATTESTATIONS_AND_SIGNERS.toString()); expect(error.message).toContain('slot 300'); }); it('should preserve full signing roots in properties', () => { - const error = new SlashingProtectionError(100n, DutyType.BLOCK_PROPOSAL, existingRoot, attemptedRoot); + const error = new SlashingProtectionError( + SlotNumber(100), + DutyType.BLOCK_PROPOSAL, + 0, + existingRoot, + attemptedRoot, + 'node-1', + ); 
expect(error.existingMessageHash).toBe(existingRoot); expect(error.attemptedMessageHash).toBe(attemptedRoot); // Full roots should be in properties, not just truncated in message diff --git a/yarn-project/validator-ha-signer/src/errors.ts b/yarn-project/validator-ha-signer/src/errors.ts index 9ea1f35e0c2e..4cf796dd8a84 100644 --- a/yarn-project/validator-ha-signer/src/errors.ts +++ b/yarn-project/validator-ha-signer/src/errors.ts @@ -1,6 +1,8 @@ /** * Custom errors for the validator HA signer */ +import type { SlotNumber } from '@aztec/foundation/branded-types'; + import type { DutyType } from './db/types.js'; /** @@ -10,8 +12,9 @@ import type { DutyType } from './db/types.js'; */ export class DutyAlreadySignedError extends Error { constructor( - public readonly slot: bigint, + public readonly slot: SlotNumber, public readonly dutyType: DutyType, + public readonly blockIndexWithinCheckpoint: number, public readonly signedByNode: string, ) { super(`Duty ${dutyType} for slot ${slot} already signed by node ${signedByNode}`); @@ -28,10 +31,12 @@ export class DutyAlreadySignedError extends Error { */ export class SlashingProtectionError extends Error { constructor( - public readonly slot: bigint, + public readonly slot: SlotNumber, public readonly dutyType: DutyType, + public readonly blockIndexWithinCheckpoint: number, public readonly existingMessageHash: string, public readonly attemptedMessageHash: string, + public readonly signedByNode: string, ) { super( `Slashing protection: ${dutyType} for slot ${slot} was already signed with different data. ` + diff --git a/yarn-project/validator-ha-signer/src/factory.ts b/yarn-project/validator-ha-signer/src/factory.ts index 1b1810a0d6ae..d6beb52dc0f8 100644 --- a/yarn-project/validator-ha-signer/src/factory.ts +++ b/yarn-project/validator-ha-signer/src/factory.ts @@ -3,7 +3,7 @@ */ import { Pool } from 'pg'; -import type { CreateHASignerConfig } from './config.js'; +import type { ValidatorHASignerConfig } from './config.js'; import { PostgresSlashingProtectionDatabase } from './db/postgres.js'; import type { CreateHASignerDeps, SlashingProtectionDatabase } from './types.js'; import { ValidatorHASigner } from './validator_ha_signer.js'; @@ -23,7 +23,7 @@ import { ValidatorHASigner } from './validator_ha_signer.js'; * ```typescript * const { signer, db } = await createHASigner({ * databaseUrl: process.env.DATABASE_URL, - * enabled: true, + * haSigningEnabled: true, * nodeId: 'validator-node-1', * pollingIntervalMs: 100, * signingTimeoutMs: 3000, @@ -35,23 +35,15 @@ import { ValidatorHASigner } from './validator_ha_signer.js'; * await signer.stop(); // On shutdown * ``` * - * Example with automatic migrations (simpler for dev/testing): - * ```typescript - * const { signer, db } = await createHASigner({ - * databaseUrl: process.env.DATABASE_URL, - * enabled: true, - * nodeId: 'validator-node-1', - * runMigrations: true, // Auto-run migrations on startup - * }); - * signer.start(); - * ``` + * Note: Migrations must be run separately using `aztec migrate-ha-db up` before + * creating the signer. The factory will verify the schema is initialized via `db.initialize()`. 
* * @param config - Configuration for the HA signer * @param deps - Optional dependencies (e.g., for testing) * @returns An object containing the signer and database instances */ export async function createHASigner( - config: CreateHASignerConfig, + config: ValidatorHASignerConfig, deps?: CreateHASignerDeps, ): Promise<{ signer: ValidatorHASigner; @@ -60,6 +52,9 @@ export async function createHASigner( const { databaseUrl, poolMaxCount, poolMinCount, poolIdleTimeoutMs, poolConnectionTimeoutMs, ...signerConfig } = config; + if (!databaseUrl) { + throw new Error('databaseUrl is required for createHASigner'); + } // Create connection pool (or use provided pool) let pool: Pool; if (!deps?.pool) { diff --git a/yarn-project/validator-ha-signer/src/migrations.ts b/yarn-project/validator-ha-signer/src/migrations.ts index fda3a95efc5e..46c2db013f20 100644 --- a/yarn-project/validator-ha-signer/src/migrations.ts +++ b/yarn-project/validator-ha-signer/src/migrations.ts @@ -3,6 +3,7 @@ */ import { createLogger } from '@aztec/foundation/log'; +import { readdirSync } from 'fs'; import { runner } from 'node-pg-migrate'; import { dirname, join } from 'path'; import { fileURLToPath } from 'url'; @@ -30,17 +31,32 @@ export async function runMigrations(databaseUrl: string, options: RunMigrationsO const log = createLogger('validator-ha-signer:migrations'); + const migrationsDir = join(__dirname, 'db', 'migrations'); + try { log.info(`Running migrations ${direction}...`); + // Filter out .d.ts and .d.ts.map files - node-pg-migrate only needs .js files + const migrationFiles = readdirSync(migrationsDir); + const jsMigrationFiles = migrationFiles.filter( + file => file.endsWith('.js') && !file.endsWith('.d.ts') && !file.endsWith('.d.ts.map'), + ); + + if (jsMigrationFiles.length === 0) { + log.info('No migration files found'); + return []; + } + const appliedMigrations = await runner({ databaseUrl, - dir: join(__dirname, 'db', 'migrations'), + dir: migrationsDir, direction, migrationsTable: 'pgmigrations', count: direction === 'down' ? 1 : Infinity, verbose, log: msg => (verbose ? 
log.info(msg) : log.debug(msg)), + // Ignore TypeScript declaration files - node-pg-migrate will try to import them otherwise + ignorePattern: '.*\\.d\\.(ts|js)$|.*\\.d\\.ts\\.map$', }); if (appliedMigrations.length === 0) { diff --git a/yarn-project/validator-ha-signer/src/slashing_protection_service.test.ts b/yarn-project/validator-ha-signer/src/slashing_protection_service.test.ts index 0d38d2667245..776f2c02e08b 100644 --- a/yarn-project/validator-ha-signer/src/slashing_protection_service.test.ts +++ b/yarn-project/validator-ha-signer/src/slashing_protection_service.test.ts @@ -1,3 +1,4 @@ +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Buffer32 } from '@aztec/foundation/buffer'; import { EthAddress } from '@aztec/foundation/eth-address'; import { sleep } from '@aztec/foundation/sleep'; @@ -9,12 +10,13 @@ import { setupTestSchema } from './db/test_helper.js'; import { DutyAlreadySignedError, SlashingProtectionError } from './errors.js'; import { SlashingProtectionService } from './slashing_protection_service.js'; import { Pool } from './test/pglite_pool.js'; -import { type CheckAndRecordParams, DutyStatus, DutyType, type SlashingProtectionConfig } from './types.js'; +import { type CheckAndRecordParams, DutyStatus, DutyType, type ValidatorHASignerConfig } from './types.js'; // Test data constants const VALIDATOR_ADDRESS = EthAddress.random(); -const SLOT = 100n; -const BLOCK_NUMBER = 50n; +const SLOT = SlotNumber(100); +const BLOCK_NUMBER = BlockNumber(50); +const BLOCK_INDEX_WITHIN_CHECKPOINT = 0; const DUTY_TYPE: DutyType = DutyType.BLOCK_PROPOSAL; const MESSAGE_HASH = Buffer32.random().toString(); const MESSAGE_HASH_2 = Buffer32.random().toString(); @@ -27,7 +29,7 @@ describe('SlashingProtectionService', () => { let pool: Pool; let db: PostgresSlashingProtectionDatabase; let service: SlashingProtectionService; - let config: SlashingProtectionConfig; + let config: ValidatorHASignerConfig; beforeEach(async () => { pglite = new PGlite(); @@ -38,7 +40,7 @@ describe('SlashingProtectionService', () => { await db.initialize(); config = { - enabled: true, + haSigningEnabled: true, nodeId: NODE_ID, pollingIntervalMs: 50, signingTimeoutMs: 1000, @@ -58,6 +60,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -78,6 +81,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -88,6 +92,7 @@ describe('SlashingProtectionService', () => { await service.recordSuccess({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, signature: { toString: () => SIGNATURE } as any, nodeId: NODE_ID, @@ -104,6 +109,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -114,6 +120,7 @@ describe('SlashingProtectionService', () => { await service.recordSuccess({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, signature: { 
toString: () => SIGNATURE } as any, nodeId: NODE_ID, @@ -130,6 +137,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -140,6 +148,7 @@ describe('SlashingProtectionService', () => { await service.deleteDuty({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, lockToken, }); @@ -159,6 +168,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -176,6 +186,7 @@ describe('SlashingProtectionService', () => { await service.recordSuccess({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, signature: { toString: () => SIGNATURE } as any, nodeId: NODE_ID, @@ -191,6 +202,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -208,6 +220,7 @@ describe('SlashingProtectionService', () => { await service.recordSuccess({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, signature: { toString: () => SIGNATURE } as any, nodeId: NODE_ID, @@ -223,6 +236,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -235,6 +249,7 @@ describe('SlashingProtectionService', () => { await service.deleteDuty({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, lockToken, }); @@ -259,6 +274,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -282,6 +298,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -291,6 +308,7 @@ describe('SlashingProtectionService', () => { const success = await service.recordSuccess({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, signature: { toString: () => SIGNATURE } as any, nodeId: NODE_ID, @@ -310,6 +328,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -319,6 +338,7 @@ describe('SlashingProtectionService', () => { const success = await service.recordSuccess({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, 
signature: { toString: () => SIGNATURE } as any, nodeId: NODE_ID, @@ -338,6 +358,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -347,6 +368,7 @@ describe('SlashingProtectionService', () => { const success = await service.deleteDuty({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, lockToken, }); @@ -362,6 +384,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, @@ -371,6 +394,7 @@ describe('SlashingProtectionService', () => { const success = await service.deleteDuty({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, lockToken: 'wrong-token', }); @@ -388,6 +412,7 @@ describe('SlashingProtectionService', () => { validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: 'node-1', @@ -405,6 +430,7 @@ describe('SlashingProtectionService', () => { await service.recordSuccess({ validatorAddress: VALIDATOR_ADDRESS, slot: SLOT, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, dutyType: DUTY_TYPE, signature: { toString: () => SIGNATURE } as any, nodeId: winner.nodeId, @@ -435,11 +461,12 @@ describe('SlashingProtectionService', () => { for (let i = 0; i < 5; i++) { const params: CheckAndRecordParams = { validatorAddress: VALIDATOR_ADDRESS, - slot: BigInt(100 + i), - blockNumber: BigInt(50 + i), + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, + blockIndexWithinCheckpoint: i, }; promises.push(service.checkAndRecord(params)); } @@ -450,11 +477,12 @@ describe('SlashingProtectionService', () => { for (let i = 0; i < 5; i++) { const result = await db.tryInsertOrGetExisting({ validatorAddress: VALIDATOR_ADDRESS, - slot: BigInt(100 + i), - blockNumber: BigInt(50 + i), + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, + blockIndexWithinCheckpoint: i, }); expect(result.isNew).toBe(false); expect(result.record.status).toBe(DutyStatus.SIGNING); @@ -484,6 +512,7 @@ describe('SlashingProtectionService', () => { dutyType: DUTY_TYPE, messageHash: MESSAGE_HASH, nodeId: NODE_ID, + blockIndexWithinCheckpoint: BLOCK_INDEX_WITHIN_CHECKPOINT, }; // Insert a duty that will be "stuck" diff --git a/yarn-project/validator-ha-signer/src/slashing_protection_service.ts b/yarn-project/validator-ha-signer/src/slashing_protection_service.ts index a60e1cd4dab9..a7c6ecda7d57 100644 --- a/yarn-project/validator-ha-signer/src/slashing_protection_service.ts +++ b/yarn-project/validator-ha-signer/src/slashing_protection_service.ts @@ -8,9 +8,15 @@ import { type Logger, createLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/promise'; import { sleep } from '@aztec/foundation/sleep'; -import { type CheckAndRecordParams, type DeleteDutyParams, DutyStatus, type RecordSuccessParams } from './db/types.js'; +import { + type CheckAndRecordParams, + type 
DeleteDutyParams, + DutyStatus, + type RecordSuccessParams, + getBlockIndexFromDutyIdentifier, +} from './db/types.js'; import { DutyAlreadySignedError, SlashingProtectionError } from './errors.js'; -import type { SlashingProtectionConfig, SlashingProtectionDatabase } from './types.js'; +import type { SlashingProtectionDatabase, ValidatorHASignerConfig } from './types.js'; /** * Slashing Protection Service @@ -31,21 +37,24 @@ export class SlashingProtectionService { private readonly log: Logger; private readonly pollingIntervalMs: number; private readonly signingTimeoutMs: number; + private readonly maxStuckDutiesAgeMs: number; private cleanupRunningPromise: RunningPromise; constructor( private readonly db: SlashingProtectionDatabase, - private readonly config: SlashingProtectionConfig, + private readonly config: ValidatorHASignerConfig, ) { this.log = createLogger('slashing-protection'); this.pollingIntervalMs = config.pollingIntervalMs; this.signingTimeoutMs = config.signingTimeoutMs; + // Default to 144s (2x 72s Aztec slot duration) if not explicitly configured + this.maxStuckDutiesAgeMs = config.maxStuckDutiesAgeMs ?? 144_000; this.cleanupRunningPromise = new RunningPromise( this.cleanupStuckDuties.bind(this), this.log, - this.config.maxStuckDutiesAgeMs, + this.maxStuckDutiesAgeMs, ); } @@ -98,9 +107,16 @@ export class SlashingProtectionService { existingNodeId: record.nodeId, attemptingNodeId: nodeId, }); - throw new SlashingProtectionError(slot, dutyType, record.messageHash, messageHash); + throw new SlashingProtectionError( + slot, + dutyType, + record.blockIndexWithinCheckpoint, + record.messageHash, + messageHash, + record.nodeId, + ); } - throw new DutyAlreadySignedError(slot, dutyType, record.nodeId); + throw new DutyAlreadySignedError(slot, dutyType, record.blockIndexWithinCheckpoint, record.nodeId); } else if (record.status === DutyStatus.SIGNING) { // Another node is currently signing - check for timeout if (Date.now() - startTime > this.signingTimeoutMs) { @@ -109,7 +125,7 @@ export class SlashingProtectionService { timeoutMs: this.signingTimeoutMs, signingNodeId: record.nodeId, }); - throw new DutyAlreadySignedError(slot, dutyType, 'unknown (timeout)'); + throw new DutyAlreadySignedError(slot, dutyType, record.blockIndexWithinCheckpoint, 'unknown (timeout)'); } // Wait and poll @@ -134,8 +150,16 @@ export class SlashingProtectionService { */ async recordSuccess(params: RecordSuccessParams): Promise<boolean> { const { validatorAddress, slot, dutyType, signature, nodeId, lockToken } = params; - - const success = await this.db.updateDutySigned(validatorAddress, slot, dutyType, signature.toString(), lockToken); + const blockIndexWithinCheckpoint = getBlockIndexFromDutyIdentifier(params); + + const success = await this.db.updateDutySigned( + validatorAddress, + slot, + dutyType, + signature.toString(), + lockToken, + blockIndexWithinCheckpoint, + ); if (success) { this.log.info(`Recorded successful signing for duty ${dutyType} at slot ${slot}`, { @@ -161,8 +185,9 @@ export class SlashingProtectionService { */ async deleteDuty(params: DeleteDutyParams): Promise<boolean> { const { validatorAddress, slot, dutyType, lockToken } = params; + const blockIndexWithinCheckpoint = getBlockIndexFromDutyIdentifier(params); - const success = await this.db.deleteDuty(validatorAddress, slot, dutyType, lockToken); + const success = await this.db.deleteDuty(validatorAddress, slot, dutyType, lockToken, blockIndexWithinCheckpoint); if (success) { this.log.info(`Deleted duty ${dutyType} at slot ${slot} to allow
retry`, { @@ -201,15 +226,24 @@ export class SlashingProtectionService { this.log.info('Slashing protection service stopped', { nodeId: this.config.nodeId }); } + /** + * Close the database connection. + * Should be called after stop() during graceful shutdown. + */ + async close() { + await this.db.close(); + this.log.info('Slashing protection database connection closed'); + } + /** * Cleanup own stuck duties */ private async cleanupStuckDuties() { - const numDuties = await this.db.cleanupOwnStuckDuties(this.config.nodeId, this.config.maxStuckDutiesAgeMs); + const numDuties = await this.db.cleanupOwnStuckDuties(this.config.nodeId, this.maxStuckDutiesAgeMs); if (numDuties > 0) { this.log.info(`Cleaned up ${numDuties} stuck duties`, { nodeId: this.config.nodeId, - maxStuckDutiesAgeMs: this.config.maxStuckDutiesAgeMs, + maxStuckDutiesAgeMs: this.maxStuckDutiesAgeMs, }); } } diff --git a/yarn-project/validator-ha-signer/src/types.test.ts b/yarn-project/validator-ha-signer/src/types.test.ts new file mode 100644 index 000000000000..a7ea62c0d844 --- /dev/null +++ b/yarn-project/validator-ha-signer/src/types.test.ts @@ -0,0 +1,132 @@ +import { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; + +import { describe, expect, it } from '@jest/globals'; + +import { DutyType } from './db/types.js'; +import { + type BlockProposalSigningContext, + type HAProtectedSigningContext, + type OtherSigningContext, + type VoteSigningContext, + getBlockNumberFromSigningContext, +} from './types.js'; + +describe('getBlockNumberFromSigningContext', () => { + describe('duties with blockNumber', () => { + it('should return blockNumber for BLOCK_PROPOSAL', () => { + const context: BlockProposalSigningContext = { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }; + expect(getBlockNumberFromSigningContext(context)).toBe(BlockNumber(50)); + }); + + it('should return blockNumber for CHECKPOINT_PROPOSAL', () => { + const context: OtherSigningContext = { + slot: SlotNumber(100), + blockNumber: CheckpointNumber(25), + dutyType: DutyType.CHECKPOINT_PROPOSAL, + }; + expect(getBlockNumberFromSigningContext(context)).toBe(CheckpointNumber(25)); + }); + + it('should return blockNumber for ATTESTATION', () => { + const context: OtherSigningContext = { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.ATTESTATION, + }; + expect(getBlockNumberFromSigningContext(context)).toBe(BlockNumber(50)); + }); + + it('should return blockNumber for ATTESTATIONS_AND_SIGNERS', () => { + const context: OtherSigningContext = { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.ATTESTATIONS_AND_SIGNERS, + }; + expect(getBlockNumberFromSigningContext(context)).toBe(BlockNumber(50)); + }); + + it('should handle large block numbers', () => { + const largeBlockNumber = BlockNumber(Number.MAX_SAFE_INTEGER); + const context: BlockProposalSigningContext = { + slot: SlotNumber(100), + blockNumber: largeBlockNumber, + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }; + expect(getBlockNumberFromSigningContext(context)).toBe(largeBlockNumber); + }); + }); + + describe('vote duties (no blockNumber in context)', () => { + it('should return BlockNumber(0) for GOVERNANCE_VOTE', () => { + const context: VoteSigningContext = { + slot: SlotNumber(100), + dutyType: DutyType.GOVERNANCE_VOTE, + }; + expect(getBlockNumberFromSigningContext(context)).toBe(BlockNumber(0)); + }); + + 
it('should return BlockNumber(0) for SLASHING_VOTE', () => { + const context: VoteSigningContext = { + slot: SlotNumber(100), + dutyType: DutyType.SLASHING_VOTE, + }; + expect(getBlockNumberFromSigningContext(context)).toBe(BlockNumber(0)); + }); + + it('should handle slot 0 for vote duties', () => { + const context: VoteSigningContext = { + slot: SlotNumber(0), + dutyType: DutyType.GOVERNANCE_VOTE, + }; + expect(getBlockNumberFromSigningContext(context)).toBe(BlockNumber(0)); + }); + }); + + describe('type safety', () => { + it('should accept all HAProtectedSigningContext types', () => { + const contexts: HAProtectedSigningContext[] = [ + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.ATTESTATION, + }, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.ATTESTATIONS_AND_SIGNERS, + }, + { + slot: SlotNumber(100), + blockNumber: CheckpointNumber(25), + dutyType: DutyType.CHECKPOINT_PROPOSAL, + }, + { + slot: SlotNumber(100), + dutyType: DutyType.GOVERNANCE_VOTE, + }, + { + slot: SlotNumber(100), + dutyType: DutyType.SLASHING_VOTE, + }, + ]; + + // All should return a valid block number without throwing + for (const context of contexts) { + const result = getBlockNumberFromSigningContext(context); + expect(typeof result).toBe('number'); + } + }); + }); +}); diff --git a/yarn-project/validator-ha-signer/src/types.ts b/yarn-project/validator-ha-signer/src/types.ts index 5d2e4c4a0262..9b9790ec66d8 100644 --- a/yarn-project/validator-ha-signer/src/types.ts +++ b/yarn-project/validator-ha-signer/src/types.ts @@ -1,27 +1,31 @@ +import { BlockNumber, type CheckpointNumber, type SlotNumber } from '@aztec/foundation/branded-types'; import type { EthAddress } from '@aztec/foundation/eth-address'; import type { Pool } from 'pg'; -import type { CreateHASignerConfig, SlashingProtectionConfig } from './config.js'; -import type { - CheckAndRecordParams, - DeleteDutyParams, - DutyIdentifier, +import type { ValidatorHASignerConfig } from './config.js'; +import { + type BlockProposalDutyIdentifier, + type CheckAndRecordParams, + type DeleteDutyParams, + type DutyIdentifier, DutyType, - RecordSuccessParams, - ValidatorDutyRecord, + type OtherDutyIdentifier, + type RecordSuccessParams, + type ValidatorDutyRecord, } from './db/types.js'; export type { + BlockProposalDutyIdentifier, CheckAndRecordParams, - CreateHASignerConfig, DeleteDutyParams, DutyIdentifier, + OtherDutyIdentifier, RecordSuccessParams, - SlashingProtectionConfig, ValidatorDutyRecord, + ValidatorHASignerConfig, }; -export { DutyStatus, DutyType } from './db/types.js'; +export { DutyStatus, DutyType, getBlockIndexFromDutyIdentifier, normalizeBlockIndex } from './db/types.js'; /** * Result of tryInsertOrGetExisting operation @@ -45,17 +49,97 @@ export interface CreateHASignerDeps { } /** - * Context required for slashing protection during signing operations + * Base context for signing operations */ -export interface SigningContext { +interface BaseSigningContext { /** Slot number for this duty */ - slot: bigint; - /** Block number for this duty */ - blockNumber: bigint; - /** Type of duty being performed */ - dutyType: DutyType; + slot: SlotNumber; + /** + * Block or checkpoint number for this duty. + * For block proposals, this is the block number. + * For checkpoint proposals, this is the checkpoint number. 
+ */ + blockNumber: BlockNumber | CheckpointNumber; +} + +/** + * Signing context for block proposals. + * blockIndexWithinCheckpoint is REQUIRED and must be >= 0. + */ +export interface BlockProposalSigningContext extends BaseSigningContext { + /** Block index within checkpoint (0, 1, 2...). Required for block proposals. */ + blockIndexWithinCheckpoint: number; + dutyType: DutyType.BLOCK_PROPOSAL; +} + +/** + * Signing context for non-block-proposal duties that require HA protection. + * blockIndexWithinCheckpoint is not applicable (internally always -1). + */ +export interface OtherSigningContext extends BaseSigningContext { + dutyType: DutyType.CHECKPOINT_PROPOSAL | DutyType.ATTESTATION | DutyType.ATTESTATIONS_AND_SIGNERS; +} + +/** + * Signing context for governance/slashing votes which only need slot for HA protection. + * blockNumber is not applicable (internally always 0). + */ +export interface VoteSigningContext { + slot: SlotNumber; + dutyType: DutyType.GOVERNANCE_VOTE | DutyType.SLASHING_VOTE; +} + +/** + * Signing context for duties which don't require slot/blockNumber + * as they don't need HA protection (AUTH_REQUEST, TXS). + */ +export interface NoHAProtectionSigningContext { + dutyType: DutyType.AUTH_REQUEST | DutyType.TXS; +} + +/** + * Signing contexts that require HA protection (excludes AUTH_REQUEST). + * Used by the HA signer's signWithProtection method. + */ +export type HAProtectedSigningContext = BlockProposalSigningContext | OtherSigningContext | VoteSigningContext; + +/** + * Type guard to check if a SigningContext requires HA protection. + * Returns true for contexts that need HA protection, false for AUTH_REQUEST and TXS. + */ +export function isHAProtectedContext(context: SigningContext): context is HAProtectedSigningContext { + return context.dutyType !== DutyType.AUTH_REQUEST && context.dutyType !== DutyType.TXS; +} + +/** + * Gets the block number from a signing context. + * - Vote duties (GOVERNANCE_VOTE, SLASHING_VOTE): returns BlockNumber(0) + * - Other duties: returns the blockNumber from the context + */ +export function getBlockNumberFromSigningContext(context: HAProtectedSigningContext): BlockNumber | CheckpointNumber { + // Check for duty types that have blockNumber + if ( + context.dutyType === DutyType.BLOCK_PROPOSAL || + context.dutyType === DutyType.CHECKPOINT_PROPOSAL || + context.dutyType === DutyType.ATTESTATION || + context.dutyType === DutyType.ATTESTATIONS_AND_SIGNERS + ) { + return context.blockNumber; + } + // Vote duties (GOVERNANCE_VOTE, SLASHING_VOTE) don't have blockNumber + return BlockNumber(0); } +/** + * Context required for slashing protection during signing operations. + * Uses discriminated union to enforce type safety: + * - BLOCK_PROPOSAL duties MUST have blockIndexWithinCheckpoint >= 0 + * - Other duty types do NOT have blockIndexWithinCheckpoint (internally -1) + * - Vote duties only need slot (blockNumber is internally 0) + * - AUTH_REQUEST and TXS duties don't need slot/blockNumber (no HA protection needed) + */ +export type SigningContext = HAProtectedSigningContext | NoHAProtectionSigningContext; + /** * Database interface for slashing protection operations * This abstraction allows for different database implementations (PostgreSQL, SQLite, etc.) 
@@ -82,10 +166,11 @@ export interface SlashingProtectionDatabase { */ updateDutySigned( validatorAddress: EthAddress, - slot: bigint, + slot: SlotNumber, dutyType: DutyType, signature: string, lockToken: string, + blockIndexWithinCheckpoint: number, ): Promise<boolean>; /** @@ -95,11 +180,23 @@ * * @returns true if the delete succeeded, false if token didn't match or duty not found */ - deleteDuty(validatorAddress: EthAddress, slot: bigint, dutyType: DutyType, lockToken: string): Promise<boolean>; + deleteDuty( + validatorAddress: EthAddress, + slot: SlotNumber, + dutyType: DutyType, + lockToken: string, + blockIndexWithinCheckpoint: number, + ): Promise<boolean>; /** * Cleanup own stuck duties * @returns the number of duties cleaned up */ cleanupOwnStuckDuties(nodeId: string, maxAgeMs: number): Promise<number>; + + /** + * Close the database connection. + * Should be called during graceful shutdown. + */ + close(): Promise<void>; } diff --git a/yarn-project/validator-ha-signer/src/validator_ha_signer.test.ts b/yarn-project/validator-ha-signer/src/validator_ha_signer.test.ts index 9b7b11f853d8..998141a30293 100644 --- a/yarn-project/validator-ha-signer/src/validator_ha_signer.test.ts +++ b/yarn-project/validator-ha-signer/src/validator_ha_signer.test.ts @@ -1,3 +1,4 @@ +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Buffer32 } from '@aztec/foundation/buffer'; import { EthAddress } from '@aztec/foundation/eth-address'; import type { Signature } from '@aztec/foundation/eth-signature'; @@ -6,7 +7,7 @@ import { sleep } from '@aztec/foundation/sleep'; import { PGlite } from '@electric-sql/pglite'; import { afterEach, beforeEach, describe, expect, it, jest } from '@jest/globals'; -import { type CreateHASignerConfig, defaultSlashingProtectionConfig } from './config.js'; +import { type ValidatorHASignerConfig, defaultValidatorHASignerConfig } from './config.js'; import { PostgresSlashingProtectionDatabase } from './db/postgres.js'; import { setupTestSchema } from './db/test_helper.js'; import { DutyStatus, DutyType } from './db/types.js'; @@ -30,7 +31,7 @@ describe('ValidatorHASigner', () => { let pglite: PGlite; let pool: Pool; let db: PostgresSlashingProtectionDatabase; - let config: CreateHASignerConfig; + let config: ValidatorHASignerConfig; beforeEach(async () => { pglite = new PGlite(); @@ -41,7 +42,7 @@ describe('ValidatorHASigner', () => { await db.initialize(); config = { - enabled: true, + haSigningEnabled: true, nodeId: NODE_ID, pollingIntervalMs: 50, signingTimeoutMs: 1000, @@ -55,22 +56,20 @@ }); describe('initialization', () => { - it('should initialize with slashing protection enabled', () => { - const signer = new ValidatorHASigner(db, config); - expect(signer.isEnabled).toBe(true); - expect(signer.nodeId).toBe(NODE_ID); - }); - it('should not initialize when nodeId is not explicitly set', () => { - const defaultConfig = { ...defaultSlashingProtectionConfig }; + const defaultConfig = { ...defaultValidatorHASignerConfig }; expect( () => - new ValidatorHASigner(db, { ...defaultConfig, databaseUrl: 'postgresql://user:pass@localhost:5432/testdb' }), + new ValidatorHASigner(db, { + ...defaultConfig, + databaseUrl: 'postgresql://user:pass@localhost:5432/testdb', + haSigningEnabled: true, + }), ).toThrow('NODE_ID is required for high-availability setups'); }); it('should not initialize when enabled is false', () => { - const disabledConfig = { ...config, enabled: false }; + const disabledConfig = { ...config, 
haSigningEnabled: false }; expect(() => new ValidatorHASigner(db, disabledConfig)).toThrow('Validator HA Signer is not enabled in config'); }); }); @@ -103,9 +102,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ); @@ -117,8 +117,9 @@ describe('ValidatorHASigner', () => { // Verify duty was recorded const dutyResult = await db.tryInsertOrGetExisting({ validatorAddress: VALIDATOR_ADDRESS, - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + blockIndexWithinCheckpoint: 0, dutyType: DutyType.BLOCK_PROPOSAL, messageHash: MESSAGE_HASH.toString(), nodeId: NODE_ID, @@ -137,9 +138,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ), @@ -148,9 +150,10 @@ describe('ValidatorHASigner', () => { // Verify duty was deleted const dutyResult = await db.tryInsertOrGetExisting({ validatorAddress: VALIDATOR_ADDRESS, - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, messageHash: MESSAGE_HASH.toString(), nodeId: NODE_ID, }); @@ -163,9 +166,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ); @@ -176,9 +180,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ), @@ -194,9 +199,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ); @@ -207,9 +213,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH_2, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ), @@ -226,9 +233,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ); @@ -238,8 +246,8 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, messageHash, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.ATTESTATION, }, signFn, @@ -250,16 +258,17 @@ describe('ValidatorHASigner', () => { // Verify both duties exist const blockDutyResult = await db.tryInsertOrGetExisting({ validatorAddress: VALIDATOR_ADDRESS, - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, messageHash: MESSAGE_HASH.toString(), nodeId: NODE_ID, }); const attestationDutyResult = await db.tryInsertOrGetExisting({ validatorAddress: VALIDATOR_ADDRESS, - slot: 100n, - 
blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.ATTESTATION, messageHash: messageHash.toString(), nodeId: NODE_ID, @@ -275,9 +284,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ); @@ -287,27 +297,424 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 101n, - blockNumber: 51n, + slot: SlotNumber(101), + blockNumber: BlockNumber(51), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + signFn, + ); + + expect(signFn).toHaveBeenCalledTimes(2); + }); + + it('should allow signing different block indices within slot', async () => { + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + signFn, + ); + + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 1, }, signFn, ); expect(signFn).toHaveBeenCalledTimes(2); + + // Verify both duties exist + const blockDutyResult = await db.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + messageHash: MESSAGE_HASH.toString(), + nodeId: NODE_ID, + }); + const blockDutyResult2 = await db.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 1, + messageHash: MESSAGE_HASH.toString(), + nodeId: NODE_ID, + }); + expect(blockDutyResult.isNew).toBe(false); + expect(blockDutyResult2.isNew).toBe(false); + }); + + it('should allow checkpoint proposal alongside block proposals in same slot', async () => { + const slot = SlotNumber(100); + const blockNumber = BlockNumber(50); + + // Sign multiple block proposals + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot, + blockNumber, + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + signFn, + ); + + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot, + blockNumber, + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 1, + }, + signFn, + ); + + // Sign checkpoint proposal (index -1, since it's not a block within checkpoint) + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot, + blockNumber, + dutyType: DutyType.CHECKPOINT_PROPOSAL, + }, + signFn, + ); + + expect(signFn).toHaveBeenCalledTimes(3); + + // Verify all three duties exist in database + const block0Result = await db.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot, + blockNumber, + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + messageHash: MESSAGE_HASH.toString(), + nodeId: NODE_ID, + }); + const block1Result = await db.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot, + blockNumber, + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 1, + messageHash: MESSAGE_HASH.toString(), + nodeId: NODE_ID, + }); + const checkpointResult = await db.tryInsertOrGetExisting({ + validatorAddress: 
VALIDATOR_ADDRESS, + slot, + blockNumber, + dutyType: DutyType.CHECKPOINT_PROPOSAL, + messageHash: MESSAGE_HASH.toString(), + nodeId: NODE_ID, + }); + + expect(block0Result.isNew).toBe(false); + expect(block1Result.isNew).toBe(false); + expect(checkpointResult.isNew).toBe(false); + }); + + it('should reject duplicate signing for same slot, duty type, and block index', async () => { + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + signFn, + ); + + // Try to sign again with same parameters - should throw + await expect( + signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + signFn, + ), + ).rejects.toThrow(DutyAlreadySignedError); + + // But different index should work + await expect( + signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 1, + }, + signFn, + ), + ).resolves.toBeDefined(); }); it('should handle all duty types', async () => { - const dutyTypes: DutyType[] = [DutyType.BLOCK_PROPOSAL, DutyType.ATTESTATION, DutyType.ATTESTATIONS_AND_SIGNERS]; - for (const dutyType of dutyTypes) { - await signer.signWithProtection( + // Sign BLOCK_PROPOSAL (requires blockIndexWithinCheckpoint) + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + signFn, + ); + + // Sign ATTESTATION (no blockIndexWithinCheckpoint) + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.ATTESTATION, + }, + signFn, + ); + + // Sign ATTESTATIONS_AND_SIGNERS (no blockIndexWithinCheckpoint) + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.ATTESTATIONS_AND_SIGNERS, + }, + signFn, + ); + + // Sign CHECKPOINT_PROPOSAL (no blockIndexWithinCheckpoint) + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + blockNumber: BlockNumber(50), + dutyType: DutyType.CHECKPOINT_PROPOSAL, + }, + signFn, + ); + + // Sign GOVERNANCE_VOTE (VoteSigningContext - no blockNumber) + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + dutyType: DutyType.GOVERNANCE_VOTE, + }, + signFn, + ); + + // Sign SLASHING_VOTE (VoteSigningContext - no blockNumber) + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + dutyType: DutyType.SLASHING_VOTE, + }, + signFn, + ); + + expect(signFn).toHaveBeenCalledTimes(6); + }); + + it('should sign vote duties with VoteSigningContext (no blockNumber)', async () => { + // GOVERNANCE_VOTE only needs slot, no blockNumber + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + dutyType: DutyType.GOVERNANCE_VOTE, + }, + signFn, + ); + + // SLASHING_VOTE only needs slot, no blockNumber + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: SlotNumber(100), + dutyType: DutyType.SLASHING_VOTE, + }, + signFn, + ); 
+ + expect(signFn).toHaveBeenCalledTimes(2); + + // Verify both duties were recorded with blockNumber = 0 + const governanceResult = await db.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SlotNumber(100), + blockNumber: BlockNumber(0), // getBlockNumberFromSigningContext returns 0 for vote duties + dutyType: DutyType.GOVERNANCE_VOTE, + messageHash: MESSAGE_HASH.toString(), + nodeId: NODE_ID, + }); + const slashingResult = await db.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: SlotNumber(100), + blockNumber: BlockNumber(0), + dutyType: DutyType.SLASHING_VOTE, + messageHash: MESSAGE_HASH.toString(), + nodeId: NODE_ID, + }); + + expect(governanceResult.isNew).toBe(false); + expect(governanceResult.record.status).toBe(DutyStatus.SIGNED); + expect(slashingResult.isNew).toBe(false); + expect(slashingResult.record.status).toBe(DutyStatus.SIGNED); + }); + + it('should prevent duplicate governance votes for same slot', async () => { + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { slot: SlotNumber(100), dutyType: DutyType.GOVERNANCE_VOTE }, + signFn, + ); + + await expect( + signer.signWithProtection( VALIDATOR_ADDRESS, MESSAGE_HASH, - { slot: 100n, blockNumber: 50n, dutyType }, + { slot: SlotNumber(100), dutyType: DutyType.GOVERNANCE_VOTE }, signFn, - ); - } - expect(signFn).toHaveBeenCalledTimes(dutyTypes.length); + ), + ).rejects.toThrow(DutyAlreadySignedError); + + expect(signFn).toHaveBeenCalledTimes(1); + }); + + it('should prevent duplicate slashing votes for same slot', async () => { + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { slot: SlotNumber(100), dutyType: DutyType.SLASHING_VOTE }, + signFn, + ); + + await expect( + signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { slot: SlotNumber(100), dutyType: DutyType.SLASHING_VOTE }, + signFn, + ), + ).rejects.toThrow(DutyAlreadySignedError); + + expect(signFn).toHaveBeenCalledTimes(1); + }); + + it('should trigger SlashingProtectionError for vote duties with different message hash', async () => { + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { slot: SlotNumber(100), dutyType: DutyType.GOVERNANCE_VOTE }, + signFn, + ); + + await expect( + signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH_2, // Different message hash + { slot: SlotNumber(100), dutyType: DutyType.GOVERNANCE_VOTE }, + signFn, + ), + ).rejects.toThrow(SlashingProtectionError); + + expect(signFn).toHaveBeenCalledTimes(1); + }); + + it('should allow different vote types for same slot', async () => { + // Sign governance vote + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { slot: SlotNumber(100), dutyType: DutyType.GOVERNANCE_VOTE }, + signFn, + ); + + // Sign slashing vote for same slot - should succeed (different duty type) + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { slot: SlotNumber(100), dutyType: DutyType.SLASHING_VOTE }, + signFn, + ); + + expect(signFn).toHaveBeenCalledTimes(2); + }); + + it('should allow vote duties alongside block proposals in same slot', async () => { + const slot = SlotNumber(100); + + // Sign block proposal + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot, + blockNumber: BlockNumber(50), + dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, + }, + signFn, + ); + + // Sign governance vote for same slot + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { slot, 
dutyType: DutyType.GOVERNANCE_VOTE }, + signFn, + ); + + // Sign slashing vote for same slot + await signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { slot, dutyType: DutyType.SLASHING_VOTE }, + signFn, + ); + + expect(signFn).toHaveBeenCalledTimes(3); }); it('should handle multiple validator addresses', async () => { @@ -319,9 +726,10 @@ describe('ValidatorHASigner', () => { validator1, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ); @@ -330,9 +738,10 @@ describe('ValidatorHASigner', () => { validator2, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, signFn, ); @@ -340,6 +749,87 @@ describe('ValidatorHASigner', () => { expect(signFn).toHaveBeenCalledTimes(2); }); + it('should allow only one signer to succeed when multiple signers for same validator try to sign the same duty', async () => { + const numSigners = 10; + const nodeIds = Array.from({ length: numSigners }, (_, i) => `node-${i + 1}`); + + // Create separate signers with different node IDs for the same validator + const signers = nodeIds.map(nodeId => new ValidatorHASigner(db, { ...config, nodeId })); + + // Start all signers + signers.forEach(signer => signer.start()); + + try { + // All signers try to sign the same duty for the same validator + const sameSlot = SlotNumber(200); + const sameBlockNumber = BlockNumber(100); + const sameDutyType = DutyType.BLOCK_PROPOSAL; + const sameBlockIndex = 0; + + // Create signing functions for each signer + const signFns = nodeIds.map(() => { + const signFn = jest.fn<(messageHash: Buffer32) => Promise>(); + signFn.mockResolvedValue(mockSignature); + return signFn; + }); + + // All signers try to sign concurrently for the same validator + const results = await Promise.allSettled( + signers.map((signer, index) => + signer.signWithProtection( + VALIDATOR_ADDRESS, + MESSAGE_HASH, + { + slot: sameSlot, + blockNumber: sameBlockNumber, + dutyType: sameDutyType, + blockIndexWithinCheckpoint: sameBlockIndex, + }, + signFns[index], + ), + ), + ); + + // Exactly one should succeed + const successful = results.filter(r => r.status === 'fulfilled'); + const failed = results.filter(r => r.status === 'rejected'); + + expect(successful.length).toBe(1); + expect(failed.length).toBe(9); + + // All failures should be DutyAlreadySignedError + for (const failure of failed) { + if (failure.status === 'rejected') { + expect(failure.reason).toBeInstanceOf(DutyAlreadySignedError); + } + } + + // Only one signing function should have been called + const totalCalls = signFns.reduce((sum, fn) => sum + fn.mock.calls.length, 0); + expect(totalCalls).toBe(1); + + // Verify the duty is recorded in the database with the winning nodeId + const dutyResult = await db.tryInsertOrGetExisting({ + validatorAddress: VALIDATOR_ADDRESS, + slot: sameSlot, + blockNumber: sameBlockNumber, + dutyType: sameDutyType, + blockIndexWithinCheckpoint: sameBlockIndex, + messageHash: MESSAGE_HASH.toString(), + // The record should exist already here, so tryInsertOrGetExisting with any nodeId + // should return the same record + nodeId: nodeIds[0], + }); + expect(dutyResult.isNew).toBe(false); + expect(dutyResult.record.status).toBe(DutyStatus.SIGNED); + // The winning nodeId should be one of the ten + expect(nodeIds).toContain(dutyResult.record.nodeId); + } 
finally { + // Stop all signers + await Promise.all(signers.map(signer => signer.stop())); + } + }); + it('should handle concurrent signing attempts - first succeeds', async () => { const localSignFn = jest.fn<(messageHash: Buffer32) => Promise>(); @@ -354,9 +844,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, localSignFn, ); @@ -369,9 +860,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, localSignFn, ); @@ -403,9 +895,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, localSignFn, ); @@ -418,9 +911,10 @@ describe('ValidatorHASigner', () => { VALIDATOR_ADDRESS, MESSAGE_HASH, { - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, }, localSignFn, ); @@ -437,9 +931,10 @@ describe('ValidatorHASigner', () => { // Verify the duty is marked as signed by the second signer const dutyResult = await db.tryInsertOrGetExisting({ validatorAddress: VALIDATOR_ADDRESS, - slot: 100n, - blockNumber: 50n, + slot: SlotNumber(100), + blockNumber: BlockNumber(50), dutyType: DutyType.BLOCK_PROPOSAL, + blockIndexWithinCheckpoint: 0, messageHash: MESSAGE_HASH.toString(), nodeId: NODE_ID, }); diff --git a/yarn-project/validator-ha-signer/src/validator_ha_signer.ts b/yarn-project/validator-ha-signer/src/validator_ha_signer.ts index c3633d8d0e20..6e2080927069 100644 --- a/yarn-project/validator-ha-signer/src/validator_ha_signer.ts +++ b/yarn-project/validator-ha-signer/src/validator_ha_signer.ts @@ -10,9 +10,14 @@ import type { EthAddress } from '@aztec/foundation/eth-address'; import type { Signature } from '@aztec/foundation/eth-signature'; import { type Logger, createLogger } from '@aztec/foundation/log'; -import type { CreateHASignerConfig } from './config.js'; +import type { ValidatorHASignerConfig } from './config.js'; +import { type DutyIdentifier, DutyType } from './db/types.js'; import { SlashingProtectionService } from './slashing_protection_service.js'; -import type { SigningContext, SlashingProtectionDatabase } from './types.js'; +import { + type HAProtectedSigningContext, + type SlashingProtectionDatabase, + getBlockNumberFromSigningContext, +} from './types.js'; /** * Validator High Availability Signer @@ -35,15 +40,15 @@ import type { SigningContext, SlashingProtectionDatabase } from './types.js'; */ export class ValidatorHASigner { private readonly log: Logger; - private readonly slashingProtection: SlashingProtectionService | undefined; + private readonly slashingProtection: SlashingProtectionService; constructor( db: SlashingProtectionDatabase, - private readonly config: CreateHASignerConfig, + private readonly config: ValidatorHASignerConfig, ) { this.log = createLogger('validator-ha-signer'); - if (!config.enabled) { + if (!config.haSigningEnabled) { // this shouldn't happen, the validator should use different signer for non-HA setups throw new Error('Validator HA Signer is not enabled in config'); } @@ -67,7 +72,7 @@ export class 
ValidatorHASigner { * * @param validatorAddress - The validator's Ethereum address * @param messageHash - The hash to be signed - * @param context - The signing context (slot, block number, duty type) + * @param context - The signing context (HA-protected duty types only) * @param signFn - Function that performs the actual signing * @returns The signature * @@ -77,29 +82,30 @@ export class ValidatorHASigner { async signWithProtection( validatorAddress: EthAddress, messageHash: Buffer32, - context: SigningContext, + context: HAProtectedSigningContext, signFn: (messageHash: Buffer32) => Promise<Signature>, ): Promise<Signature> { - // If slashing protection is disabled, just sign directly - if (!this.slashingProtection) { - this.log.info('Signing without slashing protection enabled', { - validatorAddress: validatorAddress.toString(), - nodeId: this.config.nodeId, + let dutyIdentifier: DutyIdentifier; + if (context.dutyType === DutyType.BLOCK_PROPOSAL) { + dutyIdentifier = { + validatorAddress, + slot: context.slot, + blockIndexWithinCheckpoint: context.blockIndexWithinCheckpoint, dutyType: context.dutyType, + }; + } else { + dutyIdentifier = { + validatorAddress, slot: context.slot, - blockNumber: context.blockNumber, - }); - return await signFn(messageHash); + dutyType: context.dutyType, + }; } - const { slot, blockNumber, dutyType } = context; - // Acquire lock and get the token for ownership verification + const blockNumber = getBlockNumberFromSigningContext(context); const lockToken = await this.slashingProtection.checkAndRecord({ - validatorAddress, - slot, + ...dutyIdentifier, blockNumber, - dutyType, messageHash: messageHash.toString(), nodeId: this.config.nodeId, }); @@ -110,20 +116,13 @@ export class ValidatorHASigner { signature = await signFn(messageHash); } catch (error: any) { // Delete duty to allow retry (only succeeds if we own the lock) - await this.slashingProtection.deleteDuty({ - validatorAddress, - slot, - dutyType, - lockToken, - }); + await this.slashingProtection.deleteDuty({ ...dutyIdentifier, lockToken }); throw error; } // Record success (only succeeds if we own the lock) await this.slashingProtection.recordSuccess({ - validatorAddress, - slot, - dutyType, + ...dutyIdentifier, signature, nodeId: this.config.nodeId, lockToken, @@ -132,13 +131,6 @@ export class ValidatorHASigner { return signature; } - /** - * Check if slashing protection is enabled - */ - get isEnabled(): boolean { - return this.slashingProtection !== undefined; - } - /** * Get the node ID for this signer */ @@ -151,14 +143,15 @@ * Should be called after construction and before signing operations. */ start() { - this.slashingProtection?.start(); + this.slashingProtection.start(); } /** - * Stop the HA signer background tasks. + * Stop the HA signer background tasks and close database connection. * Should be called during graceful shutdown.
*/ async stop() { - await this.slashingProtection?.stop(); + await this.slashingProtection.stop(); + await this.slashingProtection.close(); } } diff --git a/yarn-project/validator-ha-signer/tsconfig.json b/yarn-project/validator-ha-signer/tsconfig.json index eb63cc029db2..63f8ab3e9f75 100644 --- a/yarn-project/validator-ha-signer/tsconfig.json +++ b/yarn-project/validator-ha-signer/tsconfig.json @@ -8,9 +8,6 @@ "references": [ { "path": "../foundation" - }, - { - "path": "../node-keystore" } ], "include": ["src"] diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 13635bf3eb84..5110acbe5899 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -821,6 +821,7 @@ __metadata: "@aztec/stdlib": "workspace:^" "@aztec/telemetry-client": "workspace:^" "@aztec/validator-client": "workspace:^" + "@aztec/validator-ha-signer": "workspace:^" "@aztec/world-state": "workspace:^" "@jest/globals": "npm:^30.0.0" "@types/jest": "npm:^30.0.0" @@ -1266,6 +1267,7 @@ __metadata: "@aztec/telemetry-client": "workspace:^" "@aztec/test-wallet": "workspace:^" "@aztec/validator-client": "workspace:^" + "@aztec/validator-ha-signer": "workspace:^" "@aztec/world-state": "workspace:^" "@iarna/toml": "npm:^2.2.5" "@jest/globals": "npm:^30.0.0" @@ -2032,7 +2034,9 @@ __metadata: "@aztec/stdlib": "workspace:^" "@aztec/telemetry-client": "workspace:^" "@aztec/validator-client": "workspace:^" + "@aztec/validator-ha-signer": "workspace:^" "@aztec/world-state": "workspace:^" + "@electric-sql/pglite": "npm:^0.3.14" "@jest/globals": "npm:^30.0.0" "@types/jest": "npm:^30.0.0" "@types/lodash.chunk": "npm:^4.2.7" @@ -2131,6 +2135,7 @@ __metadata: "@aztec/foundation": "workspace:^" "@aztec/l1-artifacts": "workspace:^" "@aztec/noir-noirc_abi": "portal:../../noir/packages/noirc_abi" + "@aztec/validator-ha-signer": "workspace:^" "@google-cloud/storage": "npm:^7.15.0" "@jest/expect": "npm:^30.0.0" "@jest/globals": "npm:^30.0.0" @@ -2272,6 +2277,8 @@ __metadata: "@aztec/slasher": "workspace:^" "@aztec/stdlib": "workspace:^" "@aztec/telemetry-client": "workspace:^" + "@aztec/validator-ha-signer": "workspace:^" + "@electric-sql/pglite": "npm:^0.3.14" "@jest/globals": "npm:^30.0.0" "@types/jest": "npm:^30.0.0" "@types/node": "npm:^22.15.17" @@ -2294,7 +2301,6 @@ __metadata: resolution: "@aztec/validator-ha-signer@workspace:validator-ha-signer" dependencies: "@aztec/foundation": "workspace:^" - "@aztec/node-keystore": "workspace:^" "@electric-sql/pglite": "npm:^0.3.14" "@jest/globals": "npm:^30.0.0" "@types/jest": "npm:^30.0.0" @@ -2309,6 +2315,7 @@ __metadata: ts-node: "npm:^10.9.1" tslib: "npm:^2.4.0" typescript: "npm:^5.3.3" + zod: "npm:^3.23.8" languageName: unknown linkType: soft
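Usage sketch (illustrative note, not part of the patch): the snippet below shows how a caller might drive the reworked signWithProtection API with the discriminated-union signing contexts introduced in types.ts. The foundation import paths and the validator-ha-signer errors/types subpaths are taken from this diff; the package-root export of ValidatorHASigner and the concrete slot, block number, and index values are assumptions made for the example. Vote duties (GOVERNANCE_VOTE, SLASHING_VOTE) would pass only { slot, dutyType }, while AUTH_REQUEST and TXS contexts bypass HA protection entirely.

import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types';
import type { Buffer32 } from '@aztec/foundation/buffer';
import type { EthAddress } from '@aztec/foundation/eth-address';
import type { Signature } from '@aztec/foundation/eth-signature';
// Assumption: ValidatorHASigner is re-exported from the package root.
import type { ValidatorHASigner } from '@aztec/validator-ha-signer';
import { DutyAlreadySignedError } from '@aztec/validator-ha-signer/errors';
import { DutyType } from '@aztec/validator-ha-signer/types';

/**
 * Illustrative only: sign a block proposal duty through the HA signer, treating
 * "another node already signed this duty" as a benign outcome instead of an error.
 */
export async function signBlockProposalWithHa(
  signer: ValidatorHASigner,
  validatorAddress: EthAddress,
  messageHash: Buffer32,
  signFn: (hash: Buffer32) => Promise<Signature>,
): Promise<Signature | undefined> {
  try {
    // BLOCK_PROPOSAL contexts must carry blockIndexWithinCheckpoint (>= 0);
    // slot and block number use the branded SlotNumber/BlockNumber types.
    return await signer.signWithProtection(
      validatorAddress,
      messageHash,
      {
        slot: SlotNumber(100), // example values only
        blockNumber: BlockNumber(50),
        dutyType: DutyType.BLOCK_PROPOSAL,
        blockIndexWithinCheckpoint: 0,
      },
      signFn,
    );
  } catch (err) {
    if (err instanceof DutyAlreadySignedError) {
      // Another HA node for the same validator won the lock and signed this duty.
      return undefined;
    }
    // SlashingProtectionError (conflicting message hash for the same duty) and any
    // other failure propagate to the caller.
    throw err;
  }
}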