diff --git a/.github/workflows/pull-request-develop.yml b/.github/workflows/pull-request-develop.yml index c46ea8de7..ccfeec61e 100644 --- a/.github/workflows/pull-request-develop.yml +++ b/.github/workflows/pull-request-develop.yml @@ -80,7 +80,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-node@v3 with: - node-version: 22.9.0 + node-version: 18 cache: npm - name: "Install dependencies" diff --git a/package-lock.json b/package-lock.json index 78c5ebbcb..d311f8a4e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13261,6 +13261,11 @@ "version": "4.17.21", "license": "MIT" }, + "node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" + }, "node_modules/lodash.defaults": { "version": "4.2.0", "license": "MIT", @@ -20135,7 +20140,7 @@ "license": "MIT", "dependencies": { "compute-gcd": "^1.2.1", - "lodash": "^4.17.21", + "lodash-es": "^4.17.21", "mina-fungible-token": "^1.0.0", "reflect-metadata": "^0.1.13", "ts-pattern": "^4.3.0" diff --git a/packages/common/src/index.ts b/packages/common/src/index.ts index 10b8ae4ba..1973cccdb 100644 --- a/packages/common/src/index.ts +++ b/packages/common/src/index.ts @@ -2,6 +2,7 @@ export * from "./config/ModuleContainer"; export * from "./config/ConfigurableModule"; export * from "./config/ChildContainerProvider"; export * from "./config/ChildContainerCreatable"; +export * from "./config/injectAlias"; export * from "./types"; export * from "./zkProgrammable/ZkProgrammable"; export * from "./zkProgrammable/ProvableMethodExecutionContext"; @@ -22,4 +23,3 @@ export * from "./compiling/AtomicCompileHelper"; export * from "./compiling/CompileRegistry"; export * from "./compiling/CompilableModule"; export * from "./compiling/services/ChildVerificationKeyService"; -export * from "./config/injectAlias"; diff --git a/packages/common/src/trees/RollupMerkleTree.ts b/packages/common/src/trees/RollupMerkleTree.ts index 44049e805..5257e3226 100644 --- a/packages/common/src/trees/RollupMerkleTree.ts +++ b/packages/common/src/trees/RollupMerkleTree.ts @@ -200,7 +200,7 @@ export function createMerkleTree(height: number): AbstractMerkleTreeClass { public static dummy() { return new RollupMerkleWitness({ - isLeft: Array(height - 1).fill(Bool(false)), + isLeft: Array(height - 1).fill(Bool(true)), path: Array(height - 1).fill(Field(0)), }); } @@ -222,6 +222,7 @@ export function createMerkleTree(height: number): AbstractMerkleTreeClass { public static WITNESS = RollupMerkleWitness; // private in interface + // TODO Cache this in some static variable so that we don't recompute it every time readonly zeroes: bigint[]; readonly store: MerkleTreeStore; diff --git a/packages/common/src/utils.ts b/packages/common/src/utils.ts index 219e44ce0..6894bef9c 100644 --- a/packages/common/src/utils.ts +++ b/packages/common/src/utils.ts @@ -5,6 +5,7 @@ import { DynamicProof, Proof, } from "o1js"; +import _ from "lodash"; import { TypedClass } from "./types"; @@ -72,6 +73,26 @@ export function reduceSequential( ); } +export function yieldSequential( + array: Source[], + callbackfn: ( + previousValue: State, + currentValue: Source, + currentIndex: number, + array: Source[] + ) => Promise<[State, Target]>, + initialValue: State +): Promise<[State, Target[]]> { + return reduceSequential( + array, + async ([state, collectedTargets], curr, index, arr) => { + const [newState, addition] = 
await callbackfn(state, curr, index, arr); + return [newState, collectedTargets.concat(addition)]; + }, + [initialValue, []] + ); +} + export function mapSequential( array: T[], f: (element: T, index: number, array: T[]) => Promise @@ -198,3 +219,43 @@ export function safeParseJson(json: string) { // eslint-disable-next-line @typescript-eslint/consistent-type-assertions return JSON.parse(json) as T; } + +export type Nullable = { + [Key in keyof T]: T[Key] | undefined; +}; + +export function isFull(t: Nullable): t is T { + return Object.values(t).findIndex((v) => v === undefined) === -1; +} + +// TODO Restructure utils into separate package and multiple files + +export function padArray( + array: T[], + batchSize: number, + generator: (index: number) => T +): T[] { + const slice = array.slice(); + const dummies = range(0, batchSize - (array.length % batchSize)).map((i) => + generator(i + array.length) + ); + slice.push(...dummies); + return slice; +} + +export function batch( + arr: T[], + batchSize: number, + dummy: (index: number) => T +): T[][] { + const padded = padArray(arr, batchSize, dummy); + + const partitioned = _.groupBy( + padded.map((v, i) => [v, i] as const), + ([v, i]) => Math.floor(i / batchSize) + ); + + const numBatches = Math.ceil(arr.length / batchSize); + + return range(0, numBatches).map((i) => partitioned[i].map((x) => x[0])); +} diff --git a/packages/common/src/zkProgrammable/provableMethod.ts b/packages/common/src/zkProgrammable/provableMethod.ts index aac3504d5..bb5f61832 100644 --- a/packages/common/src/zkProgrammable/provableMethod.ts +++ b/packages/common/src/zkProgrammable/provableMethod.ts @@ -31,7 +31,9 @@ export function toProver( ); if (zkProgram === undefined) { - throw new Error("Correct ZkProgram not found"); + throw new Error( + `Correct ZkProgram not found (searching for method ${methodName})` + ); } if (areProofsEnabled) { diff --git a/packages/deployment/src/queue/BullQueue.ts b/packages/deployment/src/queue/BullQueue.ts index 1377cac39..e4f83c515 100644 --- a/packages/deployment/src/queue/BullQueue.ts +++ b/packages/deployment/src/queue/BullQueue.ts @@ -51,6 +51,7 @@ export class BullQueue await this.activePromise; } let resOutside: () => void = () => {}; + // TODO Use Promise.withResolvers() for that const promise = new Promise((res) => { resOutside = res; }); diff --git a/packages/library/src/hooks/TransactionFeeHook.ts b/packages/library/src/hooks/TransactionFeeHook.ts index 871d2d0a3..dfa325b8f 100644 --- a/packages/library/src/hooks/TransactionFeeHook.ts +++ b/packages/library/src/hooks/TransactionFeeHook.ts @@ -6,11 +6,12 @@ import { } from "@proto-kit/module"; import { inject, injectable } from "tsyringe"; import { + BeforeTransactionHookArguments, ProvableTransactionHook, - BlockProverExecutionData, PublicKeyOption, } from "@proto-kit/protocol"; import { Field, Provable, PublicKey } from "o1js"; +import { noop } from "@proto-kit/common"; import { UInt64 } from "../math/UInt64"; import { Balance, TokenId } from "../runtime/Balances"; @@ -122,8 +123,8 @@ export class TransactionFeeHook extends ProvableTransactionHook { const feeConfig = Provable.witness(MethodFeeConfigData, () => this.feeAnalyzer.getFeeConfig( @@ -154,4 +155,8 @@ export class TransactionFeeHook extends ProvableTransactionHook { + noop(); + } } diff --git a/packages/module/src/method/runtimeMethod.ts b/packages/module/src/method/runtimeMethod.ts index 4688ea670..8cb1b26ce 100644 --- a/packages/module/src/method/runtimeMethod.ts +++ b/packages/module/src/method/runtimeMethod.ts 
@@ -1,7 +1,6 @@ import { Bool, Field, FlexibleProvablePure, Poseidon } from "o1js"; import { container } from "tsyringe"; import { - StateTransition, ProvableStateTransition, MethodPublicOutput, RuntimeMethodExecutionContext, @@ -40,7 +39,7 @@ const errors = { }; export function toStateTransitionsHash( - stateTransitions: StateTransition[] + stateTransitions: { toProvable: () => ProvableStateTransition }[] ) { const stateTransitionsHashList = new StateTransitionReductionList( ProvableStateTransition diff --git a/packages/persistance/prisma/migrations/20250129115920_st_prover/migration.sql b/packages/persistance/prisma/migrations/20250129115920_st_prover/migration.sql new file mode 100644 index 000000000..35cbac8b0 --- /dev/null +++ b/packages/persistance/prisma/migrations/20250129115920_st_prover/migration.sql @@ -0,0 +1,21 @@ +/* + Warnings: + + - You are about to drop the column `blockStateTransitions` on the `BlockResult` table. All the data in the column will be lost. + - You are about to drop the column `protocolTransitions` on the `TransactionExecutionResult` table. All the data in the column will be lost. + - Added the required column `beforeBlockStateTransitions` to the `Block` table without a default value. This is not possible if the table is not empty. + - Added the required column `fromStateRoot` to the `Block` table without a default value. This is not possible if the table is not empty. + - Added the required column `afterBlockStateTransitions` to the `BlockResult` table without a default value. This is not possible if the table is not empty. + +*/ +-- AlterTable +ALTER TABLE "Block" ADD COLUMN "beforeBlockStateTransitions" JSON NOT NULL, +ADD COLUMN "fromStateRoot" TEXT NOT NULL; + +-- AlterTable +ALTER TABLE "BlockResult" DROP COLUMN "blockStateTransitions", +ADD COLUMN "afterBlockStateTransitions" JSON NOT NULL, +ADD COLUMN "witnessedRoots" TEXT[]; + +-- AlterTable +ALTER TABLE "TransactionExecutionResult" DROP COLUMN "protocolTransitions"; diff --git a/packages/persistance/prisma/schema.prisma b/packages/persistance/prisma/schema.prisma index 5c46c2655..58c214477 100644 --- a/packages/persistance/prisma/schema.prisma +++ b/packages/persistance/prisma/schema.prisma @@ -40,11 +40,11 @@ model Transaction { } model TransactionExecutionResult { - stateTransitions Json @db.Json - protocolTransitions Json @db.Json - status Boolean - statusMessage String? - events Json @db.Json + // TODO Make StateTransitionBatch and StateTransition Table + stateTransitions Json @db.Json + status Boolean + statusMessage String? + events Json @db.Json tx Transaction @relation(fields: [txHash], references: [hash]) txHash String @id @@ -65,6 +65,9 @@ model Block { fromBlockHashRoot String fromMessagesHash String toMessagesHash String + fromStateRoot String + + beforeBlockStateTransitions Json @db.Json parentHash String? @unique parent Block? @relation("Parent", fields: [parentHash], references: [hash]) @@ -91,11 +94,12 @@ model Batch { model BlockResult { blockHash String @id @unique - stateRoot String - blockHashRoot String - afterNetworkState Json @db.Json - blockStateTransitions Json @db.Json - blockHashWitness Json @db.Json + stateRoot String + blockHashRoot String + witnessedRoots String[] + afterNetworkState Json @db.Json + afterBlockStateTransitions Json @db.Json + blockHashWitness Json @db.Json block Block? 
@relation(fields: [blockHash], references: [hash]) } diff --git a/packages/persistance/src/services/prisma/PrismaBlockStorage.ts b/packages/persistance/src/services/prisma/PrismaBlockStorage.ts index bbed9becc..9ee821549 100644 --- a/packages/persistance/src/services/prisma/PrismaBlockStorage.ts +++ b/packages/persistance/src/services/prisma/PrismaBlockStorage.ts @@ -1,5 +1,4 @@ import { - distinctByString, HistoricalBlockStorage, TransactionExecutionResult, Block, @@ -7,10 +6,9 @@ import { BlockQueue, BlockStorage, BlockWithResult, - BlockWithPreviousResult, BlockWithMaybeResult, } from "@proto-kit/sequencer"; -import { filterNonNull, log } from "@proto-kit/common"; +import { log } from "@proto-kit/common"; import { Prisma, TransactionExecutionResult as DBTransactionExecutionResult, @@ -108,6 +106,8 @@ export class PrismaBlockStorage await prismaClient.block.create({ data: { ...encodedBlock, + beforeBlockStateTransitions: + encodedBlock.beforeBlockStateTransitions as Prisma.InputJsonArray, beforeNetworkState: encodedBlock.beforeNetworkState as Prisma.InputJsonObject, duringNetworkState: @@ -122,8 +122,6 @@ export class PrismaBlockStorage txHash: tx.txHash, stateTransitions: tx.stateTransitions as Prisma.InputJsonArray, - protocolTransitions: - tx.protocolTransitions as Prisma.InputJsonArray, events: tx.events as Prisma.InputJsonArray, }; }), @@ -143,8 +141,8 @@ export class PrismaBlockStorage data: { afterNetworkState: encoded.afterNetworkState as Prisma.InputJsonValue, blockHashWitness: encoded.blockHashWitness as Prisma.InputJsonValue, - blockStateTransitions: - encoded.blockStateTransitions as Prisma.InputJsonValue, + afterBlockStateTransitions: + encoded.afterBlockStateTransitions as Prisma.InputJsonValue, stateRoot: encoded.stateRoot, blockHash: encoded.blockHash, @@ -198,7 +196,7 @@ export class PrismaBlockStorage return result; } - public async getNewBlocks(): Promise { + public async getNewBlocks(): Promise { const blocks = await this.connection.prismaClient.block.findMany({ where: { batch: null, @@ -209,24 +207,13 @@ export class PrismaBlockStorage tx: true, }, }, + result: true, }, orderBy: { height: Prisma.SortOrder.asc, }, }); - const blockHashes = blocks - .flatMap((block) => [block.parentHash, block.hash]) - .filter(filterNonNull) - .filter(distinctByString); - const result = await this.connection.prismaClient.blockResult.findMany({ - where: { - blockHash: { - in: blockHashes, - }, - }, - }); - return blocks.map((block, index) => { const transactions = block.transactions.map( (txresult) => { @@ -236,28 +223,17 @@ export class PrismaBlockStorage const decodedBlock = this.blockMapper.mapIn(block); decodedBlock.transactions = transactions; - const correspondingResult = result.find( - (candidate) => candidate.blockHash === block.hash - ); + const { result } = block; - if (correspondingResult === undefined) { + if (result === null) { throw new Error( `No BlockResult has been set for block ${block.hash} yet` ); } - const parentResult = result.find( - (candidate) => candidate.blockHash === block.parentHash - ); return { - block: { - block: decodedBlock, - result: this.blockResultMapper.mapIn(correspondingResult), - }, - lastBlockResult: - parentResult !== undefined - ? 
this.blockResultMapper.mapIn(parentResult) - : undefined, + block: decodedBlock, + result: this.blockResultMapper.mapIn(result), }; }); } diff --git a/packages/persistance/src/services/prisma/mappers/BlockMapper.ts b/packages/persistance/src/services/prisma/mappers/BlockMapper.ts index 5d394fd1b..bd5ee9b06 100644 --- a/packages/persistance/src/services/prisma/mappers/BlockMapper.ts +++ b/packages/persistance/src/services/prisma/mappers/BlockMapper.ts @@ -6,8 +6,14 @@ import { Field } from "o1js"; import { ObjectMapper } from "../../../ObjectMapper"; +import { StateTransitionArrayMapper } from "./StateTransitionMapper"; + @singleton() export class BlockMapper implements ObjectMapper { + public constructor( + private readonly stArrayMapper: StateTransitionArrayMapper + ) {} + public mapIn(input: PrismaBlock): Block { return { transactions: [], @@ -30,10 +36,15 @@ export class BlockMapper implements ObjectMapper { fromBlockHashRoot: Field(input.fromBlockHashRoot), fromMessagesHash: Field(input.fromMessagesHash), toMessagesHash: Field(input.toMessagesHash), + fromStateRoot: Field(input.fromStateRoot), transactionsHash: Field(input.transactionsHash), previousBlockHash: input.parentHash !== null ? Field(input.parentHash) : undefined, + + beforeBlockStateTransitions: this.stArrayMapper.mapIn( + input.beforeBlockStateTransitions + ), }; } @@ -47,11 +58,16 @@ export class BlockMapper implements ObjectMapper { fromBlockHashRoot: input.fromBlockHashRoot.toString(), fromMessagesHash: input.fromMessagesHash.toString(), toMessagesHash: input.toMessagesHash.toString(), + fromStateRoot: input.fromStateRoot.toString(), hash: input.hash.toString(), transactionsHash: input.transactionsHash.toString(), parentHash: input.previousBlockHash?.toString() ?? null, batchHeight: null, + + beforeBlockStateTransitions: this.stArrayMapper.mapOut( + input.beforeBlockStateTransitions + ), }; } } diff --git a/packages/persistance/src/services/prisma/mappers/BlockResultMapper.ts b/packages/persistance/src/services/prisma/mappers/BlockResultMapper.ts index cac65f9a1..fd65c6d02 100644 --- a/packages/persistance/src/services/prisma/mappers/BlockResultMapper.ts +++ b/packages/persistance/src/services/prisma/mappers/BlockResultMapper.ts @@ -28,10 +28,12 @@ export class BlockResultMapper // eslint-disable-next-line @typescript-eslint/no-unsafe-argument BlockHashMerkleTreeWitness.fromJSON(input.blockHashWitness as any) ), - blockStateTransitions: this.stArrayMapper.mapIn( - input.blockStateTransitions + afterBlockStateTransitions: this.stArrayMapper.mapIn( + input.afterBlockStateTransitions ), blockHash: BigInt(input.blockHash), + + witnessedRoots: [BigInt(input.witnessedRoots[0])], }; } @@ -44,10 +46,12 @@ export class BlockResultMapper blockHashWitness: BlockHashMerkleTreeWitness.toJSON( input.blockHashWitness ), - blockStateTransitions: this.stArrayMapper.mapOut( - input.blockStateTransitions + afterBlockStateTransitions: this.stArrayMapper.mapOut( + input.afterBlockStateTransitions ), afterNetworkState: NetworkState.toJSON(input.afterNetworkState), + + witnessedRoots: [input.witnessedRoots[0].toString()], }; } } diff --git a/packages/persistance/src/services/prisma/mappers/StateTransitionMapper.ts b/packages/persistance/src/services/prisma/mappers/StateTransitionMapper.ts index 2c1627d01..ed35da7b3 100644 --- a/packages/persistance/src/services/prisma/mappers/StateTransitionMapper.ts +++ b/packages/persistance/src/services/prisma/mappers/StateTransitionMapper.ts @@ -1,5 +1,8 @@ import { singleton } from "tsyringe"; -import { 
UntypedStateTransition } from "@proto-kit/sequencer"; +import { + StateTransitionBatch, + UntypedStateTransition, +} from "@proto-kit/sequencer"; import { Prisma } from "@prisma/client"; import { ObjectMapper } from "../../../ObjectMapper"; @@ -40,3 +43,38 @@ export class StateTransitionArrayMapper return input.map((st) => this.stMapper.mapOut(st)) as Prisma.JsonArray; } } + +@singleton() +export class StateTransitionBatchArrayMapper + implements ObjectMapper +{ + public constructor( + private readonly stArrayMapper: StateTransitionArrayMapper + ) {} + + public mapOut(input: StateTransitionBatch[]): Prisma.JsonValue { + return input.map((st) => ({ + stateTransitions: this.stArrayMapper.mapOut( + st.stateTransitions + ) as Prisma.JsonArray, + applied: st.applied, + })); + } + + public mapIn(input: Prisma.JsonValue): StateTransitionBatch[] { + if (input === undefined) return []; + + if (Array.isArray(input)) { + return (input as Prisma.JsonArray).map((stJson) => { + const batchJsonObject = stJson as Prisma.JsonObject; + return { + stateTransitions: this.stArrayMapper.mapIn( + batchJsonObject.stateTransitions + ), + applied: batchJsonObject.applied as boolean, + }; + }); + } + return []; + } +} diff --git a/packages/persistance/src/services/prisma/mappers/TransactionMapper.ts b/packages/persistance/src/services/prisma/mappers/TransactionMapper.ts index 83f5e37e3..1e242dace 100644 --- a/packages/persistance/src/services/prisma/mappers/TransactionMapper.ts +++ b/packages/persistance/src/services/prisma/mappers/TransactionMapper.ts @@ -11,7 +11,7 @@ import { Bool } from "o1js"; import { ObjectMapper } from "../../../ObjectMapper"; -import { StateTransitionArrayMapper } from "./StateTransitionMapper"; +import { StateTransitionBatchArrayMapper } from "./StateTransitionMapper"; import { EventArrayMapper } from "./EventMapper"; @singleton() @@ -55,7 +55,7 @@ export class TransactionExecutionResultMapper { public constructor( private readonly transactionMapper: TransactionMapper, - private readonly stArrayMapper: StateTransitionArrayMapper, + private readonly stBatchMapper: StateTransitionBatchArrayMapper, private readonly eventArrayMapper: EventArrayMapper ) {} @@ -67,12 +67,9 @@ export class TransactionExecutionResultMapper tx: this.transactionMapper.mapIn(input[1]), status: Bool(executionResult.status), statusMessage: executionResult.statusMessage ?? undefined, - stateTransitions: this.stArrayMapper.mapIn( + stateTransitions: this.stBatchMapper.mapIn( executionResult.stateTransitions ), - protocolTransitions: this.stArrayMapper.mapIn( - executionResult.protocolTransitions - ), events: this.eventArrayMapper.mapIn(executionResult.events), }; } @@ -84,8 +81,7 @@ export class TransactionExecutionResultMapper const executionResult = { status: input.status.toBoolean(), statusMessage: input.statusMessage ?? 
null, - stateTransitions: this.stArrayMapper.mapOut(input.stateTransitions), - protocolTransitions: this.stArrayMapper.mapOut(input.protocolTransitions), + stateTransitions: this.stBatchMapper.mapOut(input.stateTransitions), events: this.eventArrayMapper.mapOut(input.events), txHash: tx.hash, }; diff --git a/packages/protocol/src/hooks/AccountStateHook.ts b/packages/protocol/src/hooks/AccountStateHook.ts index 2312299be..b3aa3e9fe 100644 --- a/packages/protocol/src/hooks/AccountStateHook.ts +++ b/packages/protocol/src/hooks/AccountStateHook.ts @@ -1,10 +1,13 @@ import { PublicKey, Struct, UInt64 } from "o1js"; import { injectable } from "tsyringe"; +import { noop } from "@proto-kit/common"; -import { BlockProverExecutionData } from "../prover/block/BlockProvable"; import { StateMap } from "../state/StateMap"; import { protocolState } from "../state/protocol/ProtocolState"; -import { ProvableTransactionHook } from "../protocol/ProvableTransactionHook"; +import { + ProvableTransactionHook, + BeforeTransactionHookArguments, +} from "../protocol/ProvableTransactionHook"; import { assert } from "../state/assert/assert"; export class AccountState extends Struct({ @@ -18,7 +21,9 @@ export class AccountStateHook extends ProvableTransactionHook { AccountState ); - public async onTransaction({ transaction }: BlockProverExecutionData) { + public async beforeTransaction({ + transaction, + }: BeforeTransactionHookArguments) { const sender = transaction.sender.value; const aso = await this.accountState.get(sender); @@ -45,4 +50,8 @@ export class AccountStateHook extends ProvableTransactionHook { await this.accountState.set(sender, new AccountState({ nonce: newNonce })); } + + public async afterTransaction() { + noop(); + } } diff --git a/packages/protocol/src/hooks/LastStateRootBlockHook.ts b/packages/protocol/src/hooks/LastStateRootBlockHook.ts index d12dc0d44..4c7cf8091 100644 --- a/packages/protocol/src/hooks/LastStateRootBlockHook.ts +++ b/packages/protocol/src/hooks/LastStateRootBlockHook.ts @@ -1,26 +1,25 @@ -import { ProvableBlockHook } from "../protocol/ProvableBlockHook"; +import { + AfterBlockHookArguments, + ProvableBlockHook, +} from "../protocol/ProvableBlockHook"; import { NetworkState } from "../model/network/NetworkState"; -import { BlockProverState } from "../prover/block/BlockProver"; export class LastStateRootBlockHook extends ProvableBlockHook< Record > { public async afterBlock( networkState: NetworkState, - state: BlockProverState + { stateRoot }: AfterBlockHookArguments ): Promise { return new NetworkState({ block: networkState.block, previous: { - rootHash: state.stateRoot, + rootHash: stateRoot, }, }); } - public async beforeBlock( - networkState: NetworkState, - state: BlockProverState - ): Promise { + public async beforeBlock(networkState: NetworkState): Promise { return networkState; } } diff --git a/packages/protocol/src/hooks/NoopBlockHook.ts b/packages/protocol/src/hooks/NoopBlockHook.ts index 193e14019..a836edc81 100644 --- a/packages/protocol/src/hooks/NoopBlockHook.ts +++ b/packages/protocol/src/hooks/NoopBlockHook.ts @@ -1,20 +1,23 @@ import { NoConfig } from "@proto-kit/common"; -import { ProvableBlockHook } from "../protocol/ProvableBlockHook"; +import { + AfterBlockHookArguments, + BeforeBlockHookArguments, + ProvableBlockHook, +} from "../protocol/ProvableBlockHook"; import { NetworkState } from "../model/network/NetworkState"; -import { BlockProverState } from "../prover/block/BlockProver"; export class NoopBlockHook extends ProvableBlockHook { public async 
afterBlock( networkState: NetworkState, - state: BlockProverState + state: AfterBlockHookArguments ): Promise { return networkState; } public async beforeBlock( networkState: NetworkState, - state: BlockProverState + state: BeforeBlockHookArguments ): Promise { return networkState; } diff --git a/packages/protocol/src/hooks/NoopTransactionHook.ts b/packages/protocol/src/hooks/NoopTransactionHook.ts index 06e779d53..31173940a 100644 --- a/packages/protocol/src/hooks/NoopTransactionHook.ts +++ b/packages/protocol/src/hooks/NoopTransactionHook.ts @@ -1,10 +1,13 @@ import { noop } from "@proto-kit/common"; import { ProvableTransactionHook } from "../protocol/ProvableTransactionHook"; -import { BlockProverExecutionData } from "../prover/block/BlockProvable"; export class NoopTransactionHook extends ProvableTransactionHook { - public async onTransaction(executionData: BlockProverExecutionData) { + public async beforeTransaction() { + noop(); + } + + public async afterTransaction() { noop(); } } diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 355d6e365..e4419af3a 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -11,12 +11,17 @@ export * from "./model/transaction/RuntimeTransaction"; export * from "./model/transaction/ValueOption"; export * from "./model/MethodPublicOutput"; export * from "./model/RuntimeLike"; +export * from "./model/AppliedStateTransitionBatch"; export * from "./utils/ProvableHashList"; export * from "./utils/PrefixedProvableHashList"; export * from "./utils/MinaPrefixedProvableHashList"; export * from "./utils/ProvableReductionHashList"; -export * from "./utils/StateTransitionReductionList"; export * from "./utils/utils"; +export * from "./utils/FieldOptions"; +export * from "./prover/accumulators/StateTransitionReductionList"; +export * from "./prover/accumulators/AppliedBatchHashList"; +export * from "./prover/accumulators/WitnessedRootHashList"; +export * from "./prover/accumulators/TransactionHashList"; export * from "./prover/block/BlockProver"; export * from "./prover/block/BlockProvable"; export * from "./prover/block/accummulators/RuntimeVerificationKeyTree"; diff --git a/packages/protocol/src/model/AppliedStateTransitionBatch.ts b/packages/protocol/src/model/AppliedStateTransitionBatch.ts new file mode 100644 index 000000000..cf904631a --- /dev/null +++ b/packages/protocol/src/model/AppliedStateTransitionBatch.ts @@ -0,0 +1,16 @@ +import { Bool, Field, Poseidon, Provable, Struct } from "o1js"; + +export class AppliedStateTransitionBatch extends Struct({ + batchHash: Field, + applied: Bool, +}) {} + +export class AppliedStateTransitionBatchState extends Struct({ + batchHash: Field, + root: Field, +}) { + public hashOrZero(): Field { + const hash = Poseidon.hash(AppliedStateTransitionBatchState.toFields(this)); + return Provable.if(this.batchHash.equals(0), Field(0), hash); + } +} diff --git a/packages/protocol/src/model/StateTransition.ts b/packages/protocol/src/model/StateTransition.ts index c4a67e9e5..79b75f269 100644 --- a/packages/protocol/src/model/StateTransition.ts +++ b/packages/protocol/src/model/StateTransition.ts @@ -23,6 +23,12 @@ export class ProvableStateTransition extends Struct({ to: Option.none().toProvable(), }); } + + public static isDummy(stateTransition: ProvableStateTransition) { + return stateTransition.path + .equals(0) + .and(stateTransition.from.isSome.not()); + } } /** diff --git a/packages/protocol/src/model/StateTransitionProvableBatch.ts 
b/packages/protocol/src/model/StateTransitionProvableBatch.ts index cbd7283be..c63aeb347 100644 --- a/packages/protocol/src/model/StateTransitionProvableBatch.ts +++ b/packages/protocol/src/model/StateTransitionProvableBatch.ts @@ -1,50 +1,74 @@ -import { Bool, Provable, Struct } from "o1js"; -import { - InMemoryMerkleTreeStorage, - range, - RollupMerkleTree, - RollupMerkleTreeWitness, -} from "@proto-kit/common"; +import { Bool, Field, Provable, Struct } from "o1js"; +import { batch, RollupMerkleTreeWitness } from "@proto-kit/common"; import { constants } from "../Constants"; import { ProvableStateTransition } from "./StateTransition.js"; export class StateTransitionType { - public static readonly normal = true; + public static readonly nothing = 2; - public static readonly protocol = false; + // The reason these are 0 and 1 is to efficiently check + // x in [inside, closing] in-circuit via the boolean trick + public static readonly closeAndApply = 1; - public static isNormal(type: boolean) { - return type === StateTransitionType.normal; - } - - public static isProtocol(type: boolean) { - return type === StateTransitionType.protocol; - } + public static readonly closeAndThrowAway = 0; } +/** + * STType encodes both the type and whether it should be accumulated or not in one field + */ export class ProvableStateTransitionType extends Struct({ - type: Bool, + type: Field, }) { - public static get normal(): ProvableStateTransitionType { - return new ProvableStateTransitionType({ - type: Bool(StateTransitionType.normal), - }); + public static get nothing(): ProvableStateTransitionType { + return this.from(StateTransitionType.nothing); + } + + public static get closeAndApply(): ProvableStateTransitionType { + return this.from(StateTransitionType.closeAndApply); } - public static get protocol(): ProvableStateTransitionType { + public static get closeAndThrowAway(): ProvableStateTransitionType { + return this.from(StateTransitionType.closeAndThrowAway); + } + + private static from(constant: number) { return new ProvableStateTransitionType({ - type: Bool(StateTransitionType.protocol), + type: Field(constant), }); } - public isNormal(): Bool { - return this.type; + public isClosing() { + const { type } = this; + // check if base is 0 or 1 + // 0^2 == 0 && 1^2 == 1 + return type.mul(type).equals(type); + } + + public isNothing() { + return this.type.equals(ProvableStateTransitionType.nothing.type); } +} + +export class MerkleWitnessBatch extends Struct({ + witnesses: Provable.Array( + RollupMerkleTreeWitness, + constants.stateTransitionProverBatchSize + ), +}) {} - public isProtocol(): Bool { - return this.type.not(); +export class ProvableStateTransitionEntry extends Struct({ + stateTransition: ProvableStateTransition, + type: ProvableStateTransitionType, + witnessRoot: Bool, +}) { + public static dummy(): ProvableStateTransitionEntry { + return { + stateTransition: ProvableStateTransition.dummy(), + type: ProvableStateTransitionType.nothing, + witnessRoot: Bool(false), + }; + } } @@ -52,94 +76,59 @@ export class ProvableStateTransitionType extends Struct({ * A Batch of StateTransitions to be consumed by the StateTransitionProver * to prove multiple STs at once * - * transitionType: - * true == normal ST, false == protocol ST + * The batch is formed as an array of ProvableSTEntries, which have a type and + * witnessRoot flag attached to them. 
*/ export class StateTransitionProvableBatch extends Struct({ batch: Provable.Array( - ProvableStateTransition, - constants.stateTransitionProverBatchSize - ), - - transitionTypes: Provable.Array( - ProvableStateTransitionType, - constants.stateTransitionProverBatchSize - ), - - merkleWitnesses: Provable.Array( - RollupMerkleTreeWitness, + ProvableStateTransitionEntry, constants.stateTransitionProverBatchSize ), }) { - public static fromMappings( - transitions: { - transition: ProvableStateTransition; - type: ProvableStateTransitionType; - }[], - merkleWitnesses: RollupMerkleTreeWitness[] - ): StateTransitionProvableBatch { - const batch = transitions.map((entry) => entry.transition); - const transitionTypes = transitions.map((entry) => entry.type); - const witnesses = merkleWitnesses.slice(); - // Check that order is correct - let normalSTsStarted = false; - transitionTypes.forEach((x) => { - if (!normalSTsStarted && x.isNormal().toBoolean()) { - normalSTsStarted = true; + public static fromBatches( + batches: { + stateTransitions: ProvableStateTransition[]; + applied: Bool; + witnessRoot: Bool; + }[] + ): StateTransitionProvableBatch[] { + const flattened: ProvableStateTransitionEntry[] = []; + + for (const stBatch of batches) { + const entries = + stBatch.stateTransitions.map( + (stateTransition, j, sts) => { + return { + stateTransition, + type: + // eslint-disable-next-line no-nested-ternary + j === sts.length - 1 + ? stBatch.applied.toBoolean() + ? ProvableStateTransitionType.closeAndApply + : ProvableStateTransitionType.closeAndThrowAway + : ProvableStateTransitionType.nothing, + witnessRoot: Bool(false), + }; + } + ); + + flattened.push(...entries); + + if (stBatch.witnessRoot.toBoolean() && flattened.length > 0) { + flattened.at(-1)!.witnessRoot = Bool(true); } - if (normalSTsStarted && x.isProtocol().toBoolean()) { - throw new Error("Order in initializing STBatch not correct"); - } - }); - - while (batch.length < constants.stateTransitionProverBatchSize) { - batch.push(ProvableStateTransition.dummy()); - transitionTypes.push(ProvableStateTransitionType.normal); - witnesses.push( - new RollupMerkleTree(new InMemoryMerkleTreeStorage()).getWitness( - BigInt(0) - ) - ); } - return new StateTransitionProvableBatch({ - batch, - transitionTypes, - merkleWitnesses: witnesses, - }); - } - public static fromTransitions( - transitions: ProvableStateTransition[], - protocolTransitions: ProvableStateTransition[], - merkleWitnesses: RollupMerkleTreeWitness[] - ): StateTransitionProvableBatch { - const array = transitions.slice().concat(protocolTransitions); - - const transitionTypes = range(0, transitions.length) - .map(() => ProvableStateTransitionType.normal) - .concat( - range(0, protocolTransitions.length).map( - () => ProvableStateTransitionType.protocol - ) - ); - - while (array.length < constants.stateTransitionProverBatchSize) { - array.push(ProvableStateTransition.dummy()); - transitionTypes.push(ProvableStateTransitionType.normal); - } + const values = batch( + flattened, + constants.stateTransitionProverBatchSize, + () => ProvableStateTransitionEntry.dummy() + ); - return new StateTransitionProvableBatch({ - batch: array, - transitionTypes, - merkleWitnesses, + return values.map((stBatch) => { + return new StateTransitionProvableBatch({ + batch: stBatch, + }); }); } - - private constructor(object: { - batch: ProvableStateTransition[]; - transitionTypes: ProvableStateTransitionType[]; - merkleWitnesses: RollupMerkleTreeWitness[]; - }) { - super(object); - } } diff --git 
a/packages/protocol/src/model/transaction/RuntimeTransaction.ts b/packages/protocol/src/model/transaction/RuntimeTransaction.ts index 8adb3662c..6bf262e54 100644 --- a/packages/protocol/src/model/transaction/RuntimeTransaction.ts +++ b/packages/protocol/src/model/transaction/RuntimeTransaction.ts @@ -73,6 +73,10 @@ export class RuntimeTransaction extends Struct({ .assertTrue("Transaction sender is not set to dummy"); } + public isDummy(): Bool { + return this.methodId.equals(0); + } + public hashData(): Field[] { return [ this.methodId, diff --git a/packages/protocol/src/protocol/ProvableBlockHook.ts b/packages/protocol/src/protocol/ProvableBlockHook.ts index 29abc18c3..c28a46a04 100644 --- a/packages/protocol/src/protocol/ProvableBlockHook.ts +++ b/packages/protocol/src/protocol/ProvableBlockHook.ts @@ -1,7 +1,55 @@ -import type { BlockProverState } from "../prover/block/BlockProver"; +import { Field } from "o1js"; + import { NetworkState } from "../model/network/NetworkState"; +import { MethodPublicOutput } from "../model/MethodPublicOutput"; +import { BlockProverTransactionArguments } from "../prover/block/BlockProvable"; import { TransitioningProtocolModule } from "./TransitioningProtocolModule"; +import { + AfterTransactionHookArguments, + BeforeTransactionHookArguments, + ProvableHookBlockState, + toProvableHookBlockState, +} from "./ProvableTransactionHook"; + +export interface BeforeBlockHookArguments extends ProvableHookBlockState {} + +export interface AfterBlockHookArguments extends BeforeBlockHookArguments { + stateRoot: Field; +} + +export function toBeforeTransactionHookArgument( + executionData: Omit< + BlockProverTransactionArguments, + "verificationKeyAttestation" + >, + networkState: NetworkState, + state: Parameters[0] +): BeforeTransactionHookArguments { + const { transaction, signature } = executionData; + + return { + networkState, + transaction, + signature, + prover: toProvableHookBlockState(state), + }; +} + +export function toAfterTransactionHookArgument( + executionData: Omit< + BlockProverTransactionArguments, + "verificationKeyAttestation" + >, + networkState: NetworkState, + state: Parameters[0], + runtimeResult: MethodPublicOutput +): AfterTransactionHookArguments { + return { + ...toBeforeTransactionHookArgument(executionData, networkState, state), + runtimeResult, + }; +} // Purpose is to build transition from -> to network state export abstract class ProvableBlockHook< @@ -9,11 +57,11 @@ export abstract class ProvableBlockHook< > extends TransitioningProtocolModule { public abstract beforeBlock( networkState: NetworkState, - state: BlockProverState + state: BeforeBlockHookArguments ): Promise; public abstract afterBlock( networkState: NetworkState, - state: BlockProverState + state: AfterBlockHookArguments ): Promise; } diff --git a/packages/protocol/src/protocol/ProvableTransactionHook.ts b/packages/protocol/src/protocol/ProvableTransactionHook.ts index 91f600515..247517b44 100644 --- a/packages/protocol/src/protocol/ProvableTransactionHook.ts +++ b/packages/protocol/src/protocol/ProvableTransactionHook.ts @@ -1,13 +1,77 @@ import { NoConfig } from "@proto-kit/common"; +import { Signature } from "o1js"; -import { BlockProverExecutionData } from "../prover/block/BlockProvable"; +import { RuntimeTransaction } from "../model/transaction/RuntimeTransaction"; +import { NetworkState } from "../model/network/NetworkState"; +import { MethodPublicOutput } from "../model/MethodPublicOutput"; +import type { + BlockProverState, + BlockProverStateCommitments, +} from 
"../prover/block/BlockProvable"; import { TransitioningProtocolModule } from "./TransitioningProtocolModule"; +export type ProvableHookBlockState = Pick< + BlockProverStateCommitments, + | "transactionsHash" + | "eternalTransactionsHash" + | "incomingMessagesHash" + | "blockHashRoot" +>; + +export function toProvableHookBlockState( + state: Pick< + BlockProverState, + | "transactionList" + | "eternalTransactionsList" + | "incomingMessages" + | "blockHashRoot" + > +) { + const { + transactionList, + eternalTransactionsList, + incomingMessages, + blockHashRoot, + } = state; + return { + transactionsHash: transactionList.commitment, + eternalTransactionsHash: eternalTransactionsList.commitment, + incomingMessagesHash: incomingMessages.commitment, + blockHashRoot, + }; +} + +/** + * This type is a reduced set of the runtime method's public output. + * It omits internal commitments to data that is already present as data in + * the hook arguments + */ +export type TransactionResult = Omit< + MethodPublicOutput, + "networkStateHash" | "transactionHash" +>; + +export interface BeforeTransactionHookArguments { + transaction: RuntimeTransaction; + signature: Signature; + networkState: NetworkState; + prover: ProvableHookBlockState; +} + +export interface AfterTransactionHookArguments + extends BeforeTransactionHookArguments { + runtimeResult: TransactionResult; +} + export abstract class ProvableTransactionHook< Config = NoConfig, > extends TransitioningProtocolModule { - public abstract onTransaction( - executionData: BlockProverExecutionData + public abstract beforeTransaction( + executionData: BeforeTransactionHookArguments + ): Promise; + + public abstract afterTransaction( + execution: AfterTransactionHookArguments ): Promise; } diff --git a/packages/protocol/src/prover/accumulators/AppliedBatchHashList.ts b/packages/protocol/src/prover/accumulators/AppliedBatchHashList.ts new file mode 100644 index 000000000..6619f5584 --- /dev/null +++ b/packages/protocol/src/prover/accumulators/AppliedBatchHashList.ts @@ -0,0 +1,32 @@ +import { Bool, Field } from "o1js"; + +import { DefaultProvableHashList } from "../../utils/ProvableHashList"; +import { NonMethods } from "../../utils/utils"; +import { AppliedStateTransitionBatch } from "../../model/AppliedStateTransitionBatch"; + +/** + * A HashList for AppliedSTBatches, that in addition to the default + * functionality, checks that the pushed batches are not empty. + * If they are, the pushing is skipped. 
+ * Note that if the batch has applied: false, the batch still has to be appended + * if it has STs inside + */ +export class AppliedBatchHashList extends DefaultProvableHashList< + NonMethods +> { + public constructor(commitment: Field = Field(0)) { + super(AppliedStateTransitionBatch, commitment); + } + + private isNotEmpty(value: AppliedStateTransitionBatch): Bool { + return value.batchHash.equals(Field(0)).not(); + } + + public push(value: AppliedStateTransitionBatch) { + return super.pushIf(value, this.isNotEmpty(value)); + } + + public pushIf(value: AppliedStateTransitionBatch, condition: Bool) { + return super.pushIf(value, condition.and(this.isNotEmpty(value))); + } +} diff --git a/packages/protocol/src/utils/StateTransitionReductionList.ts b/packages/protocol/src/prover/accumulators/StateTransitionReductionList.ts similarity index 90% rename from packages/protocol/src/utils/StateTransitionReductionList.ts rename to packages/protocol/src/prover/accumulators/StateTransitionReductionList.ts index b7c56a657..654d29631 100644 --- a/packages/protocol/src/utils/StateTransitionReductionList.ts +++ b/packages/protocol/src/prover/accumulators/StateTransitionReductionList.ts @@ -1,12 +1,11 @@ import { Provable } from "o1js"; -import { ProvableOption } from "../model/Option"; +import { ProvableOption } from "../../model/Option"; import { ProvableStateTransition, StateTransition, -} from "../model/StateTransition"; - -import { ProvableReductionHashList } from "./ProvableReductionHashList"; +} from "../../model/StateTransition"; +import { ProvableReductionHashList } from "../../utils/ProvableReductionHashList"; export class StateTransitionReductionList extends ProvableReductionHashList { public push(value: ProvableStateTransition) { @@ -56,6 +55,10 @@ export class StateTransitionReductionList extends ProvableReductionHashList { + public constructor(commitment: Field = Field(0)) { + super(Field, commitment); + } +} diff --git a/packages/protocol/src/prover/accumulators/WitnessedRootHashList.ts b/packages/protocol/src/prover/accumulators/WitnessedRootHashList.ts new file mode 100644 index 000000000..9a8250168 --- /dev/null +++ b/packages/protocol/src/prover/accumulators/WitnessedRootHashList.ts @@ -0,0 +1,61 @@ +import { Bool, Field, Struct } from "o1js"; + +import { DefaultProvableHashList } from "../../utils/ProvableHashList"; + +/** + * Link between a certain applied batch stack and a given root hash that the + * stack has to evaluate to at that given point in time + */ +export class WitnessedRoot extends Struct({ + appliedBatchListState: Field, + root: Field, +}) {} + +export class WitnessedRootWitness extends Struct({ + witnessedRoot: Field, + preimage: Field, +}) {} + +/** + * Accumulator as of section "Intermediary state roots" of the STProver v2 spec + */ + +export class WitnessedRootHashList extends DefaultProvableHashList { + public constructor(commitment: Field = Field(0)) { + super(WitnessedRoot, commitment); + } + + /** + * To be used by the BlockProver or for tracing + * + * The main purpose of this method compared to the simple push methods + * is for deduplicating witnessed roots. We need to do this because the + * STProver can only witness once per batch, therefore if multiple witness + * points fall back to the same ST (because any batches in between were empty), + * this has to be detected and compensated for. + * This function does this using the preimage of the current list state. + * + * @param preimage The preimage to the **current** state of the list. 
+ */ + public witnessRoot( + witnessedRoot: WitnessedRoot, + preimage: Field, + condition: Bool + ) { + // Note, we don't have to validate the preimage here because of the following + // 1. If the sequencer doesn't provide the correct witness, the BlockProver's + // equality check will fail + // 2. If the list is empty, no preimage exists, therefore condition (2) doesn't + // apply, which is the same outcome when the sequencer provides an arbitrary witness + const preimageCheckList = new WitnessedRootHashList(preimage).push( + witnessedRoot + ); + + // Conditions: + // (1) don't append if witnessedRoot == finalizedRoot -> Already covered in BlockProver + // (2) don't append if preimage.push({ finalizedRoot, pendingSTBatchesHash }) == this.commitment + const skipPush = preimageCheckList.commitment.equals(this.commitment); + + return this.pushIf(witnessedRoot, condition.and(skipPush.not())); + } +} diff --git a/packages/protocol/src/prover/block/BlockProvable.ts b/packages/protocol/src/prover/block/BlockProvable.ts index d6e51a6cd..84d6493af 100644 --- a/packages/protocol/src/prover/block/BlockProvable.ts +++ b/packages/protocol/src/prover/block/BlockProvable.ts @@ -1,3 +1,4 @@ +// eslint-disable-next-line max-classes-per-file import { Bool, DynamicProof, @@ -13,23 +14,123 @@ import { StateTransitionProof } from "../statetransition/StateTransitionProvable import { MethodPublicOutput } from "../../model/MethodPublicOutput"; import { RuntimeTransaction } from "../../model/transaction/RuntimeTransaction"; import { NetworkState } from "../../model/network/NetworkState"; +import { TransactionHashList } from "../accumulators/TransactionHashList"; +import { MinaActionsHashList } from "../../utils/MinaPrefixedProvableHashList"; +import { AppliedBatchHashList } from "../accumulators/AppliedBatchHashList"; +import { + WitnessedRootHashList, + WitnessedRootWitness, +} from "../accumulators/WitnessedRootHashList"; import { BlockHashMerkleTreeWitness } from "./accummulators/BlockHashMerkleTree"; import { RuntimeVerificationKeyAttestation } from "./accummulators/RuntimeVerificationKeyTree"; -export class BlockProverPublicInput extends Struct({ +// Should be equal to BlockProver.PublicInput +export interface BlockProverState { + /** + * The current state root of the block prover + */ + stateRoot: Field; + + /** + * The current commitment of the transaction-list which + * will at the end equal the bundle hash + */ + transactionList: TransactionHashList; + + /** + * The network state which gives access to values such as blockHeight + * This value is the same for the whole batch (L2 block) + */ + networkState: NetworkState; + + /** + * The root of the merkle tree encoding all block hashes, + * see `BlockHashMerkleTree` + */ + blockHashRoot: Field; + + /** + * A variant of the transactionsHash that is never reset. + * Thought for usage in the sequence state mempool. 
+ * In comparison, transactionsHash restarts at 0 for every new block + */ + eternalTransactionsList: TransactionHashList; + + pendingSTBatches: AppliedBatchHashList; + + incomingMessages: MinaActionsHashList; + + witnessedRoots: WitnessedRootHashList; + + blockNumber: Field; +} + +// TODO Sort and organize public inputs and outputs +export class BlockProverStateCommitments extends Struct({ transactionsHash: Field, stateRoot: Field, + // Commitment to the list of unprocessed (pending) batches of STs that need to be proven + pendingSTBatchesHash: Field, + witnessedRootsHash: Field, networkStateHash: Field, blockHashRoot: Field, eternalTransactionsHash: Field, incomingMessagesHash: Field, blockNumber: Field, -}) {} +}) { + public static fromBlockProverState( + state: BlockProverState + ): BlockProverStateCommitments { + return { + networkStateHash: state.networkState.hash(), + stateRoot: state.stateRoot, + blockNumber: state.blockNumber, + blockHashRoot: state.blockHashRoot, + pendingSTBatchesHash: state.pendingSTBatches.commitment, + transactionsHash: state.transactionList.commitment, + eternalTransactionsHash: state.eternalTransactionsList.commitment, + incomingMessagesHash: state.incomingMessages.commitment, + witnessedRootsHash: state.witnessedRoots.commitment, + }; + } + + public static toBlockProverState( + publicInput: BlockProverStateCommitments, + networkState: NetworkState + ): BlockProverState { + publicInput.networkStateHash.assertEquals( + networkState.hash(), + "ExecutionData Networkstate doesn't equal public input hash" + ); + + return { + networkState, + stateRoot: publicInput.stateRoot, + blockHashRoot: publicInput.blockHashRoot, + transactionList: new TransactionHashList(publicInput.transactionsHash), + eternalTransactionsList: new TransactionHashList( + publicInput.eternalTransactionsHash + ), + incomingMessages: new MinaActionsHashList( + publicInput.incomingMessagesHash + ), + pendingSTBatches: new AppliedBatchHashList( + publicInput.pendingSTBatchesHash + ), + witnessedRoots: new WitnessedRootHashList(publicInput.witnessedRootsHash), + blockNumber: publicInput.blockNumber, + }; + } +} + +export class BlockProverPublicInput extends BlockProverStateCommitments {} export class BlockProverPublicOutput extends Struct({ transactionsHash: Field, stateRoot: Field, + pendingSTBatchesHash: Field, + witnessedRootsHash: Field, networkStateHash: Field, blockHashRoot: Field, eternalTransactionsHash: Field, @@ -54,10 +155,10 @@ export type BlockProverProof = Proof< BlockProverPublicOutput >; -export class BlockProverExecutionData extends Struct({ +export class BlockProverTransactionArguments extends Struct({ transaction: RuntimeTransaction, signature: Signature, - networkState: NetworkState, + verificationKeyAttestation: RuntimeVerificationKeyAttestation, }) {} export class DynamicRuntimeProof extends DynamicProof< @@ -72,22 +173,40 @@ export class DynamicRuntimeProof extends DynamicProof< static maxProofsVerified = 0 as const; } +export class BlockProverSingleTransactionExecutionData extends Struct({ + transaction: BlockProverTransactionArguments, + networkState: NetworkState, +}) {} + +export class BlockProverMultiTransactionExecutionData extends Struct({ + transaction1: BlockProverTransactionArguments, + transaction2: BlockProverTransactionArguments, + networkState: NetworkState, +}) {} + export interface BlockProvable extends WithZkProgrammable, CompilableModule { proveTransaction: ( publicInput: BlockProverPublicInput, - stateProof: StateTransitionProof, - appProof: 
DynamicRuntimeProof, - executionData: BlockProverExecutionData, - verificationKeyAttestation: RuntimeVerificationKeyAttestation + runtimeProof: DynamicRuntimeProof, + executionData: BlockProverSingleTransactionExecutionData + ) => Promise; + + proveTransactions: ( + publicInput: BlockProverPublicInput, + runtimeProof1: DynamicRuntimeProof, + runtimeProof2: DynamicRuntimeProof, + executionData: BlockProverMultiTransactionExecutionData ) => Promise; proveBlock: ( publicInput: BlockProverPublicInput, networkState: NetworkState, blockWitness: BlockHashMerkleTreeWitness, - // stateTransitionProof: StateTransitionProof, + stateTransitionProof: StateTransitionProof, + deferSTs: Bool, + afterBlockRootWitness: WitnessedRootWitness, transactionProof: BlockProverProof ) => Promise; diff --git a/packages/protocol/src/prover/block/BlockProver.ts b/packages/protocol/src/prover/block/BlockProver.ts index ccbb67874..6362e7055 100644 --- a/packages/protocol/src/prover/block/BlockProver.ts +++ b/packages/protocol/src/prover/block/BlockProver.ts @@ -1,8 +1,6 @@ import { Bool, - DynamicProof, Field, - Poseidon, Proof, Provable, SelfProof, @@ -15,6 +13,7 @@ import { CompilableModule, CompileArtifact, CompileRegistry, + log, MAX_FIELD, PlainZkProgram, provableMethod, @@ -22,7 +21,6 @@ import { ZkProgrammable, } from "@proto-kit/common"; -import { DefaultProvableHashList } from "../../utils/ProvableHashList"; import { MethodPublicOutput } from "../../model/MethodPublicOutput"; import { ProtocolModule } from "../../protocol/ProtocolModule"; import { @@ -36,24 +34,43 @@ import { ProvableStateTransition, StateTransition, } from "../../model/StateTransition"; -import { ProvableTransactionHook } from "../../protocol/ProvableTransactionHook"; -import { RuntimeMethodExecutionContext } from "../../state/context/RuntimeMethodExecutionContext"; -import { ProvableBlockHook } from "../../protocol/ProvableBlockHook"; +import { + AfterTransactionHookArguments, + BeforeTransactionHookArguments, + ProvableTransactionHook, + toProvableHookBlockState, +} from "../../protocol/ProvableTransactionHook"; +import { + RuntimeMethodExecutionContext, + RuntimeMethodExecutionData, +} from "../../state/context/RuntimeMethodExecutionContext"; +import { + AfterBlockHookArguments, + BeforeBlockHookArguments, + ProvableBlockHook, + toAfterTransactionHookArgument, + toBeforeTransactionHookArgument, +} from "../../protocol/ProvableBlockHook"; import { NetworkState } from "../../model/network/NetworkState"; import { SignedTransaction } from "../../model/transaction/SignedTransaction"; -import { - MinaActions, - MinaActionsHashList, -} from "../../utils/MinaPrefixedProvableHashList"; -import { StateTransitionReductionList } from "../../utils/StateTransitionReductionList"; +import { MinaActions } from "../../utils/MinaPrefixedProvableHashList"; +import { StateTransitionReductionList } from "../accumulators/StateTransitionReductionList"; +import { assertEqualsIf } from "../../utils/utils"; +import { WitnessedRootWitness } from "../accumulators/WitnessedRootHashList"; +import { StateServiceProvider } from "../../state/StateServiceProvider"; +import { AppliedStateTransitionBatch } from "../../model/AppliedStateTransitionBatch"; import { BlockProvable, - BlockProverExecutionData, BlockProverProof, BlockProverPublicInput, BlockProverPublicOutput, DynamicRuntimeProof, + BlockProverMultiTransactionExecutionData, + BlockProverTransactionArguments, + BlockProverSingleTransactionExecutionData, + BlockProverState, + BlockProverStateCommitments, } from 
"./BlockProvable"; import { BlockHashMerkleTreeWitness, @@ -67,12 +84,6 @@ import { import { RuntimeVerificationKeyRootService } from "./services/RuntimeVerificationKeyRootService"; const errors = { - stateProofNotStartingAtZero: () => - "StateProof not starting ST-commitment at zero", - - stateTransitionsHashNotEqual: () => - "StateTransition list commitments are not equal", - propertyNotMatchingStep: (propertyName: string, step: string) => `${propertyName} not matching: ${step}`, @@ -89,48 +100,14 @@ const errors = { invalidZkProgramTreeRoot: () => "Root hash of the provided zkProgram config witness is invalid", - - invalidZkProgramConfigMethodId: () => - "Method id of the provided zkProgram config does not match the executed transaction method id", }; -// Should be equal to BlockProver.PublicInput -export interface BlockProverState { - /** - * The current state root of the block prover - */ - stateRoot: Field; - - /** - * The current commitment of the transaction-list which - * will at the end equal the bundle hash - */ - transactionsHash: Field; - - /** - * The network state which gives access to values such as blockHeight - * This value is the same for the whole batch (L2 block) - */ - networkStateHash: Field; - - /** - * The root of the merkle tree encoding all block hashes, - * see `BlockHashMerkleTree` - */ - blockHashRoot: Field; - - /** - * A variant of the transactionsHash that is never reset. - * Thought for usage in the sequence state mempool. - * In comparison, transactionsHash restarts at 0 for every new block - */ - eternalTransactionsHash: Field; - - incomingMessagesHash: Field; -} +type ApplyTransactionArguments = Omit< + BlockProverTransactionArguments, + "verificationKeyAttestation" +>; export type BlockProof = Proof; -export type RuntimeProof = Proof; export class BlockProverProgrammable extends ZkProgrammable< BlockProverPublicInput, @@ -142,9 +119,9 @@ export class BlockProverProgrammable extends ZkProgrammable< StateTransitionProverPublicInput, StateTransitionProverPublicOutput >, - public readonly runtime: ZkProgrammable, private readonly transactionHooks: ProvableTransactionHook[], private readonly blockHooks: ProvableBlockHook[], + private readonly stateServiceProvider: StateServiceProvider, private readonly verificationKeyService: MinimalVKTreeService ) { super(); @@ -158,78 +135,80 @@ export class BlockProverProgrammable extends ZkProgrammable< /** * Applies and checks the two proofs and applies the corresponding state - * changes to the given state + * changes to the given state. + * + * The rough high level workflow of this function: + * 1. Execute beforeTransaction hooks, pushing the ST batch + * 2. Add Transaction to bundle, meaning appending it to all the respective commitments + * 3. Push the runtime ST batch + * 4. Execute afterTransaction hooks, pushing the ST batch + * 5. 
Some consistency checks and signature verification * - * @param state The from-state of the BlockProver - * @param stateTransitionProof - * @param runtimeProof + * @param fromState The from-state of the BlockProver + * @param runtimeOutput * @param executionData - * @param verificationKey + * @param networkState * @returns The new BlockProver-state to be used as public output */ public async applyTransaction( - state: BlockProverState, - stateTransitionProof: Proof< - StateTransitionProverPublicInput, - StateTransitionProverPublicOutput - >, - runtimeProof: DynamicRuntimeProof, - executionData: BlockProverExecutionData, - verificationKey: VerificationKey + fromState: BlockProverState, + runtimeOutput: MethodPublicOutput, + executionData: ApplyTransactionArguments, + networkState: NetworkState ): Promise { - const { transaction, networkState, signature } = executionData; + const { transaction, signature } = executionData; - const { isMessage } = runtimeProof.publicOutput; - - runtimeProof.verify(verificationKey); - stateTransitionProof.verify(); + let state = { ...fromState }; - const stateTo = { ...state }; + const { isMessage } = runtimeOutput; - // Checks for the stateTransitionProof and appProof matching - stateTransitionProof.publicInput.stateTransitionsHash.assertEquals( - Field(0), - errors.stateProofNotStartingAtZero() - ); - stateTransitionProof.publicInput.protocolTransitionsHash.assertEquals( - Field(0), - errors.stateProofNotStartingAtZero() + const beforeTxHookArguments = toBeforeTransactionHookArgument( + executionData, + networkState, + state ); - runtimeProof.publicOutput.stateTransitionsHash.assertEquals( - stateTransitionProof.publicOutput.stateTransitionsHash, - errors.stateTransitionsHashNotEqual() + // Apply beforeTransaction hook state transitions + const beforeBatch = await this.executeTransactionHooks( + async (module, args) => await module.beforeTransaction(args), + beforeTxHookArguments ); - // Assert from state roots - state.stateRoot.assertEquals( - stateTransitionProof.publicInput.stateRoot, - errors.propertyNotMatching("from state root") - ); - state.stateRoot.assertEquals( - stateTransitionProof.publicInput.protocolStateRoot, - errors.propertyNotMatching("from protocol state root") + state = this.addTransactionToBundle( + state, + runtimeOutput.isMessage, + transaction ); - // Apply protocol state transitions - await this.assertProtocolTransitions( - stateTransitionProof, + state.pendingSTBatches.push(beforeBatch); + + state.pendingSTBatches.push({ + batchHash: runtimeOutput.stateTransitionsHash, + applied: runtimeOutput.status, + }); + + // Apply afterTransaction hook state transitions + const afterTxHookArguments = toAfterTransactionHookArgument( executionData, - runtimeProof + networkState, + state, + runtimeOutput ); - // Apply state if status success - stateTo.stateRoot = Provable.if( - runtimeProof.publicOutput.status, - stateTransitionProof.publicOutput.stateRoot, - stateTransitionProof.publicOutput.protocolStateRoot + // Switch to different state set for afterTx hooks + this.stateServiceProvider.popCurrentStateService(); + + const afterBatch = await this.executeTransactionHooks( + async (module, args) => await module.afterTransaction(args), + afterTxHookArguments ); + state.pendingSTBatches.push(afterBatch); // Check transaction integrity against appProof const blockTransactionHash = transaction.hash(); blockTransactionHash.assertEquals( - runtimeProof.publicOutput.transactionHash, + runtimeOutput.transactionHash, "Transactions provided in AppProof and 
BlockProof do not match" ); @@ -246,236 +225,333 @@ export class BlockProverProgrammable extends ZkProgrammable< transaction.assertTransactionType(isMessage); // Check network state integrity against appProof - state.networkStateHash.assertEquals( - runtimeProof.publicOutput.networkStateHash, - "Network state does not match state used in AppProof" - ); - state.networkStateHash.assertEquals( - networkState.hash(), - "Network state provided to BlockProver does not match the publicInput" - ); + state.networkState + .hash() + .assertEquals( + runtimeOutput.networkStateHash, + "Network state does not match state used in AppProof" + ); - return stateTo; + return state; } // eslint-disable-next-line max-len // TODO How does this interact with the RuntimeMethodExecutionContext when executing runtimemethods? - public async assertProtocolTransitions( - stateTransitionProof: Proof< - StateTransitionProverPublicInput, - StateTransitionProverPublicOutput - >, - executionData: BlockProverExecutionData, - runtimeProof: DynamicProof + /** + * Constructs a AppliedBatch based on a list of STs and the flag whether to + * be applied or not. The AppliedBatch is a condensed commitment to a batch + * of STs. + */ + private constructBatch( + stateTransitions: StateTransition[], + applied: Bool ) { - const executionContext = container.resolve(RuntimeMethodExecutionContext); - executionContext.clear(); - - // Setup context for potential calls to runtime methods. - // This way they can use this.transaction etc. while still having provable - // integrity between data - executionContext.setup({ - // That is why we should probably hide it from the transaction context inputs - transaction: executionData.transaction, - networkState: executionData.networkState, - }); - executionContext.beforeMethod("", "", []); - - for (const module of this.transactionHooks) { - // eslint-disable-next-line no-await-in-loop - await module.onTransaction(executionData); - } - - executionContext.afterMethod(); - - const { stateTransitions, status, statusMessage } = - executionContext.current().result; - - status.assertTrue(statusMessage); - const transitions = stateTransitions.map((transition) => transition.toProvable() ); - const hashList = new StateTransitionReductionList( - ProvableStateTransition, - stateTransitionProof.publicInput.protocolTransitionsHash - ); - + const hashList = new StateTransitionReductionList(ProvableStateTransition); transitions.forEach((transition) => { hashList.push(transition); }); - stateTransitionProof.publicOutput.protocolTransitionsHash.assertEquals( - hashList.commitment, - "ProtocolTransitionsHash not matching the generated protocol transitions" - ); + return new AppliedStateTransitionBatch({ + batchHash: hashList.commitment, + applied, + }); } - private async executeBlockHooks( - state: BlockProverState, - inputNetworkState: NetworkState, - type: "afterBlock" | "beforeBlock" - ): Promise<{ - networkState: NetworkState; - stateTransitions: StateTransition[]; - }> { + private async executeTransactionHooks< + T extends BeforeTransactionHookArguments | AfterTransactionHookArguments, + >( + hook: (module: ProvableTransactionHook, args: T) => Promise, + hookArguments: T + ) { + const { batch } = await this.executeHooks(hookArguments, async () => { + for (const module of this.transactionHooks) { + // eslint-disable-next-line no-await-in-loop + await hook(module, hookArguments); + } + }); + return batch; + } + + private async executeHooks( + contextArguments: RuntimeMethodExecutionData, + method: () => Promise + ) 
{ const executionContext = container.resolve(RuntimeMethodExecutionContext); executionContext.clear(); + + // Setup context for potential calls to runtime methods. + // This way they can use this.transaction etc. while still having provable + // integrity between data + executionContext.setup(contextArguments); executionContext.beforeMethod("", "", []); - const resultingNetworkState = await this.blockHooks.reduce< - Promise - >(async (networkStatePromise, blockHook) => { - const networkState = await networkStatePromise; - // Setup context for potential calls to runtime methods. - // With the special case that we set the new networkstate for every hook - // We also have to put in a dummy transaction for network.transaction - executionContext.setup({ - transaction: RuntimeTransaction.dummyTransaction(), - networkState, - }); - - if (type === "beforeBlock") { - return await blockHook.beforeBlock(networkState, state); - } - if (type === "afterBlock") { - return await blockHook.afterBlock(networkState, state); - } - throw new Error("Unreachable"); - }, Promise.resolve(inputNetworkState)); + const result = await method(); executionContext.afterMethod(); const { stateTransitions, status, statusMessage } = executionContext.current().result; - status.assertTrue(`Block hook call failed: ${statusMessage ?? "-"}`); + status.assertTrue(`Transaction hook call failed: ${statusMessage ?? "-"}`); return { - networkState: resultingNetworkState, - stateTransitions, + batch: this.constructBatch(stateTransitions, Bool(true)), + result, }; } - private addTransactionToBundle( - state: BlockProverState, - isMessage: Bool, - transaction: RuntimeTransaction - ): BlockProverState { - const stateTo = { - ...state, + public async executeBlockHooks< + T extends BeforeBlockHookArguments | AfterBlockHookArguments, + >( + hook: ( + module: ProvableBlockHook, + networkState: NetworkState, + args: T + ) => Promise, + hookArguments: T, + inputNetworkState: NetworkState + ) { + const transaction = RuntimeTransaction.dummyTransaction(); + const startingInputs = { + transaction, + networkState: inputNetworkState, }; + return await this.executeHooks(startingInputs, async () => { + const executionContext = container.resolve(RuntimeMethodExecutionContext); + + return await this.blockHooks.reduce>( + async (networkStatePromise, blockHook) => { + const networkState = await networkStatePromise; + + // Setup context for potential calls to runtime methods. 
+ // With the special case that we set the new networkstate for every hook + // We also have to put in a dummy transaction for network.transaction + executionContext.setup({ + transaction: RuntimeTransaction.dummyTransaction(), + networkState, + }); + + return await hook(blockHook, networkState, hookArguments); + }, + Promise.resolve(inputNetworkState) + ); + }); + } + + public addTransactionToBundle< + T extends Pick< + BlockProverState, + "transactionList" | "eternalTransactionsList" | "incomingMessages" + >, + >(state: T, isMessage: Bool, transaction: RuntimeTransaction): T { const transactionHash = transaction.hash(); // Append tx to transaction list - const transactionList = new DefaultProvableHashList( - Field, - state.transactionsHash - ); - - transactionList.pushIf(transactionHash, isMessage.not()); - stateTo.transactionsHash = transactionList.commitment; + state.transactionList.pushIf(transactionHash, isMessage.not()); // Append tx to eternal transaction list // TODO Change that to the a sequence-state compatible transaction struct - const eternalTransactionList = new DefaultProvableHashList( - Field, - state.eternalTransactionsHash - ); - - eternalTransactionList.pushIf(transactionHash, isMessage.not()); - stateTo.eternalTransactionsHash = eternalTransactionList.commitment; + state.eternalTransactionsList.pushIf(transactionHash, isMessage.not()); // Append tx to incomingMessagesHash const actionHash = MinaActions.actionHash(transaction.hashData()); - const incomingMessagesList = new MinaActionsHashList( - state.incomingMessagesHash - ); - incomingMessagesList.pushIf(actionHash, isMessage); + state.incomingMessages.pushIf(actionHash, isMessage); - stateTo.incomingMessagesHash = incomingMessagesList.commitment; + return state; + } - return stateTo; + private verifyVerificationKeyAttestation( + attestation: RuntimeVerificationKeyAttestation, + methodId: Field + ): VerificationKey { + // Verify the [methodId, vk] tuple against the baked-in vk tree root + const { verificationKey, witness: verificationKeyTreeWitness } = + attestation; + + const root = Field(this.verificationKeyService.getRoot()); + const calculatedRoot = verificationKeyTreeWitness.calculateRoot( + new MethodVKConfigData({ + methodId: methodId, + vkHash: verificationKey.hash, + }).hash() + ); + root.assertEquals(calculatedRoot, errors.invalidZkProgramTreeRoot()); + + return verificationKey; } - @provableMethod() - public async proveTransaction( - publicInput: BlockProverPublicInput, - stateProof: StateTransitionProof, + public async proveTransactionInternal( + fromState: BlockProverState, runtimeProof: DynamicRuntimeProof, - executionData: BlockProverExecutionData, - verificationKeyWitness: RuntimeVerificationKeyAttestation - ): Promise { - const state: BlockProverState = { - ...publicInput, - }; + { transaction, networkState }: BlockProverSingleTransactionExecutionData + ): Promise { + const verificationKey = this.verifyVerificationKeyAttestation( + transaction.verificationKeyAttestation, + transaction.transaction.methodId + ); - state.networkStateHash.assertEquals( - executionData.networkState.hash(), - "ExecutionData Networkstate doesn't equal public input hash" + runtimeProof.verify(verificationKey); + + return await this.applyTransaction( + fromState, + runtimeProof.publicOutput, + transaction, + networkState ); + } + private staticChecks(publicInput: BlockProverPublicInput) { publicInput.blockNumber.assertEquals( MAX_FIELD, "blockNumber has to be MAX for transaction proofs" ); + } - // Verify the [methodId, vk] 
tuple against the baked-in vk tree root - const { verificationKey, witness: verificationKeyTreeWitness } = - verificationKeyWitness; - - const root = Field(this.verificationKeyService.getRoot()); - const calculatedRoot = verificationKeyTreeWitness.calculateRoot( - new MethodVKConfigData({ - methodId: executionData.transaction.methodId, - vkHash: verificationKey.hash, - }).hash() + @provableMethod() + public async proveTransaction( + publicInput: BlockProverPublicInput, + runtimeProof: DynamicRuntimeProof, + executionData: BlockProverSingleTransactionExecutionData + ): Promise { + const state = BlockProverStateCommitments.toBlockProverState( + publicInput, + executionData.networkState ); - root.assertEquals(calculatedRoot, errors.invalidZkProgramTreeRoot()); - const bundleInclusionState = this.addTransactionToBundle( + this.staticChecks(publicInput); + + const stateTo = await this.proveTransactionInternal( state, - runtimeProof.publicOutput.isMessage, - executionData.transaction + runtimeProof, + executionData ); - const stateTo = await this.applyTransaction( - bundleInclusionState, - stateProof, - runtimeProof, - executionData, - verificationKey + return new BlockProverPublicOutput({ + ...BlockProverStateCommitments.fromBlockProverState(stateTo), + closed: Bool(false), + }); + } + + @provableMethod() + public async proveTransactions( + publicInput: BlockProverPublicInput, + runtimeProof1: DynamicRuntimeProof, + runtimeProof2: DynamicRuntimeProof, + executionData: BlockProverMultiTransactionExecutionData + ): Promise { + const state = BlockProverStateCommitments.toBlockProverState( + publicInput, + executionData.networkState ); + this.staticChecks(publicInput); + + const state1 = await this.proveTransactionInternal(state, runtimeProof1, { + transaction: executionData.transaction1, + networkState: executionData.networkState, + }); + + // Switch to next state record for 2nd tx beforeTx hook + // TODO Can be prevented by merging 1st afterTx + 2nd beforeTx + this.stateServiceProvider.popCurrentStateService(); + + const stateTo = await this.proveTransactionInternal(state1, runtimeProof2, { + transaction: executionData.transaction2, + networkState: executionData.networkState, + }); + return new BlockProverPublicOutput({ - ...stateTo, - blockNumber: publicInput.blockNumber, + ...BlockProverStateCommitments.fromBlockProverState(stateTo), closed: Bool(false), }); } - private assertSTProofInput( + public includeSTProof( stateTransitionProof: StateTransitionProof, - stateRoot: Field - ) { - stateTransitionProof.publicInput.stateTransitionsHash.assertEquals( + apply: Bool, + stateRoot: Field, + pendingSTBatchesHash: Field, + witnessedRootsHash: Field + ): { + stateRoot: Field; + pendingSTBatchesHash: Field; + witnessedRootsHash: Field; + } { + assertEqualsIf( + stateTransitionProof.publicInput.currentBatchStateHash, Field(0), - errors.stateProofNotStartingAtZero() + apply, + "State for STProof has to be empty at the start" ); - stateTransitionProof.publicInput.protocolTransitionsHash.assertEquals( + assertEqualsIf( + stateTransitionProof.publicOutput.currentBatchStateHash, Field(0), - errors.stateProofNotStartingAtZero() + apply, + "State for STProof has to be empty at the end" + ); + + assertEqualsIf( + stateTransitionProof.publicInput.batchesHash, + Field(0), + apply, + "Batcheshash doesn't start at 0" ); // Assert from state roots - stateRoot.assertEquals( - stateTransitionProof.publicInput.stateRoot, + assertEqualsIf( + stateRoot, + stateTransitionProof.publicInput.root, + apply, + 
errors.propertyNotMatching("from state root") + ); + // Assert the stBatchesHash executed is the same + assertEqualsIf( + pendingSTBatchesHash, + stateTransitionProof.publicOutput.batchesHash, + apply, + "Pending STBatches are not the same that have been executed by the ST proof" + ); + + // Assert root Accumulator + assertEqualsIf( + Field(0), + stateTransitionProof.publicInput.witnessedRootsHash, + apply, errors.propertyNotMatching("from state root") ); + // Assert the witnessedRootsHash created is the same + assertEqualsIf( + witnessedRootsHash, + stateTransitionProof.publicOutput.witnessedRootsHash, + apply, + "Root accumulator Commitment is not the same that have been executed by the ST proof" + ); + + // update root only if we didn't defer + const newRoot = Provable.if( + apply, + stateTransitionProof.publicOutput.root, + stateRoot + ); + // Reset only if we didn't defer + const newBatchesHash = Provable.if(apply, Field(0), pendingSTBatchesHash); + const newWitnessedRootsHash = Provable.if( + apply, + Field(0), + witnessedRootsHash + ); + return { + stateRoot: newRoot, + pendingSTBatchesHash: newBatchesHash, + witnessedRootsHash: newWitnessedRootsHash, + }; } @provableMethod() @@ -483,27 +559,18 @@ export class BlockProverProgrammable extends ZkProgrammable< publicInput: BlockProverPublicInput, networkState: NetworkState, blockWitness: BlockHashMerkleTreeWitness, - // stateTransitionProof: StateTransitionProof, + stateTransitionProof: StateTransitionProof, + deferSTProof: Bool, + afterBlockRootWitness: WitnessedRootWitness, transactionProof: BlockProverProof ): Promise { - const state: BlockProverState = { - ...publicInput, - }; - // 1. Make assertions about the inputs publicInput.transactionsHash.assertEquals( Field(0), "Transactionshash has to start at 0" ); - publicInput.networkStateHash.assertEquals( - networkState.hash(), - "Wrong NetworkState supplied" - ); - transactionProof.publicInput.transactionsHash.assertEquals( - Field(0), - "TransactionProof transactionshash has to start at 0" - ); + // TransactionProof format checks transactionProof.publicInput.blockHashRoot.assertEquals( Field(0), "TransactionProof cannot carry the blockHashRoot - publicInput" @@ -516,25 +583,11 @@ export class BlockProverProgrammable extends ZkProgrammable< transactionProof.publicOutput.networkStateHash, "TransactionProof cannot alter the network state" ); - transactionProof.publicInput.eternalTransactionsHash.assertEquals( - state.eternalTransactionsHash, - "TransactionProof starting eternalTransactionHash not matching" - ); - transactionProof.publicInput.incomingMessagesHash.assertEquals( - state.incomingMessagesHash, - "TransactionProof starting incomingMessagesHash not matching" - ); - // TODO Reintroduce ST Proofs - // Verify ST Proof only if STs have been emitted, - // otherwise we can input a dummy proof - // const stsEmitted = stateTransitionProof.publicOutput.stateTransitionsHash - // .equals(0) - // .and(stateTransitionProof.publicOutput.protocolTransitionsHash.equals(0)) - // .not(); - // Provable.log("VerifyIf 1", stsEmitted); - // stateTransitionProof.verifyIf(Bool(false)); - // stateTransitionProof.verifyIf(stsEmitted); + const state = BlockProverStateCommitments.toBlockProverState( + publicInput, + networkState + ); // Verify Transaction proof if it has at least 1 tx - i.e. 
the // input and output doesn't match fully @@ -545,87 +598,75 @@ export class BlockProverProgrammable extends ZkProgrammable< transactionProof.publicInput, txProofOutput.closed ); - Provable.log("VerifyIf 2", isEmptyTransition.not()); - transactionProof.verifyIf(isEmptyTransition.not()); + const skipTransactionProofVerification = isEmptyTransition; + const verifyTransactionProof = isEmptyTransition.not(); + log.provable.debug("VerifyIf TxProof", verifyTransactionProof); + transactionProof.verifyIf(verifyTransactionProof); // 2. Execute beforeBlock hooks + const beforeBlockArgs = toProvableHookBlockState(state); const beforeBlockResult = await this.executeBlockHooks( - state, - networkState, - "beforeBlock" + async (module, networkStateArg, args) => + await module.beforeBlock(networkStateArg, args), + beforeBlockArgs, + networkState ); - // const beforeBlockHashList = new StateTransitionReductionList( - // ProvableStateTransition - // ); - // beforeBlockResult.stateTransitions.forEach((st) => { - // beforeBlockHashList.push(st.toProvable()); - // }); - - // We are reusing protocolSTs here as beforeBlock STs - // TODO Not possible atm bcs we can't have a seperation between protocol/runtime state roots, - // which we would for both before and after to be able to emit STs - - // stateTransitionProof.publicInput.protocolTransitionsHash.assertEquals( - // beforeBlockHashList.commitment - // ); - // state.stateRoot = stateTransitionProof.publicInput.protocolStateRoot; - - // TODO Only for now - // beforeBlockHashList.commitment.assertEquals( - // Field(0), - // "beforeBlock() cannot emit state transitions yet" - // ); + state.pendingSTBatches.push(beforeBlockResult.batch); // 4. Apply TX-type BlockProof - transactionProof.publicInput.networkStateHash.assertEquals( - beforeBlockResult.networkState.hash(), - "TransactionProof networkstate hash not matching beforeBlock hook result" - ); + transactionProof.publicInput.networkStateHash + .equals(beforeBlockResult.result.hash()) + .or(skipTransactionProofVerification) + .assertTrue( + "TransactionProof networkstate hash not matching beforeBlock hook result" + ); transactionProof.publicInput.stateRoot.assertEquals( - state.stateRoot, - "TransactionProof input state root not matching blockprover state root" + transactionProof.publicOutput.stateRoot, + "TransactionProofs can't change the state root" ); - state.stateRoot = transactionProof.publicOutput.stateRoot; - state.transactionsHash = transactionProof.publicOutput.transactionsHash; - state.eternalTransactionsHash = - transactionProof.publicOutput.eternalTransactionsHash; - state.incomingMessagesHash = - transactionProof.publicOutput.incomingMessagesHash; - - // 5. 
Execute afterBlock hooks - // this.assertSTProofInput(stateTransitionProof, state.stateRoot); - - const afterBlockResult = await this.executeBlockHooks( - state, - beforeBlockResult.networkState, - "afterBlock" + // Check that the transaction proof's STs start after the beforeBlock hook + transactionProof.publicInput.pendingSTBatchesHash.assertEquals( + state.pendingSTBatches.commitment, + "Transaction proof doesn't start their STs after the beforeBlockHook" ); - - const afterBlockHashList = new StateTransitionReductionList( - ProvableStateTransition - ); - afterBlockResult.stateTransitions.forEach((st) => { - afterBlockHashList.push(st.toProvable()); + // Fast-forward the stBatchHashList to after all transactions appended + state.pendingSTBatches.commitment = + transactionProof.publicOutput.pendingSTBatchesHash; + + // Fast-forward block content commitments by the results of the aggregated transaction proof + // Implicitly, the 'from' values here are asserted against the publicInput, since the hashlists + // are created out of the public input + state.transactionList.fastForward({ + from: transactionProof.publicInput.transactionsHash, + to: transactionProof.publicOutput.transactionsHash, + }); + state.eternalTransactionsList.fastForward({ + from: transactionProof.publicInput.eternalTransactionsHash, + to: transactionProof.publicOutput.eternalTransactionsHash, + }); + state.incomingMessages.fastForward({ + from: transactionProof.publicInput.incomingMessagesHash, + to: transactionProof.publicOutput.incomingMessagesHash, }); - state.networkStateHash = afterBlockResult.networkState.hash(); - - // We are reusing runtime STs here as afterBlock STs - // stateTransitionProof.publicInput.protocolTransitionsHash.assertEquals( - // afterBlockHashList.commitment, - // "STProof from-ST-hash not matching generated ST-hash from afterBlock hooks" - // ); - // state.stateRoot = Provable.if( - // stsEmitted, - // stateTransitionProof.publicOutput.stateRoot, - // state.stateRoot - // ); + // Witness root + const isEmpty = state.pendingSTBatches.commitment.equals(0); + isEmpty + .implies(state.stateRoot.equals(afterBlockRootWitness.witnessedRoot)) + .assertTrue(); - // 6. Close block + state.witnessedRoots.witnessRoot( + { + appliedBatchListState: state.pendingSTBatches.commitment, + root: afterBlockRootWitness.witnessedRoot, + }, + afterBlockRootWitness.preimage, + isEmpty.not() + ); - // Calculate the new block index + // 5. Calculate the new block tree hash const blockIndex = blockWitness.calculateIndex(); blockIndex.assertEquals(publicInput.blockNumber); @@ -639,15 +680,60 @@ export class BlockProverProgrammable extends ZkProgrammable< state.blockHashRoot = blockWitness.calculateRoot( new BlockHashTreeEntry({ - // Mirroring UnprovenBlock.hash() - blockHash: Poseidon.hash([blockIndex, state.transactionsHash]), + block: { + index: blockIndex, + transactionListHash: state.transactionList.commitment, + }, closed: Bool(true), }).hash() ); + // 6. Execute afterBlock hooks + + // Switch state service to afterBlock one + this.stateServiceProvider.popCurrentStateService(); + + const afterBlockHookArgs = toProvableHookBlockState(state); + const afterBlockResult = await this.executeBlockHooks( + async (module, networkStateArg, args) => + await module.afterBlock(networkStateArg, args), + { + ...afterBlockHookArgs, + stateRoot: afterBlockRootWitness.witnessedRoot, + }, + beforeBlockResult.result + ); + + state.pendingSTBatches.push(afterBlockResult.batch); + + state.networkState = afterBlockResult.result; + + // 7. 
Close block + + // Verify ST Proof only if STs have been emitted, + // and we don't defer the verification of the STs + // otherwise we can input a dummy proof + const batchesEmpty = state.pendingSTBatches.commitment.equals(Field(0)); + const verifyStProof = deferSTProof.not().and(batchesEmpty.not()); + log.provable.debug("Verify STProof", verifyStProof); + stateTransitionProof.verifyIf(verifyStProof); + + // Apply STProof if not deferred + const stateProofResult = this.includeSTProof( + stateTransitionProof, + verifyStProof, + state.stateRoot, + state.pendingSTBatches.commitment, + state.witnessedRoots.commitment + ); + state.stateRoot = stateProofResult.stateRoot; + state.pendingSTBatches.commitment = stateProofResult.pendingSTBatchesHash; + state.witnessedRoots.commitment = stateProofResult.witnessedRootsHash; + + state.blockNumber = blockIndex.add(1); + return new BlockProverPublicOutput({ - ...state, - blockNumber: blockIndex.add(1), + ...BlockProverStateCommitments.fromBlockProverState(state), closed: Bool(true), }); } @@ -732,6 +818,26 @@ export class BlockProverProgrammable extends ZkProgrammable< ) ); + // Check pendingSTBatchesHash + publicInput.pendingSTBatchesHash.assertEquals( + proof1.publicInput.pendingSTBatchesHash, + errors.transactionsHashNotMatching("publicInput.from -> proof1.from") + ); + proof1.publicOutput.pendingSTBatchesHash.assertEquals( + proof2.publicInput.pendingSTBatchesHash, + errors.transactionsHashNotMatching("proof1.to -> proof2.from") + ); + + // Check witnessedRootsHash + publicInput.witnessedRootsHash.assertEquals( + proof1.publicInput.witnessedRootsHash, + errors.transactionsHashNotMatching("publicInput.from -> proof1.from") + ); + proof1.publicOutput.witnessedRootsHash.assertEquals( + proof2.publicInput.witnessedRootsHash, + errors.transactionsHashNotMatching("proof1.to -> proof2.from") + ); + // Assert closed indicator matches // (i.e. 
we can only merge TX-Type and Block-Type with each other) proof1.publicOutput.closed.assertEquals( @@ -787,6 +893,8 @@ export class BlockProverProgrammable extends ZkProgrammable< incomingMessagesHash: proof2.publicOutput.incomingMessagesHash, closed: isValidClosedMerge, blockNumber: proof2.publicOutput.blockNumber, + pendingSTBatchesHash: proof2.publicOutput.pendingSTBatchesHash, + witnessedRootsHash: proof2.publicOutput.witnessedRootsHash, }); } @@ -802,6 +910,7 @@ export class BlockProverProgrammable extends ZkProgrammable< const { prover, stateTransitionProver } = this; const StateTransitionProofClass = stateTransitionProver.zkProgram[0].Proof; const proveTransaction = prover.proveTransaction.bind(prover); + const proveTransactions = prover.proveTransactions.bind(prover); const proveBlock = prover.proveBlock.bind(prover); const merge = prover.merge.bind(prover); @@ -813,25 +922,41 @@ export class BlockProverProgrammable extends ZkProgrammable< methods: { proveTransaction: { privateInputs: [ - StateTransitionProofClass, DynamicRuntimeProof, - BlockProverExecutionData, - RuntimeVerificationKeyAttestation, + BlockProverSingleTransactionExecutionData, ], async method( publicInput: BlockProverPublicInput, - stateProof: StateTransitionProof, - appProof: DynamicRuntimeProof, - executionData: BlockProverExecutionData, - verificationKeyAttestation: RuntimeVerificationKeyAttestation + runtimeProof: DynamicRuntimeProof, + executionData: BlockProverSingleTransactionExecutionData ) { return await proveTransaction( publicInput, - stateProof, - appProof, - executionData, - verificationKeyAttestation + runtimeProof, + executionData + ); + }, + }, + + proveTransactions: { + privateInputs: [ + DynamicRuntimeProof, + DynamicRuntimeProof, + BlockProverMultiTransactionExecutionData, + ], + + async method( + publicInput: BlockProverPublicInput, + runtimeProof1: DynamicRuntimeProof, + runtimeProof2: DynamicRuntimeProof, + executionData: BlockProverMultiTransactionExecutionData + ) { + return await proveTransactions( + publicInput, + runtimeProof1, + runtimeProof2, + executionData ); }, }, @@ -840,21 +965,27 @@ export class BlockProverProgrammable extends ZkProgrammable< privateInputs: [ NetworkState, BlockHashMerkleTreeWitness, - // StateTransitionProofClass, + StateTransitionProofClass, + Bool, + WitnessedRootWitness, SelfProof, ], async method( publicInput: BlockProverPublicInput, networkState: NetworkState, blockWitness: BlockHashMerkleTreeWitness, - // stateTransitionProof: StateTransitionProof, + stateTransitionProof: StateTransitionProof, + deferSTs: Bool, + afterBlockRootWitness: WitnessedRootWitness, transactionProof: BlockProverProof ) { return await proveBlock( publicInput, networkState, blockWitness, - // stateTransitionProof, + stateTransitionProof, + deferSTs, + afterBlockRootWitness, transactionProof ); }, @@ -879,6 +1010,7 @@ export class BlockProverProgrammable extends ZkProgrammable< const methods = { proveTransaction: program.proveTransaction, + proveTransactions: program.proveTransactions, proveBlock: program.proveBlock, merge: program.merge, }; @@ -924,15 +1056,17 @@ export class BlockProver transactionHooks: ProvableTransactionHook[], @injectAll("ProvableBlockHook") blockHooks: ProvableBlockHook[], + @inject("StateServiceProvider") + stateServiceProvider: StateServiceProvider, verificationKeyService: RuntimeVerificationKeyRootService ) { super(); this.zkProgrammable = new BlockProverProgrammable( this, stateTransitionProver.zkProgrammable, - runtime.zkProgrammable, transactionHooks, 
blockHooks, + stateServiceProvider, verificationKeyService ); } @@ -950,17 +1084,27 @@ export class BlockProver public proveTransaction( publicInput: BlockProverPublicInput, - stateProof: StateTransitionProof, - appProof: DynamicRuntimeProof, - executionData: BlockProverExecutionData, - verificationKeyAttestation: RuntimeVerificationKeyAttestation + runtimeProof: DynamicRuntimeProof, + executionData: BlockProverSingleTransactionExecutionData ): Promise { return this.zkProgrammable.proveTransaction( publicInput, - stateProof, - appProof, - executionData, - verificationKeyAttestation + runtimeProof, + executionData + ); + } + + public proveTransactions( + publicInput: BlockProverPublicInput, + runtimeProof1: DynamicRuntimeProof, + runtimeProof2: DynamicRuntimeProof, + executionData: BlockProverMultiTransactionExecutionData + ): Promise { + return this.zkProgrammable.proveTransactions( + publicInput, + runtimeProof1, + runtimeProof2, + executionData ); } @@ -968,14 +1112,18 @@ export class BlockProver publicInput: BlockProverPublicInput, networkState: NetworkState, blockWitness: BlockHashMerkleTreeWitness, - // stateTransitionProof: StateTransitionProof, + stateTransitionProof: StateTransitionProof, + deferSTs: Bool, + afterBlockRootWitness: WitnessedRootWitness, transactionProof: BlockProverProof ): Promise { return this.zkProgrammable.proveBlock( publicInput, networkState, blockWitness, - // stateTransitionProof, + stateTransitionProof, + deferSTs, + afterBlockRootWitness, transactionProof ); } diff --git a/packages/protocol/src/prover/block/accummulators/BlockHashMerkleTree.ts b/packages/protocol/src/prover/block/accummulators/BlockHashMerkleTree.ts index 317061640..e80d4b6b8 100644 --- a/packages/protocol/src/prover/block/accummulators/BlockHashMerkleTree.ts +++ b/packages/protocol/src/prover/block/accummulators/BlockHashMerkleTree.ts @@ -5,12 +5,20 @@ export class BlockHashMerkleTree extends createMerkleTree(40) {} export class BlockHashMerkleTreeWitness extends BlockHashMerkleTree.WITNESS {} export class BlockHashTreeEntry extends Struct({ - blockHash: Field, + block: Struct({ + index: Field, + transactionListHash: Field, + }), closed: Bool, // TODO We could add startingEternalTransactionsHash here to offer - // a more trivial connection to the sequence state + // a more trivial connection to the sequence state }) { public hash(): Field { - return Poseidon.hash([this.blockHash, ...this.closed.toFields()]); + // Mirroring Block.hash() + const blockHash = Poseidon.hash([ + this.block.index, + this.block.transactionListHash, + ]); + return Poseidon.hash([blockHash, ...this.closed.toFields()]); } } diff --git a/packages/protocol/src/prover/statetransition/StateTransitionProvable.ts b/packages/protocol/src/prover/statetransition/StateTransitionProvable.ts index d4f7b0a4a..c80b471bd 100644 --- a/packages/protocol/src/prover/statetransition/StateTransitionProvable.ts +++ b/packages/protocol/src/prover/statetransition/StateTransitionProvable.ts @@ -1,20 +1,24 @@ import { Field, Proof, Struct } from "o1js"; import { WithZkProgrammable, CompilableModule } from "@proto-kit/common"; -import { StateTransitionProvableBatch } from "../../model/StateTransitionProvableBatch"; +import { + MerkleWitnessBatch, + StateTransitionProvableBatch, +} from "../../model/StateTransitionProvableBatch"; +import { AppliedStateTransitionBatchState } from "../../model/AppliedStateTransitionBatch"; export class StateTransitionProverPublicInput extends Struct({ - stateTransitionsHash: Field, - protocolTransitionsHash: 
Field, - stateRoot: Field, - protocolStateRoot: Field, + batchesHash: Field, + currentBatchStateHash: Field, + root: Field, + witnessedRootsHash: Field, }) {} export class StateTransitionProverPublicOutput extends Struct({ - stateTransitionsHash: Field, - protocolTransitionsHash: Field, - stateRoot: Field, - protocolStateRoot: Field, + batchesHash: Field, + currentBatchStateHash: Field, + root: Field, + witnessedRootsHash: Field, }) {} export type StateTransitionProof = Proof< @@ -28,9 +32,11 @@ export interface StateTransitionProvable StateTransitionProverPublicOutput >, CompilableModule { - runBatch: ( + proveBatch: ( publicInput: StateTransitionProverPublicInput, - batch: StateTransitionProvableBatch + batch: StateTransitionProvableBatch, + witnesses: MerkleWitnessBatch, + currentAppliedBatch: AppliedStateTransitionBatchState ) => Promise; merge: ( diff --git a/packages/protocol/src/prover/statetransition/StateTransitionProver.ts b/packages/protocol/src/prover/statetransition/StateTransitionProver.ts index c2925b2c3..6adaf7e76 100644 --- a/packages/protocol/src/prover/statetransition/StateTransitionProver.ts +++ b/packages/protocol/src/prover/statetransition/StateTransitionProver.ts @@ -14,15 +14,16 @@ import { injectable } from "tsyringe"; import { constants } from "../../Constants"; import { ProvableStateTransition } from "../../model/StateTransition"; import { - ProvableStateTransitionType, + MerkleWitnessBatch, StateTransitionProvableBatch, + StateTransitionType, } from "../../model/StateTransitionProvableBatch"; import { StateTransitionProverType } from "../../protocol/Protocol"; import { ProtocolModule } from "../../protocol/ProtocolModule"; -import { - DefaultProvableHashList, - ProvableHashList, -} from "../../utils/ProvableHashList"; +import { DefaultProvableHashList } from "../../utils/ProvableHashList"; +import { WitnessedRootHashList } from "../accumulators/WitnessedRootHashList"; +import { AppliedBatchHashList } from "../accumulators/AppliedBatchHashList"; +import { AppliedStateTransitionBatchState } from "../../model/AppliedStateTransitionBatch"; import { StateTransitionProof, @@ -35,31 +36,15 @@ const errors = { propertyNotMatching: (property: string, step: string) => `${property} not matching ${step}`, - merkleWitnessNotCorrect: ( - index: number, - type: ProvableStateTransitionType - ) => { - let s = `MerkleWitness not valid for StateTransition (${index}, type unknown)`; - Provable.asProver(() => { - s = s.replace( - "unknown", - type.isNormal().toBoolean() ? 
"normal" : "protocol" - ); - }); - return s; - }, - - noWitnessProviderSet: () => - new Error( - "WitnessProvider not set, set it before you use StateTransitionProvider" - ), + merkleWitnessNotCorrect: (index: number) => + `MerkleWitness not valid for StateTransition (${index})`, }; interface StateTransitionProverExecutionState { - stateRoot: Field; - protocolStateRoot: Field; - stateTransitionList: ProvableHashList; - protocolTransitionList: ProvableHashList; + currentBatch: AppliedStateTransitionBatchState; + batchList: AppliedBatchHashList; + finalizedRoot: Field; + witnessedRoots: WitnessedRootHashList; } const StateTransitionSelfProofClass = SelfProof< @@ -76,7 +61,10 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< StateTransitionProverPublicOutput > { public constructor( - private readonly stateTransitionProver: StateTransitionProver + private readonly stateTransitionProver: Pick< + StateTransitionProver, + "areProofsEnabled" + > ) { super(); } @@ -97,14 +85,25 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< publicOutput: StateTransitionProverPublicOutput, methods: { - runBatch: { - privateInputs: [StateTransitionProvableBatch], + proveBatch: { + privateInputs: [ + StateTransitionProvableBatch, + MerkleWitnessBatch, + AppliedStateTransitionBatchState, + ], async method( publicInput: StateTransitionProverPublicInput, - batch: StateTransitionProvableBatch + batch: StateTransitionProvableBatch, + witnesses: MerkleWitnessBatch, + currentAppliedBatch: AppliedStateTransitionBatchState ) { - return await instance.runBatch(publicInput, batch); + return await instance.proveBatch( + publicInput, + batch, + witnesses, + currentAppliedBatch + ); }, }, @@ -126,7 +125,7 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< }); const methods = { - runBatch: program.runBatch.bind(program), + proveBatch: program.proveBatch.bind(program), merge: program.merge.bind(program), }; @@ -149,42 +148,94 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< * and returns the new prover state */ public applyTransitions( - stateRoot: Field, - protocolStateRoot: Field, - stateTransitionCommitmentFrom: Field, - protocolTransitionCommitmentFrom: Field, - transitionBatch: StateTransitionProvableBatch - ): StateTransitionProverExecutionState { - const state: StateTransitionProverExecutionState = { - stateRoot, - protocolStateRoot, - - stateTransitionList: new DefaultProvableHashList( - ProvableStateTransition, - stateTransitionCommitmentFrom - ), - - protocolTransitionList: new DefaultProvableHashList( - ProvableStateTransition, - protocolTransitionCommitmentFrom - ), - }; + state: StateTransitionProverExecutionState, + batch: StateTransitionProvableBatch, + witnesses: MerkleWitnessBatch + ) { + const transitions = batch.batch; - const transitions = transitionBatch.batch; - const types = transitionBatch.transitionTypes; - const merkleWitness = transitionBatch.merkleWitnesses; for ( let index = 0; index < constants.stateTransitionProverBatchSize; index++ ) { - this.applyTransition( - state, - transitions[index], - types[index], - merkleWitness[index], + const updatedBatchState = this.applyTransition( + state.currentBatch, + transitions[index].stateTransition, + witnesses.witnesses[index], index ); + + // If the current batch is finished, we push it to the list + // and initialize the next + const { type, witnessRoot } = transitions[index]; + const closing = type.isClosing(); + const closingAndApply = type.type.equals( + 
StateTransitionType.closeAndApply + ); + + // Create the newBatch + // The root is based on if the previous batch will be applied or not + const base = Provable.if( + closingAndApply, + updatedBatchState.root, + state.finalizedRoot + ); + const newBatchState = new AppliedStateTransitionBatchState({ + batchHash: Field(0), + root: base, + }); + + const updatedBatch = { + applied: closingAndApply, + batchHash: updatedBatchState.batchHash, + }; + state.batchList.pushIf(updatedBatch, closing); + state.finalizedRoot = Provable.if( + closingAndApply, + updatedBatchState.root, + state.finalizedRoot + ); + + // Add computed root to the witnessed root list if needed + witnessRoot + .implies(closing) + .assertTrue("Can only witness roots at closing batches"); + state.witnessedRoots.pushIf( + { + root: state.finalizedRoot, + appliedBatchListState: state.batchList.commitment, + }, + witnessRoot + ); + + const isDummy = ProvableStateTransition.isDummy( + transitions[index].stateTransition + ); + + // Dummy STs cannot change any state, as to prevent any + // dummy-in-the-middle attacks. This is given if the type is nothing. + isDummy + .implies(type.isNothing()) + .assertTrue("Dummies have to be of type 'nothing'"); + + isDummy + .implies(state.currentBatch.batchHash.equals(0)) + .assertTrue("Dummies can only be placed on closed batchLists"); + + // Dummies don't close the batch, but we still want to ignore any + // updated batch, since we need to result to stay. + // This will break the pipeline if there is a dummy in the middle, + // but will only end up to invalid proofs (i.e. mismatched batches) + + state.currentBatch = new AppliedStateTransitionBatchState( + Provable.if( + closing.or(isDummy), + AppliedStateTransitionBatchState, + newBatchState, + updatedBatchState + ) + ); } return state; @@ -195,71 +246,91 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< * and mutates it in place */ public applyTransition( - state: StateTransitionProverExecutionState, + currentBatch: AppliedStateTransitionBatchState, transition: ProvableStateTransition, - type: ProvableStateTransitionType, - merkleWitness: RollupMerkleTreeWitness, + witness: RollupMerkleTreeWitness, index = 0 ) { + const impliedRoot = this.applyTransitionToRoot( + transition, + currentBatch.root, + witness, + index + ); + + // Append ST to the current batch's ST-list + const stList = new DefaultProvableHashList( + ProvableStateTransition, + currentBatch.batchHash + ); + stList.push(transition); + + // Update batch + return new AppliedStateTransitionBatchState({ + batchHash: stList.commitment, + root: impliedRoot, + }); + } + + private applyTransitionToRoot( + transition: ProvableStateTransition, + root: Field, + merkleWitness: RollupMerkleTreeWitness, + index: number + ): Field { const membershipValid = merkleWitness.checkMembership( - state.stateRoot, + root, transition.path, transition.from.value ); membershipValid .or(transition.from.isSome.not()) - .assertTrue(errors.merkleWitnessNotCorrect(index, type)); + .assertTrue(errors.merkleWitnessNotCorrect(index)); const newRoot = merkleWitness.calculateRoot(transition.to.value); - state.stateRoot = Provable.if( - transition.to.isSome, - newRoot, - state.stateRoot - ); - - // Only update protocol state root if ST is also of type protocol - // Since protocol STs are all at the start of the batch, this works - state.protocolStateRoot = Provable.if( - transition.to.isSome.and(type.isProtocol()), - newRoot, - state.protocolStateRoot - ); - - const isNotDummy = 
transition.path.equals(Field(0)).not(); - - state.stateTransitionList.pushIf( - transition, - isNotDummy.and(type.isNormal()) - ); - state.protocolTransitionList.pushIf( - transition, - isNotDummy.and(type.isProtocol()) - ); + return Provable.if(transition.to.isSome, newRoot, root); } /** * Applies a whole batch of StateTransitions at once */ @provableMethod() - public async runBatch( + public async proveBatch( publicInput: StateTransitionProverPublicInput, - batch: StateTransitionProvableBatch + batch: StateTransitionProvableBatch, + witnesses: MerkleWitnessBatch, + currentAppliedBatch: AppliedStateTransitionBatchState ): Promise { - const result = this.applyTransitions( - publicInput.stateRoot, - publicInput.protocolStateRoot, - publicInput.stateTransitionsHash, - publicInput.protocolTransitionsHash, - batch - ); + currentAppliedBatch + .hashOrZero() + .assertEquals( + publicInput.currentBatchStateHash, + "Provided startingAppliedBatch not matching PI hash" + ); + + // Assert that either the currentAppliedBatch is somewhere intermediary + // or the root is the current "finalized" root + currentAppliedBatch.root + .equals(publicInput.root) + .or(publicInput.currentBatchStateHash.equals(0).not()) + .assertTrue(); + + const state: StateTransitionProverExecutionState = { + batchList: new AppliedBatchHashList(publicInput.batchesHash), + currentBatch: currentAppliedBatch, + finalizedRoot: publicInput.root, + witnessedRoots: new WitnessedRootHashList(publicInput.witnessedRootsHash), + }; + + const result = this.applyTransitions(state, batch, witnesses); return new StateTransitionProverPublicOutput({ - stateRoot: result.stateRoot, - stateTransitionsHash: result.stateTransitionList.commitment, - protocolTransitionsHash: result.protocolTransitionList.commitment, - protocolStateRoot: result.protocolStateRoot, + batchesHash: result.batchList.commitment, + currentBatchStateHash: result.currentBatch.hashOrZero(), + root: result.finalizedRoot, + witnessedRootsHash: result.witnessedRoots.commitment, }); } @@ -272,69 +343,66 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< proof1.verify(); proof2.verify(); - // Check state - publicInput.stateRoot.assertEquals( - proof1.publicInput.stateRoot, - errors.propertyNotMatching("stateRoot", "publicInput.from -> proof1.from") - ); - proof1.publicOutput.stateRoot.assertEquals( - proof2.publicInput.stateRoot, - errors.propertyNotMatching("stateRoot", "proof1.to -> proof2.from") - ); - - // Check ST list - publicInput.stateTransitionsHash.assertEquals( - proof1.publicInput.stateTransitionsHash, + // Check current batch hash + publicInput.currentBatchStateHash.assertEquals( + proof1.publicInput.currentBatchStateHash, errors.propertyNotMatching( - "stateTransitionsHash", + "currentBatchStateHash", "publicInput.from -> proof1.from" ) ); - proof1.publicOutput.stateTransitionsHash.assertEquals( - proof2.publicInput.stateTransitionsHash, + proof1.publicOutput.currentBatchStateHash.assertEquals( + proof2.publicInput.currentBatchStateHash, errors.propertyNotMatching( - "stateTransitionsHash", + "currentBatchStateHash", "proof1.to -> proof2.from" ) ); - // Check Protocol ST list - publicInput.protocolTransitionsHash.assertEquals( - proof1.publicInput.protocolTransitionsHash, + // Check batches hash + publicInput.batchesHash.assertEquals( + proof1.publicInput.batchesHash, errors.propertyNotMatching( - "protocolTransitionsHash", + "batchesHash", "publicInput.from -> proof1.from" ) ); - proof1.publicOutput.protocolTransitionsHash.assertEquals( - 
proof2.publicInput.protocolTransitionsHash, - errors.propertyNotMatching( - "protocolTransitionsHash", - "proof1.to -> proof2.from" - ) + proof1.publicOutput.batchesHash.assertEquals( + proof2.publicInput.batchesHash, + errors.propertyNotMatching("batchesHash", "proof1.to -> proof2.from") ); - // Check protocol state root - publicInput.protocolStateRoot.assertEquals( - proof1.publicInput.protocolStateRoot, + // Check root + publicInput.root.assertEquals( + proof1.publicInput.root, + errors.propertyNotMatching("root", "publicInput.from -> proof1.from") + ); + proof1.publicOutput.root.assertEquals( + proof2.publicInput.root, + errors.propertyNotMatching("root", "proof1.to -> proof2.from") + ); + + // Check root accumulator + publicInput.witnessedRootsHash.assertEquals( + proof1.publicInput.witnessedRootsHash, errors.propertyNotMatching( - "protocolStateRoot", + "witnessedRootsHash", "publicInput.from -> proof1.from" ) ); - proof1.publicOutput.protocolStateRoot.assertEquals( - proof2.publicInput.protocolStateRoot, + proof1.publicOutput.witnessedRootsHash.assertEquals( + proof2.publicInput.witnessedRootsHash, errors.propertyNotMatching( - "protocolStateRoot", + "witnessedRootsHash", "proof1.to -> proof2.from" ) ); return new StateTransitionProverPublicInput({ - stateRoot: proof2.publicOutput.stateRoot, - stateTransitionsHash: proof2.publicOutput.stateTransitionsHash, - protocolTransitionsHash: proof2.publicOutput.protocolTransitionsHash, - protocolStateRoot: proof2.publicOutput.protocolStateRoot, + currentBatchStateHash: proof2.publicOutput.currentBatchStateHash, + batchesHash: proof2.publicOutput.batchesHash, + root: proof2.publicOutput.root, + witnessedRootsHash: proof2.publicOutput.witnessedRootsHash, }); } } @@ -360,11 +428,18 @@ export class StateTransitionProver return await this.zkProgrammable.compile(registry); } - public runBatch( + public proveBatch( publicInput: StateTransitionProverPublicInput, - batch: StateTransitionProvableBatch + batch: StateTransitionProvableBatch, + witnesses: MerkleWitnessBatch, + startingAppliedBatch: AppliedStateTransitionBatchState ): Promise { - return this.zkProgrammable.runBatch(publicInput, batch); + return this.zkProgrammable.proveBatch( + publicInput, + batch, + witnesses, + startingAppliedBatch + ); } public merge( diff --git a/packages/protocol/src/settlement/contracts/SettlementSmartContract.ts b/packages/protocol/src/settlement/contracts/SettlementSmartContract.ts index 3f1fe412f..675c4ce72 100644 --- a/packages/protocol/src/settlement/contracts/SettlementSmartContract.ts +++ b/packages/protocol/src/settlement/contracts/SettlementSmartContract.ts @@ -362,6 +362,10 @@ export abstract class SettlementSmartContractBase extends TokenContractV2 { Bool(true), "Supplied proof is not a closed BlockProof" ); + blockProof.publicOutput.pendingSTBatchesHash.assertEquals( + Field(0), + "Supplied proof is has outstanding STs to be proven" + ); // Execute onSettlementHooks for additional checks const stateRecord: SettlementStateRecord = { diff --git a/packages/protocol/src/state/assert/assert.ts b/packages/protocol/src/state/assert/assert.ts index 0971bd0d2..f15b554d3 100644 --- a/packages/protocol/src/state/assert/assert.ts +++ b/packages/protocol/src/state/assert/assert.ts @@ -20,13 +20,16 @@ export function assert(condition: Bool, message?: string | (() => string)) { Provable.asProver(() => { if (!condition.toBoolean()) { - if (!executionContext.current().isSimulated) { - log.debug("Assertion failed: ", message); - } const messageString = message !== 
undefined && typeof message === "function" ? message() : message; + + // If no isSimulated was set, we treat it as not simulated, + // therefore printing the log + if (!(executionContext.current().isSimulated ?? false)) { + log.debug("Assertion failed: ", messageString); + } executionContext.setStatusMessage(messageString, new Error().stack); } }); diff --git a/packages/protocol/src/state/context/RuntimeMethodExecutionContext.ts b/packages/protocol/src/state/context/RuntimeMethodExecutionContext.ts index 968afc82c..4bc75dabc 100644 --- a/packages/protocol/src/state/context/RuntimeMethodExecutionContext.ts +++ b/packages/protocol/src/state/context/RuntimeMethodExecutionContext.ts @@ -56,12 +56,17 @@ export class RuntimeMethodExecutionContext extends ProvableMethodExecutionContex public input: RuntimeMethodExecutionData | undefined; - // The input corresponding to the current result - private lastInput: RuntimeMethodExecutionData | undefined; + private isSimulated: boolean = false; - public override result = new RuntimeProvableMethodExecutionResult(); + // The inputs corresponding to the current result + private lastInputs: + | { + input: RuntimeMethodExecutionData | undefined; + isSimulated: boolean; + } + | undefined; - private isSimulated: boolean = false; + public override result = new RuntimeProvableMethodExecutionResult(); private assertSetupCalled(): asserts this is { input: RuntimeMethodExecutionData; @@ -152,7 +157,10 @@ export class RuntimeMethodExecutionContext extends ProvableMethodExecutionContex public afterMethod() { super.afterMethod(); if (this.isFinished) { - this.lastInput = this.input; + this.lastInputs = { + input: this.input, + isSimulated: this.isSimulated, + }; // TODO: find out why input isnt set in TransactionFeeHook during assert // this.input = undefined; this.isSimulated = false; @@ -167,8 +175,8 @@ export class RuntimeMethodExecutionContext extends ProvableMethodExecutionContex return { isFinished: this.isFinished, result: this.result, - input: this.lastInput, - isSimulated: this.isSimulated, + input: this.lastInputs?.input, + isSimulated: this.lastInputs?.isSimulated, }; } } diff --git a/packages/protocol/src/utils/FieldOptions.ts b/packages/protocol/src/utils/FieldOptions.ts new file mode 100644 index 000000000..68adb1778 --- /dev/null +++ b/packages/protocol/src/utils/FieldOptions.ts @@ -0,0 +1,13 @@ +import { Bool, Field, Provable, Struct } from "o1js"; + +export class FieldOption extends Struct({ + isSome: Bool, + value: Field, +}) { + public static from(isSome: Bool, potentialValue: Field) { + return { + isSome, + value: Provable.if(isSome, potentialValue, Field(0)), + }; + } +} diff --git a/packages/protocol/src/utils/ProvableHashList.ts b/packages/protocol/src/utils/ProvableHashList.ts index 02d572449..9b824d60a 100644 --- a/packages/protocol/src/utils/ProvableHashList.ts +++ b/packages/protocol/src/utils/ProvableHashList.ts @@ -1,4 +1,23 @@ -import { Field, Poseidon, Bool, Provable, ProvablePure } from "o1js"; +import { + Field, + Poseidon, + Bool, + Provable, + ProvablePure, + Unconstrained, +} from "o1js"; + +import { NonMethods } from "./utils"; + +export type ProvableHashListData = { + preimage: Field; + value: NonMethods; +}; + +export type VerifiedTransition = { + from: T; + to: T; +}; /** * Utilities for creating a hash list from a given value type. 
@@ -6,11 +25,49 @@ import { Field, Poseidon, Bool, Provable, ProvablePure } from "o1js"; export abstract class ProvableHashList { public constructor( protected readonly valueType: ProvablePure, - public commitment: Field = Field(0) + public commitment: Field = Field(0), + private unconstrainedList: Unconstrained< + ProvableHashListData[] + > = Unconstrained.from([]) ) {} protected abstract hash(elements: Field[]): Field; + private pushUnconstrained(preimage: Field, value: Value) { + const valueConstant = this.valueType.fromFields( + this.valueType.toFields(value).map((field) => field.toConstant()) + ); + this.unconstrainedList.get().push({ + preimage: preimage.toConstant(), + value: valueConstant, + }); + } + + /** + * Fast-forwards the state of the hashlist to a specified new tip. + * This assumes the transition (from -> to) to be already verified somewhere + * else that is outside this scope. + */ + public fastForward( + transition: VerifiedTransition, + message: string = "some hashlist" + ) { + const { from, to } = transition; + from.assertEquals( + this.commitment, + `From-commitment for ${message} not matching` + ); + + this.commitment = to; + } + + public witnessTip(preimage: Field, value: Value): Bool { + return this.hash([ + this.commitment, + ...this.valueType.toFields(value), + ]).equals(this.commitment); + } + /** * Converts the provided value to Field[] and appends it to * the current hashlist. @@ -19,19 +76,31 @@ export abstract class ProvableHashList { * @returns Current hash list. */ public push(value: Value) { + Provable.asProver(() => { + this.pushUnconstrained(this.commitment, value); + }); + this.commitment = this.hash([ this.commitment, ...this.valueType.toFields(value), ]); + return this; } public pushIf(value: Value, condition: Bool) { + Provable.asProver(() => { + if (condition.toBoolean()) { + this.pushUnconstrained(this.commitment, value); + } + }); + const newCommitment = this.hash([ this.commitment, ...this.valueType.toFields(value), ]); this.commitment = Provable.if(condition, newCommitment, this.commitment); + return this; } @@ -41,6 +110,12 @@ export abstract class ProvableHashList { public toField() { return this.commitment; } + + public getUnconstrainedValues(): Unconstrained< + ProvableHashListData[] + > { + return this.unconstrainedList; + } } export class DefaultProvableHashList extends ProvableHashList { diff --git a/packages/protocol/src/utils/ProvableReductionHashList.ts b/packages/protocol/src/utils/ProvableReductionHashList.ts index 196ce2c7c..56130f3aa 100644 --- a/packages/protocol/src/utils/ProvableReductionHashList.ts +++ b/packages/protocol/src/utils/ProvableReductionHashList.ts @@ -1,8 +1,13 @@ -import { Bool, Field, Poseidon, Provable } from "o1js"; +import { Bool, Field, Poseidon, Provable, ProvablePure } from "o1js"; -import { ProvableHashList } from "./ProvableHashList"; +import { NonMethods } from "./utils"; + +export class ProvableReductionHashList { + public constructor( + protected readonly valueType: ProvablePure, + public commitment: Field = Field(0) + ) {} -export class ProvableReductionHashList extends ProvableHashList { public unconstrainedList: Value[] = []; private constrainedLastValue: Value | undefined = undefined; @@ -65,4 +70,8 @@ export class ProvableReductionHashList extends ProvableHashList { public hash(elements: Field[]): Field { return Poseidon.hash(elements); } + + public getUnconstrainedValues(): NonMethods[] { + return this.unconstrainedList; + } } diff --git a/packages/protocol/src/utils/utils.ts 
b/packages/protocol/src/utils/utils.ts index fca865ee9..660d8c456 100644 --- a/packages/protocol/src/utils/utils.ts +++ b/packages/protocol/src/utils/utils.ts @@ -1,4 +1,4 @@ -import { Field, Poseidon, Provable } from "o1js"; +import { Bool, Field, Poseidon, Provable } from "o1js"; import floor from "lodash/floor"; export type ReturnType = FunctionType extends ( @@ -73,3 +73,20 @@ export function singleFieldToString(value: Field | bigint): string { } return fieldValue.toString(); } + +type NonMethodKeys = { + [K in keyof T]: T[K] extends Function ? never : K; +}[keyof T]; +export type NonMethods = Pick>; + +/** + * Asserts the equality of a and b, but only if doAssertion is true, otherwise it will assert 0 = 0 + */ +export function assertEqualsIf( + a: Field, + b: Field, + doAssertion: Bool, + msg: string +) { + a.mul(doAssertion.toField()).assertEquals(b.mul(doAssertion.toField()), msg); +} diff --git a/packages/protocol/test/BlockProver.test.ts b/packages/protocol/test/BlockProver.test.ts index fae2a200a..44b49dc6b 100644 --- a/packages/protocol/test/BlockProver.test.ts +++ b/packages/protocol/test/BlockProver.test.ts @@ -70,6 +70,7 @@ describe("blockProver", () => { const protocol = createAndInitTestingProtocol(); + /* function generateTestProofs( fromStateRoot: Field, toStateRoot: Field, @@ -120,6 +121,7 @@ describe("blockProver", () => { return [appProof, stProof]; } + */ it("dummy", () => { expect(1).toBe(1); diff --git a/packages/protocol/test/TestingProtocol.ts b/packages/protocol/test/TestingProtocol.ts index 568f26c95..5785d3221 100644 --- a/packages/protocol/test/TestingProtocol.ts +++ b/packages/protocol/test/TestingProtocol.ts @@ -9,6 +9,7 @@ import { BlockProver, LastStateRootBlockHook, Protocol, + StateServiceProvider, StateTransitionProver, } from "../src"; @@ -48,5 +49,9 @@ export function createAndInitTestingProtocol() { }); protocol.create(() => appChain.createChildContainer()); + protocol.registerValue({ + StateServiceProvider: new StateServiceProvider(), + }); + return protocol; } diff --git a/packages/protocol/test/model/StateTransitionProvableBatch.test.ts b/packages/protocol/test/model/StateTransitionProvableBatch.test.ts new file mode 100644 index 000000000..d37b25542 --- /dev/null +++ b/packages/protocol/test/model/StateTransitionProvableBatch.test.ts @@ -0,0 +1,137 @@ +import "reflect-metadata"; +import { Bool, Field } from "o1js"; + +import { + ProvableOption, + ProvableStateTransition, + ProvableStateTransitionType, + StateTransitionProvableBatch, +} from "../../src"; + +describe("StateTransitionProvableBatch", () => { + function createST(path: Field, from: Field, to: Field) { + return new ProvableStateTransition({ + path, + from: new ProvableOption({ + isSome: Bool(true), + value: from, + }), + to: new ProvableOption({ + isSome: Bool(true), + value: to, + }), + }); + } + + it("should place witnessRoots correctly on empty batch", () => { + const st = createST(Field(1), Field(2), Field(3)); + const data = [ + { + stateTransitions: [st, st], + applied: Bool(true), + witnessRoot: Bool(false), + }, + { + stateTransitions: [], + applied: Bool(true), + witnessRoot: Bool(true), + }, + ]; + + const batches = StateTransitionProvableBatch.fromBatches(data); + expect(batches).toHaveLength(1); + + const { batch } = batches[0]; + expect(batch[0].witnessRoot.toBoolean()).toBe(false); + expect(batch[1].witnessRoot.toBoolean()).toBe(true); + // Should be dummy + expect(batch[2].witnessRoot.toBoolean()).toBe(false); + }); + + it("should place witnessRoots correctly on empty 
batch", () => { + const st = createST(Field(1), Field(2), Field(3)); + const data = [ + { + stateTransitions: [st, st], + applied: Bool(true), + witnessRoot: Bool(false), + }, + { + stateTransitions: [st], + applied: Bool(false), + witnessRoot: Bool(true), + }, + ]; + + const batches = StateTransitionProvableBatch.fromBatches(data); + expect(batches).toHaveLength(1); + + const { batch } = batches[0]; + expect(batch[0].witnessRoot.toBoolean()).toBe(false); + expect(batch[1].witnessRoot.toBoolean()).toBe(false); + expect(batch[2].witnessRoot.toBoolean()).toBe(true); + }); + + it("should transform correctly", () => { + const st = createST(Field(1), Field(2), Field(3)); + const data = [ + { + stateTransitions: [st, st, st, st, st], + applied: Bool(true), + witnessRoot: Bool(true), + }, + { + stateTransitions: [st, st], + applied: Bool(false), + witnessRoot: Bool(false), + }, + ]; + + const batches = StateTransitionProvableBatch.fromBatches(data); + expect(batches).toHaveLength(2); + const { nothing, closeAndThrowAway, closeAndApply } = + ProvableStateTransitionType; + + const types = batches[0].batch.map(({ type }) => type); + const stateTransitions = batches[0].batch.map( + ({ stateTransition }) => stateTransition + ); + const witnessRoots = batches[0].batch.map(({ witnessRoot }) => witnessRoot); + + const types2 = batches[1].batch.map(({ type }) => type); + const stateTransitions2 = batches[1].batch.map( + ({ stateTransition }) => stateTransition + ); + const witnessRoots2 = batches[1].batch.map( + ({ witnessRoot }) => witnessRoot + ); + + expect(types).toStrictEqual([nothing, nothing, nothing, nothing]); + expect(stateTransitions).toStrictEqual([st, st, st, st]); + expect(witnessRoots).toStrictEqual([ + Bool(false), + Bool(false), + Bool(false), + Bool(false), + ]); + + expect(stateTransitions2).toStrictEqual([ + st, + st, + st, + ProvableStateTransition.dummy(), + ]); + expect(types2).toStrictEqual([ + closeAndApply, + nothing, + closeAndThrowAway, + nothing, + ]); + expect(witnessRoots2).toStrictEqual([ + Bool(true), + Bool(false), + Bool(false), + Bool(false), + ]); + }); +}); diff --git a/packages/protocol/test/prover/block/BlockProver.test.ts b/packages/protocol/test/prover/block/BlockProver.test.ts new file mode 100644 index 000000000..852f5833c --- /dev/null +++ b/packages/protocol/test/prover/block/BlockProver.test.ts @@ -0,0 +1,18 @@ +/* eslint-disable max-len */ +/** + * Testing strategy: + * + * - Test that hooks are executed and batches are created correctly + * - Transaction + * - Block + * - Test the various static checks on the transaction (signature, verificationKey, network state hash) + * - Test correct construction of the batch and list commitments + * - Test correct integration of the STProof - both defer and notDefer + * - proveBlock: correct blockNumber progression, closed flag (doesn't accepts closed proofs as tx proofs) + */ + +/* eslint-enable max-len */ + +it("dummy", () => { + expect(1).toBe(1); +}); diff --git a/packages/protocol/test/prover/statetransition/StateTransitionProver.test.ts b/packages/protocol/test/prover/statetransition/StateTransitionProver.test.ts new file mode 100644 index 000000000..930a2e49e --- /dev/null +++ b/packages/protocol/test/prover/statetransition/StateTransitionProver.test.ts @@ -0,0 +1,240 @@ +import { InMemoryAreProofsEnabled } from "@proto-kit/sdk"; +import { Bool, Field } from "o1js"; +import { + InMemoryMerkleTreeStorage, + padArray, + RollupMerkleTree, + RollupMerkleTreeWitness, +} from "@proto-kit/common"; + +import { + 
AppliedStateTransitionBatchState, + ProvableOption, + ProvableStateTransition, + StateTransitionProvableBatch, + StateTransitionProverProgrammable, +} from "../../../src"; + +describe("StateTransitionProver", () => { + let prover: StateTransitionProverProgrammable; + + function setup() { + prover = new StateTransitionProverProgrammable({ + get areProofsEnabled() { + return new InMemoryAreProofsEnabled(); + }, + }); + } + + beforeEach(() => { + setup(); + }); + + function createST(path: Field, from: Field, to: Field) { + return new ProvableStateTransition({ + path, + from: new ProvableOption({ + isSome: Bool(true), + value: from, + }), + to: new ProvableOption({ + isSome: Bool(true), + value: to, + }), + }); + } + + // function applyToTree( + // tree: RollupMerkleTree, + // batches: StateTransitionProvableBatch[], + // indizes: number[] + // ) { + // const flat = batches.flatMap((batch) => batch.batch); + // indizes.forEach((index) => { + // const st = flat[index].stateTransition; + // if (st.to.isSome) { + // tree.setLeaf(st.path.toBigInt(), st.to.value); + // } + // }); + // } + + describe("currentBatchHash", () => { + it.each([ + [true, true], + [true, false], + [false, false], + ])( + "should retain empty currentBatchHash for padded dummies", + async (applied, witnessRoot) => { + const batch = StateTransitionProvableBatch.fromBatches([ + { + stateTransitions: [createST(Field(1), Field(0), Field(2))], + applied: Bool(applied), + witnessRoot: Bool(witnessRoot), + }, + ]); + + const tree = new RollupMerkleTree(new InMemoryMerkleTreeStorage()); + const witness = tree.getWitness(1n); + + const result = await prover.proveBatch( + { + root: tree.getRoot(), + witnessedRootsHash: Field(0), + batchesHash: Field(0), + currentBatchStateHash: Field(0), + }, + batch[0], + { + witnesses: padArray([witness], 4, () => + RollupMerkleTreeWitness.dummy() + ), + }, + new AppliedStateTransitionBatchState({ + root: tree.getRoot(), + batchHash: Field(0), + }) + ); + + expect(result.currentBatchStateHash.toString()).toStrictEqual("0"); + } + ); + }); + + describe("dummies", () => { + it("should fail if dummy is type close", async () => { + expect.assertions(1); + + const batch = StateTransitionProvableBatch.fromBatches([ + { + stateTransitions: [ProvableStateTransition.dummy()], + applied: Bool(true), + witnessRoot: Bool(true), + }, + ]); + + const prove = async () => + await prover.proveBatch( + { + root: Field(RollupMerkleTree.EMPTY_ROOT), + witnessedRootsHash: Field(0), + batchesHash: Field(0), + currentBatchStateHash: Field(0), + }, + batch[0], + { + witnesses: padArray([], 4, RollupMerkleTreeWitness.dummy), + }, + new AppliedStateTransitionBatchState({ + root: Field(RollupMerkleTree.EMPTY_ROOT), + batchHash: Field(0), + }) + ); + + await expect(prove).rejects.toThrow( + /Dummies have to be of type 'nothing'.*/ + ); + }); + + it("should fail if dummy is in the middle", async () => { + expect.assertions(1); + + const batch = StateTransitionProvableBatch.fromBatches([ + { + stateTransitions: [ + createST(Field(1), Field(0), Field(2)), + ProvableStateTransition.dummy(), + createST(Field(2), Field(0), Field(3)), + ], + applied: Bool(true), + witnessRoot: Bool(true), + }, + ]); + + const tree = new RollupMerkleTree(new InMemoryMerkleTreeStorage()); + + const inputRoot = tree.getRoot(); + + const witness = tree.getWitness(1n); + tree.setLeaf(1n, Field(2)); + const witness2 = tree.getWitness(2n); + + const prove = async () => + await prover.proveBatch( + { + root: inputRoot, + witnessedRootsHash: Field(0), + 
batchesHash: Field(0), + currentBatchStateHash: Field(0), + }, + batch[0], + { + witnesses: [ + witness, + RollupMerkleTreeWitness.dummy(), + witness2, + RollupMerkleTreeWitness.dummy(), + ], + }, + new AppliedStateTransitionBatchState({ + root: inputRoot, + batchHash: Field(0), + }) + ); + + await expect(prove).rejects.toThrow( + /Dummies can only be placed on closed batchLists.*/ + ); + }); + }); + + describe("batch progression", () => { + it("should throw away non-applied batches", async () => { + const batch = StateTransitionProvableBatch.fromBatches([ + { + stateTransitions: [ + createST(Field(1), Field(0), Field(2)), + createST(Field(2), Field(0), Field(3)), + ], + applied: Bool(true), + witnessRoot: Bool(true), + }, + { + stateTransitions: [ + createST(Field(2), Field(3), Field(4)), + createST(Field(2), Field(4), Field(5)), + ], + applied: Bool(false), + witnessRoot: Bool(true), + }, + ]); + + const tree = new RollupMerkleTree(new InMemoryMerkleTreeStorage()); + + const witness1 = tree.getWitness(1n); + tree.setLeaf(1n, Field(2)); + const witness2 = tree.getWitness(2n); + tree.setLeaf(2n, Field(3)); + + const result = await prover.proveBatch( + { + root: Field(RollupMerkleTree.EMPTY_ROOT), + witnessedRootsHash: Field(0), + batchesHash: Field(0), + currentBatchStateHash: Field(0), + }, + batch[0], + { + witnesses: [witness1, witness2, witness2, witness2], + }, + new AppliedStateTransitionBatchState({ + root: Field(RollupMerkleTree.EMPTY_ROOT), + batchHash: Field(0), + }) + ); + + expect(result.root.toString()).toStrictEqual(tree.getRoot().toString()); + expect(result.currentBatchStateHash.toString()).toStrictEqual("0"); + }); + }); +}); diff --git a/packages/protocol/test/utils/ProvableHashList.test.ts b/packages/protocol/test/utils/ProvableHashList.test.ts new file mode 100644 index 000000000..b6456a665 --- /dev/null +++ b/packages/protocol/test/utils/ProvableHashList.test.ts @@ -0,0 +1,44 @@ +import "reflect-metadata"; +import { Bool, Field, Poseidon } from "o1js"; + +import { DefaultProvableHashList } from "../../src"; + +describe("defaultProvableHashList", () => { + describe.each([ + [0, [{ value: 1n, push: true }]], + // [ + // 10, + // [ + // { value: 1n, push: true }, + // { value: 5n, push: false }, + // ], + // ], + // [ + // 10, + // [ + // { value: 1n, push: true }, + // { value: 5n, push: false }, + // { value: 6n, push: true }, + // ], + // ], + ])("should correctly append and save", (start, elements) => { + it("Using only pushIf", () => { + const hashList = new DefaultProvableHashList(Field, Field(start)); + + // eslint-disable-next-line sonarjs/no-unused-collection + const appended: bigint[] = []; + let hash = Field(start); + + for (const element of elements) { + hashList.pushIf(Field(element.value), Bool(element.push)); + if (element.push) { + appended.push(element.value); + + hash = Poseidon.hash([hash, Field(element.value)]); + } + } + + expect(hash.toString()).toStrictEqual(hashList.commitment.toString()); + }); + }); +}); diff --git a/packages/protocol/test/utils/ProvableReductionHashList.test.ts b/packages/protocol/test/utils/ProvableReductionHashList.test.ts index 1f874f311..76e1faabe 100644 --- a/packages/protocol/test/utils/ProvableReductionHashList.test.ts +++ b/packages/protocol/test/utils/ProvableReductionHashList.test.ts @@ -4,7 +4,7 @@ import { Bool, Field, Poseidon } from "o1js"; import { StateTransitionReductionList, reduceStateTransitions, -} from "../../src/utils/StateTransitionReductionList"; +} from 
"../../src/prover/accumulators/StateTransitionReductionList"; import { DefaultProvableHashList, ProvableStateTransition, diff --git a/packages/sdk/test/blockProof/blockProof.test.ts b/packages/sdk/test/blockProof/blockProof.test.ts index 2ae4f389e..0dc1cbdb1 100644 --- a/packages/sdk/test/blockProof/blockProof.test.ts +++ b/packages/sdk/test/blockProof/blockProof.test.ts @@ -88,7 +88,7 @@ describe.skip("blockProof", () => { ); await mapSequential(txHooks, async (hook) => { - await hook.onTransaction({ + await hook.beforeTransaction({ transaction: RuntimeTransaction.fromTransaction({ sender: alice, nonce: O1UInt64.from(0), @@ -97,6 +97,12 @@ describe.skip("blockProof", () => { }), networkState: NetworkState.empty(), signature: Signature.create(PrivateKey.random(), [Field(0)]), + prover: { + incomingMessagesHash: Field(0), + transactionsHash: Field(0), + eternalTransactionsHash: Field(0), + blockHashRoot: Field(0), + }, }); }); diff --git a/packages/sequencer/package.json b/packages/sequencer/package.json index 9f0a70982..aa2545623 100644 --- a/packages/sequencer/package.json +++ b/packages/sequencer/package.json @@ -33,7 +33,7 @@ }, "dependencies": { "compute-gcd": "^1.2.1", - "lodash": "^4.17.21", + "lodash-es": "^4.17.21", "reflect-metadata": "^0.1.13", "ts-pattern": "^4.3.0", "mina-fungible-token": "^1.0.0" diff --git a/packages/sequencer/src/index.ts b/packages/sequencer/src/index.ts index e92597ab2..3637619c2 100644 --- a/packages/sequencer/src/index.ts +++ b/packages/sequencer/src/index.ts @@ -42,11 +42,19 @@ export * from "./protocol/production/trigger/BlockTrigger"; export * from "./protocol/production/trigger/ManualBlockTrigger"; export * from "./protocol/production/trigger/TimedBlockTrigger"; export * from "./protocol/production/BatchProducerModule"; -export * from "./protocol/production/BlockTaskFlowService"; -export * from "./protocol/production/TransactionTraceService"; -export * from "./protocol/production/sequencing/TransactionExecutionService"; export * from "./protocol/production/sequencing/BlockProducerModule"; +export * from "./protocol/production/sequencing/TransactionExecutionService"; +export * from "./protocol/production/sequencing/BlockProductionService"; +export * from "./protocol/production/sequencing/BlockResultService"; export * from "./protocol/production/flow/ReductionTaskFlow"; +export * from "./protocol/production/flow/TransactionFlow"; +export * from "./protocol/production/flow/StateTransitionFlow"; +export * from "./protocol/production/flow/BlockFlow"; +export * from "./protocol/production/flow/BatchFlow"; +export * from "./protocol/production/tracing/BlockTracingService"; +export * from "./protocol/production/tracing/BatchTracingService"; +export * from "./protocol/production/tracing/StateTransitionTracingService"; +export * from "./protocol/production/tracing/TransactionTracingService"; export * from "./sequencer/SequencerStartupModule"; export * from "./storage/model/Batch"; export * from "./storage/model/Block"; diff --git a/packages/sequencer/src/mempool/private/PrivateMempool.ts b/packages/sequencer/src/mempool/private/PrivateMempool.ts index 47f12adb4..2e34e8b88 100644 --- a/packages/sequencer/src/mempool/private/PrivateMempool.ts +++ b/packages/sequencer/src/mempool/private/PrivateMempool.ts @@ -2,9 +2,11 @@ import { EventEmitter, log, noop } from "@proto-kit/common"; import { container, inject } from "tsyringe"; import { AccountStateHook, + BlockHashMerkleTree, MandatoryProtocolModulesRecord, NetworkState, Protocol, + ProvableHookBlockState, 
RuntimeMethodExecutionContext, RuntimeMethodExecutionData, StateServiceProvider, @@ -117,6 +119,8 @@ export class PrivateMempool extends SequencerModule implements Mempool { // in the skipped txs list and when later txs succeed we check to see if any state transition // paths are shared between the just succeeded tx and any of the skipped txs. This is // because a failed tx may succeed now if the failure was to do with a nonce issue, say. + // TODO Refactor + // eslint-disable-next-line sonarjs/cognitive-complexity private async checkTxValid( transactions: PendingTransaction[], baseService: CachedStateService, @@ -135,6 +139,20 @@ export class PrivateMempool extends SequencerModule implements Mempool { let queue: PendingTransaction[] = [...transactions]; + const previousBlock = await this.unprovenQueue.getLatestBlock(); + + // TODO This is not sound currently as the prover state changes all the time + // in the actual blockprover. We need to properly simulate that + const proverState: ProvableHookBlockState = { + blockHashRoot: Field( + previousBlock?.result.blockHashRoot ?? BlockHashMerkleTree.EMPTY_ROOT + ), + eternalTransactionsHash: + previousBlock?.block.toEternalTransactionsHash ?? Field(0), + transactionsHash: previousBlock?.block.transactionsHash ?? Field(0), + incomingMessagesHash: previousBlock?.block.toMessagesHash ?? Field(0), + }; + while ( queue.length > 0 && sortedTransactions.length < (limit ?? Number.MAX_VALUE) @@ -150,10 +168,11 @@ export class PrivateMempool extends SequencerModule implements Mempool { const signedTransaction = tx.toProtocolTransaction(); // eslint-disable-next-line no-await-in-loop - await this.accountStateHook.onTransaction({ + await this.accountStateHook.beforeTransaction({ networkState: networkState, transaction: signedTransaction.transaction, signature: signedTransaction.signature, + prover: proverState, }); const { status, statusMessage, stateTransitions } = executionContext.current().result; diff --git a/packages/sequencer/src/protocol/production/BatchProducerModule.ts b/packages/sequencer/src/protocol/production/BatchProducerModule.ts index 8bd61b03b..6bdb77202 100644 --- a/packages/sequencer/src/protocol/production/BatchProducerModule.ts +++ b/packages/sequencer/src/protocol/production/BatchProducerModule.ts @@ -2,13 +2,10 @@ import { inject } from "tsyringe"; import { BlockProverPublicInput, BlockProverPublicOutput, - DefaultProvableHashList, - MINA_EVENT_PREFIXES, - MinaPrefixedProvableHashList, NetworkState, } from "@proto-kit/protocol"; import { Field, Proof } from "o1js"; -import { log, noop, RollupMerkleTree } from "@proto-kit/common"; +import { log, noop } from "@proto-kit/common"; import { sequencerModule, @@ -16,45 +13,20 @@ import { } from "../../sequencer/builder/SequencerModule"; import { BatchStorage } from "../../storage/repositories/BatchStorage"; import { SettleableBatch } from "../../storage/model/Batch"; -import { CachedStateService } from "../../state/state/CachedStateService"; import { CachedMerkleTreeStore } from "../../state/merkle/CachedMerkleTreeStore"; -import { AsyncStateService } from "../../state/async/AsyncStateService"; import { AsyncMerkleTreeStore } from "../../state/async/AsyncMerkleTreeStore"; -import { BlockResult, BlockWithResult } from "../../storage/model/Block"; -import { VerificationKeyService } from "../runtime/RuntimeVerificationKeyService"; +import { BlockWithResult } from "../../storage/model/Block"; import type { Database } from "../../storage/Database"; -import { TransactionTraceService } from 
"./TransactionTraceService"; -import { BlockTaskFlowService } from "./BlockTaskFlowService"; -import { NewBlockProverParameters } from "./tasks/NewBlockTask"; import { BlockProofSerializer } from "./tasks/serializers/BlockProofSerializer"; -import { RuntimeProofParameters } from "./tasks/RuntimeProvingTask"; -import { StateTransitionProofParameters } from "./tasks/StateTransitionTask"; -import { BlockProverParameters } from "./tasks/TransactionProvingTask"; +import { BatchTracingService } from "./tracing/BatchTracingService"; +import { BatchFlow } from "./flow/BatchFlow"; export type StateRecord = Record; -export interface TransactionTrace { - runtimeProver: RuntimeProofParameters; - stateTransitionProver: StateTransitionProofParameters[]; - blockProver: BlockProverParameters; -} - -export interface BlockTrace { - block: NewBlockProverParameters; - stateTransitionProver: StateTransitionProofParameters[]; - transactions: TransactionTrace[]; -} - -export interface BlockWithPreviousResult { - block: BlockWithResult; - lastBlockResult?: BlockResult; -} - interface BatchMetadata { batch: SettleableBatch; - stateService: CachedStateService; - merkleStore: CachedMerkleTreeStore; + changes: CachedMerkleTreeStore; } const errors = { @@ -75,73 +47,31 @@ export class BatchProducerModule extends SequencerModule { private productionInProgress = false; public constructor( - @inject("AsyncStateService") - private readonly asyncStateService: AsyncStateService, @inject("AsyncMerkleStore") private readonly merkleStore: AsyncMerkleTreeStore, @inject("BatchStorage") private readonly batchStorage: BatchStorage, - @inject("BlockTreeStore") - private readonly blockTreeStore: AsyncMerkleTreeStore, @inject("Database") private readonly database: Database, - private readonly traceService: TransactionTraceService, - private readonly blockFlowService: BlockTaskFlowService, + private readonly batchFlow: BatchFlow, private readonly blockProofSerializer: BlockProofSerializer, - private readonly verificationKeyService: VerificationKeyService + private readonly batchTraceService: BatchTracingService ) { super(); } - private async applyStateChanges(batch: BatchMetadata) { - // TODO Introduce Proven and Unproven BlockHashTree stores - for rollbacks - await this.database.executeInTransaction(async () => { - await batch.stateService.mergeIntoParent(); - await batch.merkleStore.mergeIntoParent(); - }); - } - /** * Main function to call when wanting to create a new block based on the * transactions that are present in the mempool. 
This function should also
-   * be the one called by BlockTriggers
+   * be the one called by BlockTriggers
   */
  public async createBatch(
-    blocks: BlockWithPreviousResult[]
+    blocks: BlockWithResult[]
  ): Promise {
-    log.info("Producing batch...");
-
-    const height = await this.batchStorage.getCurrentBatchHeight();
-
-    const batchWithStateDiff = await this.tryProduceBatch(blocks, height);
-
-    if (batchWithStateDiff !== undefined) {
-      const numTxs = blocks.reduce(
-        (sum, block) => sum + block.block.block.transactions.length,
-        0
-      );
-      log.info(
-        `Batch produced (${batchWithStateDiff.batch.blockHashes.length} blocks, ${numTxs} txs)`
-      );
-
-      // Apply state changes to current StateService
-      await this.applyStateChanges(batchWithStateDiff);
-    }
-    return batchWithStateDiff?.batch;
-  }
-
-  public async start(): Promise {
-    noop();
-  }
-
-  private async tryProduceBatch(
-    blocks: BlockWithPreviousResult[],
-    height: number
-  ): Promise {
     if (!this.productionInProgress) {
       try {
         this.productionInProgress = true;
-        const batch = await this.produceBatch(blocks, height);
+        const batch = await this.tryProduceBatch(blocks);
 
         this.productionInProgress = false;
 
@@ -171,15 +101,47 @@ export class BatchProducerModule extends SequencerModule {
     return undefined;
   }
 
+  private async tryProduceBatch(
+    blocks: BlockWithResult[]
+  ): Promise {
+    log.info("Producing batch...");
+
+    const height = await this.batchStorage.getCurrentBatchHeight();
+
+    const batchWithStateDiff = await this.produceBatch(blocks, height);
+
+    if (batchWithStateDiff !== undefined) {
+      const numTxs = blocks.reduce(
+        (sum, block) => sum + block.block.transactions.length,
+        0
+      );
+      log.info(
+        `Batch produced (${batchWithStateDiff.batch.blockHashes.length} blocks, ${numTxs} txs)`
+      );
+
+      // Apply state changes to current MerkleTreeStore
+      await this.database.executeInTransaction(async () => {
+        await this.batchStorage.pushBatch(batchWithStateDiff.batch);
+        await batchWithStateDiff.changes.mergeIntoParent();
+      });
+
+      // TODO Add transition from unproven to proven state for stateservice
+      // This needs proper DB-level masking
+    }
+    return batchWithStateDiff?.batch;
+  }
+
+  public async start(): Promise {
+    noop();
+  }
+
   private async produceBatch(
-    blocks: BlockWithPreviousResult[],
+    blocks: BlockWithResult[],
     height: number
   ): Promise {
     const batch = await this.computeBatch(blocks, height);
 
-    const blockHashes = blocks.map((bundle) =>
-      bundle.block.block.hash.toString()
-    );
+    const blockHashes = blocks.map((bundle) => bundle.block.hash.toString());
 
     const jsonProof = this.blockProofSerializer
       .getBlockProofSerializer()
@@ -194,31 +156,29 @@ export class BatchProducerModule extends SequencerModule {
         toNetworkState: batch.toNetworkState,
       },
 
-      stateService: batch.stateService,
-      merkleStore: batch.merkleStore,
+      changes: batch.changes,
     };
   }
 
   /**
-   * Very naive impl for now
+   * Computes a batch based on an array of sequenced blocks.
+   * This process is also known as tracing, as we "trace" every computational step
+   * into witnesses that we can use in the provers.
    *
-   * How we produce batches:
+   * The workflow of computing batches works as follows:
    *
-   * 1. We get all pending txs from the mempool and define an order
-   * 2. We execute them to get results / intermediate state-roots.
-   *    We define a tuple of (tx data (methodId, args), state-input, state-output)
-   *    as a "tx trace"
-   * 3. 
We create tasks based on those traces * + * + * @param blocks + * @param blockId + * @private */ - private async computeBatch( - blocks: BlockWithPreviousResult[], + blocks: BlockWithResult[], blockId: number ): Promise<{ proof: Proof; - stateService: CachedStateService; - merkleStore: CachedMerkleTreeStore; + changes: CachedMerkleTreeStore; fromNetworkState: NetworkState; toNetworkState: NetworkState; }> { @@ -226,69 +186,21 @@ export class BatchProducerModule extends SequencerModule { throw errors.blockWithoutTxs(); } - const stateServices = { - stateService: new CachedStateService(this.asyncStateService), - merkleStore: new CachedMerkleTreeStore(this.merkleStore), - }; - - const blockTraces: BlockTrace[] = []; + const merkleTreeStore = new CachedMerkleTreeStore(this.merkleStore); - const eternalBundleTracker = new DefaultProvableHashList( - Field, - blocks[0].block.block.fromEternalTransactionsHash + const trace = await this.batchTraceService.traceBatch( + blocks.map((block) => block), + merkleTreeStore ); - const messageTracker = new MinaPrefixedProvableHashList( - Field, - MINA_EVENT_PREFIXES.sequenceEvents, - blocks[0].block.block.fromMessagesHash - ); - - for (const blockWithPreviousResult of blocks) { - const { block } = blockWithPreviousResult.block; - const txs = block.transactions; - - const bundleTracker = new DefaultProvableHashList(Field); - - const transactionTraces: TransactionTrace[] = []; - - for (const [, tx] of txs.entries()) { - // eslint-disable-next-line no-await-in-loop - const result = await this.traceService.createTransactionTrace( - tx, - stateServices, - this.verificationKeyService, - block.networkState.during, - bundleTracker, - eternalBundleTracker, - messageTracker - ); - - transactionTraces.push(result); - } - - // eslint-disable-next-line no-await-in-loop - const blockTrace = await this.traceService.createBlockTrace( - transactionTraces, - stateServices, - this.blockTreeStore, - Field( - blockWithPreviousResult.lastBlockResult?.stateRoot ?? 
- RollupMerkleTree.EMPTY_ROOT - ), - blockWithPreviousResult.block - ); - blockTraces.push(blockTrace); - } - const proof = await this.blockFlowService.executeFlow(blockTraces, blockId); + const proof = await this.batchFlow.executeBatch(trace, blockId); - const fromNetworkState = blocks[0].block.block.networkState.before; - const toNetworkState = blocks.at(-1)!.block.result.afterNetworkState; + const fromNetworkState = blocks[0].block.networkState.before; + const toNetworkState = blocks.at(-1)!.result.afterNetworkState; return { proof, - stateService: stateServices.stateService, - merkleStore: stateServices.merkleStore, + changes: merkleTreeStore, fromNetworkState, toNetworkState, }; diff --git a/packages/sequencer/src/protocol/production/BlockTaskFlowService.ts b/packages/sequencer/src/protocol/production/BlockTaskFlowService.ts deleted file mode 100644 index cc02d727b..000000000 --- a/packages/sequencer/src/protocol/production/BlockTaskFlowService.ts +++ /dev/null @@ -1,364 +0,0 @@ -import { inject, injectable, Lifecycle, scoped } from "tsyringe"; -import { Bool, Field, Proof } from "o1js"; -import { - BlockProof, - BlockProverPublicInput, - BlockProverPublicOutput, - MandatoryProtocolModulesRecord, - MethodPublicOutput, - Protocol, - StateTransitionProof, -} from "@proto-kit/protocol"; -import { log, MAX_FIELD } from "@proto-kit/common"; - -import { TaskQueue } from "../../worker/queue/TaskQueue"; -import { Flow, FlowCreator } from "../../worker/flow/Flow"; - -import type { BlockTrace } from "./BatchProducerModule"; -import { StateTransitionTask } from "./tasks/StateTransitionTask"; -import { RuntimeProvingTask } from "./tasks/RuntimeProvingTask"; -import { ReductionTaskFlow } from "./flow/ReductionTaskFlow"; -import { - NewBlockProverParameters, - NewBlockProvingParameters, - NewBlockTask, -} from "./tasks/NewBlockTask"; -import { StateTransitionReductionTask } from "./tasks/StateTransitionReductionTask"; -import { - BlockProverParameters, - TransactionProvingTask, - TransactionProvingTaskParameters, -} from "./tasks/TransactionProvingTask"; -import { BlockReductionTask } from "./tasks/BlockReductionTask"; - -type RuntimeProof = Proof; - -interface BlockProductionFlowState { - pairings: { - runtimeProof?: RuntimeProof; - stProof?: StateTransitionProof; - blockArguments: BlockProverParameters; - }[][]; - - blockPairings: { - blockProof?: BlockProof; - stProof?: StateTransitionProof; - provingArguments: NewBlockProverParameters; - }[]; -} - -/** - * We could rename this into BlockCreationStrategy and enable the injection of - * different creation strategies. 
- */ -@injectable() -@scoped(Lifecycle.ContainerScoped) -export class BlockTaskFlowService { - public constructor( - @inject("TaskQueue") private readonly taskQueue: TaskQueue, - private readonly flowCreator: FlowCreator, - private readonly stateTransitionTask: StateTransitionTask, - private readonly stateTransitionReductionTask: StateTransitionReductionTask, - private readonly runtimeProvingTask: RuntimeProvingTask, - private readonly transactionProvingTask: TransactionProvingTask, - private readonly blockProvingTask: NewBlockTask, - private readonly blockReductionTask: BlockReductionTask, - @inject("Protocol") - private readonly protocol: Protocol - ) {} - - public async pushPairing( - flow: Flow, - transactionReductionTask: ReductionTaskFlow< - TransactionProvingTaskParameters, - BlockProof - >, - blockIndex: number, - transactionIndex: number - ) { - const { runtimeProof, stProof, blockArguments } = - flow.state.pairings[blockIndex][transactionIndex]; - - if (runtimeProof !== undefined && stProof !== undefined) { - log.trace(`Found pairing block: ${blockIndex}, tx: ${transactionIndex}`); - - await transactionReductionTask.pushInput({ - input1: stProof, - input2: runtimeProof, - params: blockArguments, - }); - } - } - - public async pushBlockPairing( - flow: Flow, - blockReductionTask: ReductionTaskFlow< - NewBlockProvingParameters, - BlockProof - >, - index: number - ) { - const { blockProof, stProof, provingArguments } = - flow.state.blockPairings[index]; - - if (blockProof !== undefined && stProof !== undefined) { - log.debug(`Found block pairing ${index}`); - - await blockReductionTask.pushInput({ - input1: stProof, - input2: blockProof, - params: provingArguments, - }); - } - } - - private createSTMergeFlow(name: string, inputLength: number) { - return new ReductionTaskFlow( - { - name, - inputLength, - mappingTask: this.stateTransitionTask, - reductionTask: this.stateTransitionReductionTask, - - mergableFunction: (a, b) => - a.publicOutput.stateRoot - .equals(b.publicInput.stateRoot) - .and( - a.publicOutput.protocolStateRoot.equals( - b.publicInput.protocolStateRoot - ) - ) - .and( - a.publicOutput.stateTransitionsHash.equals( - b.publicInput.stateTransitionsHash - ) - ) - .toBoolean(), - }, - this.flowCreator - ); - } - - public async executeFlow( - blockTraces: BlockTrace[], - batchId: number - ): Promise { - const flow = this.flowCreator.createFlow( - `main-${batchId}`, - { - pairings: blockTraces.map((blockTrace) => - blockTrace.transactions.map((trace) => ({ - runtimeProof: undefined, - stProof: undefined, - blockArguments: trace.blockProver, - })) - ), - - blockPairings: blockTraces.map((blockTrace) => ({ - blockProof: undefined, - stProof: undefined, - provingArguments: blockTrace.block, - })), - } - ); - - const blockMergingFlow = new ReductionTaskFlow( - { - name: `block-${batchId}`, - inputLength: blockTraces.length, - mappingTask: this.blockProvingTask, - reductionTask: this.blockReductionTask, - - mergableFunction: (a, b) => { - // TODO Proper replication of merge logic - const part1 = a.publicOutput.stateRoot - .equals(b.publicInput.stateRoot) - .and( - a.publicOutput.blockHashRoot.equals(b.publicInput.blockHashRoot) - ) - .and( - a.publicOutput.networkStateHash.equals( - b.publicInput.networkStateHash - ) - ) - .and( - a.publicOutput.eternalTransactionsHash.equals( - b.publicInput.eternalTransactionsHash - ) - ) - .and(a.publicOutput.closed.equals(b.publicOutput.closed)) - .toBoolean(); - - const proof1Closed = a.publicOutput.closed; - const proof2Closed = 
b.publicOutput.closed; - - const blockNumberProgressionValid = a.publicOutput.blockNumber.equals( - b.publicInput.blockNumber - ); - - const isValidTransactionMerge = a.publicInput.blockNumber - .equals(MAX_FIELD) - .and(blockNumberProgressionValid) - .and(proof1Closed.or(proof2Closed).not()); - - const isValidClosedMerge = proof1Closed - .and(proof2Closed) - .and(blockNumberProgressionValid); - - return ( - part1 && isValidClosedMerge.or(isValidTransactionMerge).toBoolean() - ); - }, - }, - this.flowCreator - ); - blockMergingFlow.onCompletion(async (result) => { - log.debug(`Block generation finished, with proof ${result.proof}`); // TODO Remove result logging - flow.resolve(result); - }); - blockMergingFlow.deferErrorsTo(flow); - - return await flow.withFlow(async () => { - await flow.forEach(blockTraces, async (blockTrace, blockNumber) => { - if (blockTrace.transactions.length > 0) { - const transactionMergingFlow = new ReductionTaskFlow( - { - name: `tx-${batchId}-${blockNumber}`, - inputLength: blockTrace.transactions.length, - mappingTask: this.transactionProvingTask, - reductionTask: this.blockReductionTask, - - mergableFunction: (a, b) => - a.publicOutput.stateRoot - .equals(b.publicInput.stateRoot) - .and( - a.publicOutput.transactionsHash.equals( - b.publicInput.transactionsHash - ) - ) - .and( - a.publicInput.networkStateHash.equals( - b.publicInput.networkStateHash - ) - ) - .toBoolean(), - }, - this.flowCreator - ); - transactionMergingFlow.onCompletion(async (blockProof) => { - flow.state.blockPairings[blockNumber].blockProof = blockProof; - await this.pushBlockPairing(flow, blockMergingFlow, blockNumber); - }); - transactionMergingFlow.deferErrorsTo(flow); - - // Execute if the block is empty - await flow.forEach( - blockTrace.transactions, - async (trace, transactionIndex) => { - // Push runtime task - await flow.pushTask( - this.runtimeProvingTask, - trace.runtimeProver, - async (result) => { - flow.state.pairings[blockNumber][ - transactionIndex - ].runtimeProof = result; - await this.pushPairing( - flow, - transactionMergingFlow, - blockNumber, - transactionIndex - ); - } - ); - - // TODO Dummy ST Proof for transactions that don't emit STs - - const stReductionFlow = this.createSTMergeFlow( - `tx-stproof-${batchId}-${blockNumber}-${transactionIndex}`, - trace.stateTransitionProver.length - ); - stReductionFlow.onCompletion(async (result) => { - flow.state.pairings[blockNumber][transactionIndex].stProof = - result; - await this.pushPairing( - flow, - transactionMergingFlow, - blockNumber, - transactionIndex - ); - }); - stReductionFlow.deferErrorsTo(flow); - - await flow.forEach(trace.stateTransitionProver, async (stp) => { - await stReductionFlow.pushInput(stp); - }); - } - ); - } else { - const piObject = { - stateRoot: - blockTrace.stateTransitionProver[0].publicInput.stateRoot, - networkStateHash: blockTrace.block.publicInput.networkStateHash, - transactionsHash: Field(0), - blockHashRoot: Field(0), - - eternalTransactionsHash: - blockTrace.block.publicInput.eternalTransactionsHash, - incomingMessagesHash: - blockTrace.block.publicInput.incomingMessagesHash, - blockNumber: MAX_FIELD, - }; - const publicInput = new BlockProverPublicInput(piObject); - - // TODO Set publicInput.stateRoot to result after block hooks! 
- const publicOutput = new BlockProverPublicOutput({ - ...piObject, - closed: Bool(true), - }); - - // Provide a dummy prove is this block is empty - const proof = - await this.protocol.blockProver.zkProgrammable.zkProgram[0].Proof.dummy( - publicInput, - publicOutput, - 2 - ); - - flow.state.blockPairings[blockNumber].blockProof = proof; - await this.pushBlockPairing(flow, blockMergingFlow, blockNumber); - } - - // Push block STs - if (blockTrace.stateTransitionProver[0].stateTransitions.length === 0) { - // Build a dummy proof in case no STs have been emitted - const [{ publicInput }] = blockTrace.stateTransitionProver; - - flow.state.blockPairings[blockNumber].stProof = - await this.protocol.stateTransitionProver.zkProgrammable.zkProgram[0].Proof.dummy( - publicInput, - publicInput, - 2 - ); - - await this.pushBlockPairing(flow, blockMergingFlow, blockNumber); - } else { - const blockSTFlow = this.createSTMergeFlow( - `block-stproof-${batchId}-${blockNumber}`, - blockTrace.stateTransitionProver.length - ); - - blockSTFlow.onCompletion(async (result) => { - flow.state.blockPairings[blockNumber].stProof = result; - await this.pushBlockPairing(flow, blockMergingFlow, blockNumber); - }); - blockSTFlow.deferErrorsTo(flow); - - await flow.forEach(blockTrace.stateTransitionProver, async (stp) => { - await blockSTFlow.pushInput(stp); - }); - } - }); - }); - } -} diff --git a/packages/sequencer/src/protocol/production/TransactionTraceService.ts b/packages/sequencer/src/protocol/production/TransactionTraceService.ts deleted file mode 100644 index 2d225259e..000000000 --- a/packages/sequencer/src/protocol/production/TransactionTraceService.ts +++ /dev/null @@ -1,381 +0,0 @@ -import { injectable, Lifecycle, scoped } from "tsyringe"; -import { - BlockProverPublicInput, - DefaultProvableHashList, - NetworkState, - ProtocolConstants, - ProvableHashList, - ProvableStateTransition, - ProvableStateTransitionType, - StateTransitionProverPublicInput, - StateTransitionType, -} from "@proto-kit/protocol"; -import { MAX_FIELD, RollupMerkleTree } from "@proto-kit/common"; -import { Bool, Field } from "o1js"; -import chunk from "lodash/chunk"; - -import { distinctByString } from "../../helpers/utils"; -import { CachedMerkleTreeStore } from "../../state/merkle/CachedMerkleTreeStore"; -import { CachedStateService } from "../../state/state/CachedStateService"; -import { SyncCachedMerkleTreeStore } from "../../state/merkle/SyncCachedMerkleTreeStore"; -import type { - TransactionExecutionResult, - BlockWithResult, -} from "../../storage/model/Block"; -import { AsyncMerkleTreeStore } from "../../state/async/AsyncMerkleTreeStore"; -import { VerificationKeyService } from "../runtime/RuntimeVerificationKeyService"; - -import type { TransactionTrace, BlockTrace } from "./BatchProducerModule"; -import { UntypedStateTransition } from "./helpers/UntypedStateTransition"; -import { StateTransitionProofParameters } from "./tasks/StateTransitionTask"; - -export type TaskStateRecord = Record; - -@injectable() -@scoped(Lifecycle.ContainerScoped) -export class TransactionTraceService { - private allKeys(stateTransitions: UntypedStateTransition[]): Field[] { - // We have to do the distinct with strings because - // array.indexOf() doesn't work with fields - return stateTransitions.map((st) => st.path).filter(distinctByString); - } - - private async collectStartingState( - stateTransitions: UntypedStateTransition[] - ): Promise { - const stateEntries = stateTransitions - // Filter distinct - .filter( - (st, index, array) => - 
array.findIndex( - (st2) => st2.path.toBigInt() === st.path.toBigInt() - ) === index - ) - // Filter out STs that have isSome: false as precondition, because this means - // "state hasn't been set before" and has to correlate to a precondition on Field(0) - // and for that the state has to be undefined - .filter((st) => st.fromValue.isSome.toBoolean()) - .map((st) => [st.path.toString(), st.fromValue.value]); - - return Object.fromEntries(stateEntries); - } - - private async applyTransitions( - stateService: CachedStateService, - stateTransitions: UntypedStateTransition[] - ): Promise { - // Use updated stateTransitions since only they will have the - // right values - const writes = stateTransitions - .filter((st) => st.toValue.isSome.toBoolean()) - .map((st) => { - return { key: st.path, value: st.toValue.toFields() }; - }); - stateService.writeStates(writes); - await stateService.commit(); - } - - public async createBlockTrace( - traces: TransactionTrace[], - stateServices: { - stateService: CachedStateService; - merkleStore: CachedMerkleTreeStore; - }, - blockHashTreeStore: AsyncMerkleTreeStore, - beforeBlockStateRoot: Field, - block: BlockWithResult - ): Promise { - const stateTransitions = block.result.blockStateTransitions; - - const startingState = await this.collectStartingState(stateTransitions); - - let stParameters: StateTransitionProofParameters[]; - let fromStateRoot: Field; - - if (stateTransitions.length > 0) { - await this.applyTransitions(stateServices.stateService, stateTransitions); - - ({ stParameters, fromStateRoot } = await this.createMerkleTrace( - stateServices.merkleStore, - [], - stateTransitions, - true - )); - } else { - await stateServices.merkleStore.preloadKey(0n); - - fromStateRoot = Field( - stateServices.merkleStore.getNode(0n, RollupMerkleTree.HEIGHT - 1) ?? - RollupMerkleTree.EMPTY_ROOT - ); - - stParameters = [ - { - stateTransitions: [], - merkleWitnesses: [], - - publicInput: new StateTransitionProverPublicInput({ - stateRoot: fromStateRoot, - protocolStateRoot: fromStateRoot, - stateTransitionsHash: Field(0), - protocolTransitionsHash: Field(0), - }), - }, - ]; - } - - const fromNetworkState = block.block.networkState.before; - - const publicInput = new BlockProverPublicInput({ - transactionsHash: Field(0), - networkStateHash: fromNetworkState.hash(), - stateRoot: beforeBlockStateRoot, - blockHashRoot: block.block.fromBlockHashRoot, - eternalTransactionsHash: block.block.fromEternalTransactionsHash, - incomingMessagesHash: block.block.fromMessagesHash, - blockNumber: block.block.height, - }); - - return { - transactions: traces, - stateTransitionProver: stParameters, - - block: { - networkState: fromNetworkState, - publicInput, - blockWitness: block.result.blockHashWitness, - startingState, - }, - }; - } - - /** - * What is in a trace? - * A trace has two parts: - * 1. start values of storage keys accessed by all state transitions - * 2. Merkle Witnesses of the keys accessed by the state transitions - * - * How do we create a trace? - * - * 1. We execute the transaction and create the stateTransitions - * The first execution is done with a DummyStateService to find out the - * accessed keys that can then be cached for the actual run, which generates - * the correct state transitions and has to be done for the next - * transactions to be based on the correct state. - * - * 2. We extract the accessed keys, download the state and put it into - * AppChainProveParams - * - * 3. 
We retrieve merkle witnesses for each step and put them into - * StateTransitionProveParams - */ - public async createTransactionTrace( - executionResult: TransactionExecutionResult, - stateServices: { - stateService: CachedStateService; - merkleStore: CachedMerkleTreeStore; - }, - verificationKeyService: VerificationKeyService, - networkState: NetworkState, - bundleTracker: ProvableHashList, - eternalBundleTracker: ProvableHashList, - messageTracker: ProvableHashList - ): Promise { - const { stateTransitions, protocolTransitions, status, tx } = - executionResult; - - // Collect starting state - const protocolStartingState = - await this.collectStartingState(protocolTransitions); - - await this.applyTransitions( - stateServices.stateService, - protocolTransitions - ); - - const runtimeStartingState = - await this.collectStartingState(stateTransitions); - - if (status.toBoolean()) { - await this.applyTransitions(stateServices.stateService, stateTransitions); - } - - // Step 3 - const { stParameters, fromStateRoot } = await this.createMerkleTrace( - stateServices.merkleStore, - stateTransitions, - protocolTransitions, - status.toBoolean() - ); - - const transactionsHash = bundleTracker.commitment; - const eternalTransactionsHash = eternalBundleTracker.commitment; - const incomingMessagesHash = messageTracker.commitment; - - if (tx.isMessage) { - messageTracker.push(tx.hash()); - } else { - bundleTracker.push(tx.hash()); - eternalBundleTracker.push(tx.hash()); - } - - const signedTransaction = tx.toProtocolTransaction(); - - const verificationKeyAttestation = verificationKeyService.getAttestation( - tx.methodId.toBigInt() - ); - - return { - runtimeProver: { - tx, - state: runtimeStartingState, - networkState, - }, - - stateTransitionProver: stParameters, - - blockProver: { - publicInput: { - stateRoot: fromStateRoot, - transactionsHash, - eternalTransactionsHash, - incomingMessagesHash, - networkStateHash: networkState.hash(), - blockHashRoot: Field(0), - blockNumber: MAX_FIELD, - }, - - executionData: { - networkState, - transaction: signedTransaction.transaction, - signature: signedTransaction.signature, - }, - - startingState: protocolStartingState, - verificationKeyAttestation, - }, - }; - } - - private async createMerkleTrace( - merkleStore: CachedMerkleTreeStore, - stateTransitions: UntypedStateTransition[], - protocolTransitions: UntypedStateTransition[], - runtimeSuccess: boolean - ): Promise<{ - stParameters: StateTransitionProofParameters[]; - fromStateRoot: Field; - }> { - const keys = this.allKeys(protocolTransitions.concat(stateTransitions)); - - const runtimeSimulationMerkleStore = new SyncCachedMerkleTreeStore( - merkleStore - ); - - await merkleStore.preloadKeys(keys.map((key) => key.toBigInt())); - - const tree = new RollupMerkleTree(merkleStore); - const runtimeTree = new RollupMerkleTree(runtimeSimulationMerkleStore); - // const runtimeTree = new RollupMerkleTree(merkleStore); - const initialRoot = tree.getRoot(); - - const transitionsList = new DefaultProvableHashList( - ProvableStateTransition - ); - const protocolTransitionsList = new DefaultProvableHashList( - ProvableStateTransition - ); - - const allTransitions = protocolTransitions - .map< - [UntypedStateTransition, boolean] - >((protocolTransition) => [protocolTransition, StateTransitionType.protocol]) - .concat( - stateTransitions.map((transition) => [ - transition, - StateTransitionType.normal, - ]) - ); - - let stateRoot = initialRoot; - let protocolStateRoot = initialRoot; - - const stParameters = chunk( - 
allTransitions, - ProtocolConstants.stateTransitionProverBatchSize - ).map((currentChunk, index) => { - const fromStateRoot = stateRoot; - const fromProtocolStateRoot = protocolStateRoot; - - const stateTransitionsHash = transitionsList.commitment; - const protocolTransitionsHash = protocolTransitionsList.commitment; - - // Map all STs to traces for current chunk - - const merkleWitnesses = currentChunk.map(([transition, type]) => { - // Select respective tree (whether type is protocol - // (which will be applied no matter what) - // or runtime (which might be thrown away) - const usedTree = StateTransitionType.isProtocol(type) - ? tree - : runtimeTree; - - const provableTransition = transition.toProvable(); - - const witness = usedTree.getWitness(provableTransition.path.toBigInt()); - - if (provableTransition.to.isSome.toBoolean()) { - usedTree.setLeaf( - provableTransition.path.toBigInt(), - provableTransition.to.value - ); - - stateRoot = usedTree.getRoot(); - if (StateTransitionType.isProtocol(type)) { - protocolStateRoot = stateRoot; - } - } - - // Push transition to respective hashlist - (StateTransitionType.isNormal(type) - ? transitionsList - : protocolTransitionsList - ).pushIf( - provableTransition, - provableTransition.path.equals(Field(0)).not() - ); - - return witness; - }); - - return { - merkleWitnesses, - - stateTransitions: currentChunk.map(([st, type]) => { - return { - transition: st.toProvable(), - type: new ProvableStateTransitionType({ type: Bool(type) }), - }; - }), - - publicInput: { - stateRoot: fromStateRoot, - protocolStateRoot: fromProtocolStateRoot, - stateTransitionsHash, - protocolTransitionsHash, - }, - }; - }); - - // If runtime succeeded, merge runtime changes into parent, - // otherwise throw them away - if (runtimeSuccess) { - runtimeSimulationMerkleStore.mergeIntoParent(); - } - - return { - stParameters, - fromStateRoot: initialRoot, - }; - } -} diff --git a/packages/sequencer/src/protocol/production/flow/BatchFlow.ts b/packages/sequencer/src/protocol/production/flow/BatchFlow.ts new file mode 100644 index 000000000..045b0493f --- /dev/null +++ b/packages/sequencer/src/protocol/production/flow/BatchFlow.ts @@ -0,0 +1,139 @@ +import { inject, injectable, Lifecycle, scoped } from "tsyringe"; +import { + BlockProof, + MandatoryProtocolModulesRecord, + Protocol, + StateTransitionProverPublicInput, + StateTransitionProverPublicOutput, +} from "@proto-kit/protocol"; +import { + isFull, + mapSequential, + MAX_FIELD, + Nullable, + range, +} from "@proto-kit/common"; + +import { FlowCreator } from "../../../worker/flow/Flow"; +import { NewBlockProvingParameters, NewBlockTask } from "../tasks/NewBlockTask"; +import { BlockReductionTask } from "../tasks/BlockReductionTask"; +import { BatchTrace } from "../tracing/BatchTracingService"; + +import { ReductionTaskFlow } from "./ReductionTaskFlow"; +import { StateTransitionFlow } from "./StateTransitionFlow"; +import { BlockFlow } from "./BlockFlow"; + +@injectable() +@scoped(Lifecycle.ContainerScoped) +export class BatchFlow { + public constructor( + private readonly flowCreator: FlowCreator, + private readonly blockProvingTask: NewBlockTask, + private readonly blockReductionTask: BlockReductionTask, + private readonly stateTransitionFlow: StateTransitionFlow, + private readonly blockFlow: BlockFlow, + @inject("Protocol") + private readonly protocol: Protocol + ) {} + + private isBlockProofsMergable(a: BlockProof, b: BlockProof): boolean { + // TODO Proper replication of merge logic + const part1 = 
a.publicOutput.stateRoot + .equals(b.publicInput.stateRoot) + .and(a.publicOutput.blockHashRoot.equals(b.publicInput.blockHashRoot)) + .and( + a.publicOutput.networkStateHash.equals(b.publicInput.networkStateHash) + ) + .and( + a.publicOutput.eternalTransactionsHash.equals( + b.publicInput.eternalTransactionsHash + ) + ) + .and(a.publicOutput.closed.equals(b.publicOutput.closed)) + .toBoolean(); + + const proof1Closed = a.publicOutput.closed; + const proof2Closed = b.publicOutput.closed; + + const blockNumberProgressionValid = a.publicOutput.blockNumber.equals( + b.publicInput.blockNumber + ); + + const isValidTransactionMerge = a.publicInput.blockNumber + .equals(MAX_FIELD) + .and(blockNumberProgressionValid) + .and(proof1Closed.or(proof2Closed).not()); + + const isValidClosedMerge = proof1Closed + .and(proof2Closed) + .and(blockNumberProgressionValid); + + return part1 && isValidClosedMerge.or(isValidTransactionMerge).toBoolean(); + } + + private async pushBlockInput( + inputs: Nullable, + batchFlow: ReductionTaskFlow + ) { + if (isFull(inputs)) { + await batchFlow.pushInput(inputs); + } + } + + private dummySTProof() { + return this.protocol.stateTransitionProver.zkProgrammable.zkProgram[0].Proof.dummy( + StateTransitionProverPublicInput.empty(), + StateTransitionProverPublicOutput.empty(), + 2 + ); + } + + public async executeBatch(batch: BatchTrace, batchId: number) { + const batchFlow = new ReductionTaskFlow( + { + name: `batch-${batchId}`, + inputLength: batch.blocks.length, + mappingTask: this.blockProvingTask, + reductionTask: this.blockReductionTask, + mergableFunction: this.isBlockProofsMergable, + }, + this.flowCreator + ); + + const map: Record< + number, + Nullable + > = Object.fromEntries( + batch.blocks.map((trace, i) => [ + i, + { params: trace.blockParams, input1: undefined, input2: undefined }, + ]) + ); + + const dummySTProof = await this.dummySTProof(); + range(0, batch.blocks.length - 1).forEach((index) => { + map[index].input1 = dummySTProof; + }); + + await this.stateTransitionFlow.executeBatches( + batch.stateTransitionTrace, + batchId, + async (proof) => { + const index = batch.blocks.length - 1; + map[index].input1 = proof; + await this.pushBlockInput(map[index], batchFlow); + } + ); + + await mapSequential(batch.blocks, async (trace, blockIndex) => { + await this.blockFlow.executeBlock(trace, async (proof) => { + map[blockIndex].input2 = proof; + await this.pushBlockInput(map[blockIndex], batchFlow); + }); + }); + + return await new Promise((res, rej) => { + batchFlow.onCompletion(async (result) => res(result)); + }); + } +} diff --git a/packages/sequencer/src/protocol/production/flow/BlockFlow.ts b/packages/sequencer/src/protocol/production/flow/BlockFlow.ts new file mode 100644 index 000000000..8dea8b59c --- /dev/null +++ b/packages/sequencer/src/protocol/production/flow/BlockFlow.ts @@ -0,0 +1,114 @@ +import { inject, injectable, Lifecycle, scoped } from "tsyringe"; +import { + BlockProof, + BlockProverPublicInput, + BlockProverPublicOutput, + MandatoryProtocolModulesRecord, + Protocol, +} from "@proto-kit/protocol"; +import { Bool, Field } from "o1js"; +import { MAX_FIELD } from "@proto-kit/common"; + +import { TransactionProvingTask } from "../tasks/TransactionProvingTask"; +import { BlockReductionTask } from "../tasks/BlockReductionTask"; +import { TransactionProvingTaskParameters } from "../tasks/serializers/types/TransactionProvingTypes"; +import { FlowCreator } from "../../../worker/flow/Flow"; +import { BlockTrace } from 
"../tracing/BlockTracingService"; + +import { ReductionTaskFlow } from "./ReductionTaskFlow"; +import { TransactionFlow } from "./TransactionFlow"; + +@injectable() +@scoped(Lifecycle.ContainerScoped) +export class BlockFlow { + public constructor( + private readonly flowCreator: FlowCreator, + @inject("Protocol") + private readonly protocol: Protocol, + private readonly transactionProvingTask: TransactionProvingTask, + private readonly blockReductionTask: BlockReductionTask, + private readonly transactionFlow: TransactionFlow + ) {} + + private async dummyTransactionProof(trace: BlockTrace) { + const publicInput = { + ...trace.blockParams.publicInput, + networkStateHash: Field(0), + transactionsHash: Field(0), + blockHashRoot: Field(0), + blockNumber: MAX_FIELD, + } satisfies BlockProverPublicInput; + + // TODO Set publicInput.stateRoot to result after block hooks! + const publicOutput = new BlockProverPublicOutput({ + ...publicInput, + closed: Bool(true), + }); + + return await this.protocol.blockProver.zkProgrammable.zkProgram[0].Proof.dummy( + publicInput, + publicOutput, + 2 + ); + } + + private async executeTransactions( + trace: BlockTrace + ): Promise> { + const transactionFlow = new ReductionTaskFlow( + { + name: `transactions-${trace.height}`, + inputLength: trace.transactions.length, + mappingTask: this.transactionProvingTask, + reductionTask: this.blockReductionTask, + + mergableFunction: (a, b) => + a.publicOutput.stateRoot + .equals(b.publicInput.stateRoot) + .and( + a.publicOutput.transactionsHash.equals( + b.publicInput.transactionsHash + ) + ) + .and( + a.publicInput.networkStateHash.equals( + b.publicInput.networkStateHash + ) + ) + .toBoolean(), + }, + this.flowCreator + ); + + await transactionFlow.flow.forEach( + trace.transactions, + async (transactionTrace, txIndex) => { + await this.transactionFlow.proveRuntimes( + transactionTrace, + trace.height, + txIndex, + async (parameters) => { + await transactionFlow.pushInput(parameters); + } + ); + } + ); + + return transactionFlow; + } + + public async executeBlock( + trace: BlockTrace, + callback: (proof: BlockProof) => Promise + ) { + if (trace.transactions.length === 0) { + const proof = await this.dummyTransactionProof(trace); + await callback(proof); + } else { + const flow = await this.executeTransactions(trace); + flow.onCompletion(async (result) => { + await callback(result); + }); + } + } +} diff --git a/packages/sequencer/src/protocol/production/flow/ReductionTaskFlow.ts b/packages/sequencer/src/protocol/production/flow/ReductionTaskFlow.ts index 95f28ba15..1ae3ec92b 100644 --- a/packages/sequencer/src/protocol/production/flow/ReductionTaskFlow.ts +++ b/packages/sequencer/src/protocol/production/flow/ReductionTaskFlow.ts @@ -28,7 +28,7 @@ export interface PairingDerivedInput { * We use this pattern extensively in our pipeline, */ export class ReductionTaskFlow { - private readonly flow: Flow>; + public readonly flow: Flow>; private started = false; diff --git a/packages/sequencer/src/protocol/production/flow/StateTransitionFlow.ts b/packages/sequencer/src/protocol/production/flow/StateTransitionFlow.ts new file mode 100644 index 000000000..6321920b0 --- /dev/null +++ b/packages/sequencer/src/protocol/production/flow/StateTransitionFlow.ts @@ -0,0 +1,93 @@ +import { inject, injectable, Lifecycle, scoped } from "tsyringe"; +import { + MandatoryProtocolModulesRecord, + Protocol, + StateTransitionProof, + StateTransitionProverPublicInput, + StateTransitionProverPublicOutput, +} from "@proto-kit/protocol"; +import { 
Field } from "o1js"; + +import { FlowCreator } from "../../../worker/flow/Flow"; +import { + StateTransitionProofParameters, + StateTransitionTask, +} from "../tasks/StateTransitionTask"; +import { StateTransitionReductionTask } from "../tasks/StateTransitionReductionTask"; + +import { ReductionTaskFlow } from "./ReductionTaskFlow"; + +@injectable() +@scoped(Lifecycle.ContainerScoped) +export class StateTransitionFlow { + public constructor( + @inject("Protocol") + private readonly protocol: Protocol, + private readonly flowCreator: FlowCreator, + private readonly stateTransitionTask: StateTransitionTask, + private readonly stateTransitionReductionTask: StateTransitionReductionTask + ) {} + + private async dummySTProof(): Promise { + const emptyInputOutput: StateTransitionProverPublicInput & + StateTransitionProverPublicOutput = { + root: Field(0), + currentBatchStateHash: Field(0), + batchesHash: Field(0), + witnessedRootsHash: Field(0), + }; + + return await this.protocol.stateTransitionProver.zkProgrammable.zkProgram[0].Proof.dummy( + emptyInputOutput, + emptyInputOutput, + 2 + ); + } + + private createFlow(name: string, inputLength: number) { + return new ReductionTaskFlow( + { + name, + inputLength, + mappingTask: this.stateTransitionTask, + reductionTask: this.stateTransitionReductionTask, + + mergableFunction: (a, b) => + a.publicOutput.root + .equals(b.publicInput.root) + .and( + a.publicOutput.witnessedRootsHash.equals( + b.publicInput.witnessedRootsHash + ) + ) + .and( + a.publicOutput.currentBatchStateHash.equals( + b.publicInput.currentBatchStateHash + ) + ) + .and(a.publicOutput.batchesHash.equals(b.publicInput.batchesHash)) + .toBoolean(), + }, + this.flowCreator + ); + } + + public async executeBatches( + trace: StateTransitionProofParameters[], + batchId: number, + callback: (result: StateTransitionProof) => Promise + ) { + if (trace.length > 0) { + const flow = this.createFlow(`st-proof-${batchId}`, trace.length); + + await flow.flow.forEach(trace, async (input) => { + await flow.pushInput(input); + }); + + flow.onCompletion(callback); + } else { + const dummy = await this.dummySTProof(); + await callback(dummy); + } + } +} diff --git a/packages/sequencer/src/protocol/production/flow/TransactionFlow.ts b/packages/sequencer/src/protocol/production/flow/TransactionFlow.ts new file mode 100644 index 000000000..d124a352c --- /dev/null +++ b/packages/sequencer/src/protocol/production/flow/TransactionFlow.ts @@ -0,0 +1,89 @@ +import { injectable } from "tsyringe"; + +import { Flow, FlowCreator } from "../../../worker/flow/Flow"; +import { + RuntimeProof, + TransactionProvingTaskParameters, + TransactionProvingType, +} from "../tasks/serializers/types/TransactionProvingTypes"; +import { RuntimeProvingTask } from "../tasks/RuntimeProvingTask"; +import { TransactionTrace } from "../tracing/TransactionTracingService"; + +@injectable() +export class TransactionFlow { + public constructor( + private readonly flowCreator: FlowCreator, + private readonly runtimeProvingTask: RuntimeProvingTask + ) {} + + private async resolveTransactionFlow( + flow: Flow<{ + runtimeProofs: { proof: RuntimeProof; index: number }[]; + }>, + trace: TransactionTrace, + callback: (params: TransactionProvingTaskParameters) => Promise + ) { + const requiredLength = trace.type === TransactionProvingType.MULTI ? 
2 : 1; + + if (flow.state.runtimeProofs.length === requiredLength) { + let parameters: TransactionProvingTaskParameters; + + if (trace.type === TransactionProvingType.MULTI) { + // Sort ascending + const sorted = flow.state.runtimeProofs.sort( + ({ index: a }, { index: b }) => a - b + ); + parameters = { + type: trace.type, + parameters: trace.transaction, + proof1: sorted[0].proof, + proof2: sorted[1].proof, + }; + } else { + parameters = { + type: trace.type, + parameters: trace.transaction, + proof1: flow.state.runtimeProofs[0].proof, + }; + } + + await callback(parameters); + } + } + + public async proveRuntimes( + trace: TransactionTrace, + blockHeight: string, + txIndex: number, + callback: (params: TransactionProvingTaskParameters) => Promise + ) { + const name = `transaction-${blockHeight}-${txIndex}${ + trace.type === TransactionProvingType.MULTI ? "-double" : "" + }`; + const flow = this.flowCreator.createFlow<{ + runtimeProofs: { proof: RuntimeProof; index: number }[]; + }>(name, { + runtimeProofs: [], + }); + + await flow.pushTask( + this.runtimeProvingTask, + trace.runtime[0], + async (proof) => { + flow.state.runtimeProofs.push({ proof, index: 0 }); + await this.resolveTransactionFlow(flow, trace, callback); + } + ); + + if (trace.type === TransactionProvingType.MULTI) { + await flow.pushTask( + this.runtimeProvingTask, + trace.runtime[1], + async (proof) => { + flow.state.runtimeProofs.push({ proof, index: 1 }); + await this.resolveTransactionFlow(flow, trace, callback); + } + ); + } + } +} diff --git a/packages/sequencer/src/protocol/production/sequencing/BlockProducerModule.ts b/packages/sequencer/src/protocol/production/sequencing/BlockProducerModule.ts index 83fe9bbbf..592fab6b8 100644 --- a/packages/sequencer/src/protocol/production/sequencing/BlockProducerModule.ts +++ b/packages/sequencer/src/protocol/production/sequencing/BlockProducerModule.ts @@ -23,7 +23,6 @@ import { BlockResult, BlockWithResult, } from "../../../storage/model/Block"; -import { CachedStateService } from "../../../state/state/CachedStateService"; import { MessageStorage } from "../../../storage/repositories/MessageStorage"; import { Database } from "../../../storage/Database"; @@ -108,16 +107,18 @@ export class BlockProducerModule extends SequencerModule { } public async generateMetadata(block: Block): Promise { - const { result, blockHashTreeStore, treeStore } = + const { result, blockHashTreeStore, treeStore, stateService } = await this.resultService.generateMetadataForNextBlock( block, this.unprovenMerkleStore, - this.blockTreeStore + this.blockTreeStore, + this.unprovenStateService ); await this.database.executeInTransaction(async () => { await blockHashTreeStore.mergeIntoParent(); await treeStore.mergeIntoParent(); + await stateService.mergeIntoParent(); await this.blockQueue.pushResult(result); }); @@ -210,27 +211,25 @@ export class BlockProducerModule extends SequencerModule { return undefined; } - const cachedStateService = new CachedStateService( - this.unprovenStateService - ); - - const block = await this.productionService.createBlock( - cachedStateService, + const blockResult = await this.productionService.createBlock( + this.unprovenStateService, txs, metadata, this.allowEmptyBlock() ); - if (block !== undefined) { + if (blockResult !== undefined) { + const { block, stateChanges } = blockResult; + await this.database.executeInTransaction(async () => { - await cachedStateService.mergeIntoParent(); + await stateChanges.mergeIntoParent(); await this.blockQueue.pushBlock(block); }); } 
this.productionInProgress = false; - return block; + return blockResult?.block; } public async blockResultCompleteCheck() { diff --git a/packages/sequencer/src/protocol/production/sequencing/BlockProductionService.ts b/packages/sequencer/src/protocol/production/sequencing/BlockProductionService.ts index b36c3b004..6ff7d9e03 100644 --- a/packages/sequencer/src/protocol/production/sequencing/BlockProductionService.ts +++ b/packages/sequencer/src/protocol/production/sequencing/BlockProductionService.ts @@ -1,13 +1,17 @@ import { inject, injectable, Lifecycle, scoped } from "tsyringe"; import { - DefaultProvableHashList, + BeforeBlockHookArguments, MandatoryProtocolModulesRecord, - MinaActions, MinaActionsHashList, NetworkState, Protocol, ProtocolModulesRecord, ProvableBlockHook, + reduceStateTransitions, + RuntimeTransaction, + StateServiceProvider, + toProvableHookBlockState, + TransactionHashList, } from "@proto-kit/protocol"; import { Field } from "o1js"; import { log } from "@proto-kit/common"; @@ -19,8 +23,14 @@ import { } from "../../../storage/model/Block"; import { CachedStateService } from "../../../state/state/CachedStateService"; import { PendingTransaction } from "../../../mempool/PendingTransaction"; +import { AsyncStateService } from "../../../state/async/AsyncStateService"; +import { UntypedStateTransition } from "../helpers/UntypedStateTransition"; -import { TransactionExecutionService } from "./TransactionExecutionService"; +import { + BlockTrackers, + executeWithExecutionContext, + TransactionExecutionService, +} from "./TransactionExecutionService"; @injectable() @scoped(Lifecycle.ContainerScoped) @@ -30,73 +40,110 @@ export class BlockProductionService { public constructor( @inject("Protocol") protocol: Protocol, - private readonly transactionExecutionService: TransactionExecutionService + private readonly transactionExecutionService: TransactionExecutionService, + @inject("StateServiceProvider") + private readonly stateServiceProvider: StateServiceProvider ) { this.blockHooks = protocol.dependencyContainer.resolveAll("ProvableBlockHook"); } + public async executeBeforeBlockHook( + args: BeforeBlockHookArguments, + inputNetworkState: NetworkState, + cachedStateService: CachedStateService + ) { + this.stateServiceProvider.setCurrentStateService(cachedStateService); + + // Execute beforeBlock hooks + const context = { + networkState: inputNetworkState, + transaction: RuntimeTransaction.dummyTransaction(), + }; + + const executionResult = await executeWithExecutionContext( + async () => + await this.blockHooks.reduce>( + async (networkState, hook) => + await hook.beforeBlock(await networkState, args), + Promise.resolve(inputNetworkState) + ), + context + ); + + this.stateServiceProvider.popCurrentStateService(); + await cachedStateService.applyStateTransitions( + executionResult.stateTransitions + ); + + return executionResult; + } + /** * Main entry point for creating a unproven block with everything * attached that is needed for tracing */ public async createBlock( - stateService: CachedStateService, + asyncStateService: AsyncStateService, transactions: PendingTransaction[], lastBlockWithResult: BlockWithResult, allowEmptyBlocks: boolean - ): Promise { + ): Promise< + | { + block: Block; + stateChanges: CachedStateService; + } + | undefined + > { + const stateService = new CachedStateService(asyncStateService); + const lastResult = lastBlockWithResult.result; const lastBlock = lastBlockWithResult.block; const executionResults: TransactionExecutionResult[] = []; - const 
transactionsHashList = new DefaultProvableHashList(Field); - const eternalTransactionsHashList = new DefaultProvableHashList( - Field, - Field(lastBlock.toEternalTransactionsHash) - ); - const incomingMessagesList = new MinaActionsHashList( Field(lastBlock.toMessagesHash) ); + let blockState: BlockTrackers = { + blockHashRoot: Field(lastResult.blockHashRoot), + eternalTransactionsList: new TransactionHashList( + lastBlock.toEternalTransactionsHash + ), + transactionList: new TransactionHashList(), + incomingMessages: new MinaActionsHashList(lastBlock.toMessagesHash), + }; + // Get used networkState by executing beforeBlock() hooks - const networkState = await this.blockHooks.reduce>( - async (reduceNetworkState, hook) => - await hook.beforeBlock(await reduceNetworkState, { - blockHashRoot: Field(lastResult.blockHashRoot), - eternalTransactionsHash: lastBlock.toEternalTransactionsHash, - stateRoot: Field(lastResult.stateRoot), - transactionsHash: Field(0), - networkStateHash: lastResult.afterNetworkState.hash(), - incomingMessagesHash: lastBlock.toMessagesHash, - }), - Promise.resolve(lastResult.afterNetworkState) + const beforeHookResult = await this.executeBeforeBlockHook( + toProvableHookBlockState(blockState), + lastResult.afterNetworkState, + stateService + ); + + const networkState = beforeHookResult.methodResult; + const beforeBlockStateTransitions = reduceStateTransitions( + beforeHookResult.stateTransitions + ).map((transition) => + UntypedStateTransition.fromStateTransition(transition) ); for (const tx of transactions) { try { // Create execution trace - const executionTrace = + const [newState, executionTrace] = // eslint-disable-next-line no-await-in-loop await this.transactionExecutionService.createExecutionTrace( stateService, tx, - networkState + networkState, + blockState ); + blockState = newState; + // Push result to results and transaction onto bundle-hash executionResults.push(executionTrace); - if (!tx.isMessage) { - transactionsHashList.push(tx.hash()); - eternalTransactionsHashList.push(tx.hash()); - } else { - const actionHash = MinaActions.actionHash( - tx.toRuntimeTransaction().hashData() - ); - - incomingMessagesList.push(actionHash); - } } catch (error) { if (error instanceof Error) { log.error("Error in inclusion of tx, skipping", error); @@ -109,20 +156,21 @@ export class BlockProductionService { if (executionResults.length === 0 && !allowEmptyBlocks) { log.info( - "After sequencing, block has no sequencable transactions left, skipping block" + "After sequencing, block has no sequenceable transactions left, skipping block" ); return undefined; } const block: Omit = { transactions: executionResults, - transactionsHash: transactionsHashList.commitment, + transactionsHash: blockState.transactionList.commitment, fromEternalTransactionsHash: lastBlock.toEternalTransactionsHash, - toEternalTransactionsHash: eternalTransactionsHashList.commitment, + toEternalTransactionsHash: blockState.eternalTransactionsList.commitment, height: lastBlock.hash.toBigInt() !== 0n ? 
lastBlock.height.add(1) : Field(0), fromBlockHashRoot: Field(lastResult.blockHashRoot), fromMessagesHash: lastBlock.toMessagesHash, + fromStateRoot: Field(lastResult.stateRoot), toMessagesHash: incomingMessagesList.commitment, previousBlockHash, @@ -130,13 +178,17 @@ export class BlockProductionService { before: new NetworkState(lastResult.afterNetworkState), during: networkState, }, + beforeBlockStateTransitions, }; const hash = Block.hash(block); return { - ...block, - hash, + block: { + ...block, + hash, + }, + stateChanges: stateService, }; } } diff --git a/packages/sequencer/src/protocol/production/sequencing/BlockResultService.ts b/packages/sequencer/src/protocol/production/sequencing/BlockResultService.ts index 41c3d9ba5..dcebfef0a 100644 --- a/packages/sequencer/src/protocol/production/sequencing/BlockResultService.ts +++ b/packages/sequencer/src/protocol/production/sequencing/BlockResultService.ts @@ -1,15 +1,16 @@ import { Bool, Field, Poseidon } from "o1js"; import { RollupMerkleTree } from "@proto-kit/common"; import { + AfterBlockHookArguments, BlockHashMerkleTree, BlockHashTreeEntry, - BlockProverState, MandatoryProtocolModulesRecord, NetworkState, Protocol, ProtocolModulesRecord, ProvableBlockHook, RuntimeTransaction, + StateServiceProvider, } from "@proto-kit/protocol"; import { inject, injectable, Lifecycle, scoped } from "tsyringe"; @@ -21,6 +22,8 @@ import { import { AsyncMerkleTreeStore } from "../../../state/async/AsyncMerkleTreeStore"; import { CachedMerkleTreeStore } from "../../../state/merkle/CachedMerkleTreeStore"; import { UntypedStateTransition } from "../helpers/UntypedStateTransition"; +import { CachedStateService } from "../../../state/state/CachedStateService"; +import { AsyncStateService } from "../../../state/async/AsyncStateService"; import type { StateRecord } from "../BatchProducerModule"; import { executeWithExecutionContext } from "./TransactionExecutionService"; @@ -39,13 +42,19 @@ function collectStateDiff( ); } -function createCombinedStateDiff(transactions: TransactionExecutionResult[]) { +function createCombinedStateDiff( + transactions: TransactionExecutionResult[], + blockHookSTs: UntypedStateTransition[] +) { // Flatten diff list into a single diff by applying them over each other return transactions .map((tx) => { - const transitions = tx.protocolTransitions.concat( - tx.status.toBoolean() ? 
tx.stateTransitions : [] - ); + const transitions = tx.stateTransitions + .filter(({ applied }) => applied) + .flatMap(({ stateTransitions }) => stateTransitions); + + transitions.push(...blockHookSTs); + return collectStateDiff(transitions); }) .reduce((accumulator, diff) => { @@ -61,62 +70,25 @@ export class BlockResultService { public constructor( @inject("Protocol") - protocol: Protocol + protocol: Protocol, + @inject("StateServiceProvider") + private readonly stateServiceProvider: StateServiceProvider ) { this.blockHooks = protocol.dependencyContainer.resolveAll("ProvableBlockHook"); } - public async generateMetadataForNextBlock( - block: Block, - merkleTreeStore: AsyncMerkleTreeStore, - blockHashTreeStore: AsyncMerkleTreeStore, - modifyTreeStore = true - ): Promise<{ - result: BlockResult; - treeStore: CachedMerkleTreeStore; - blockHashTreeStore: CachedMerkleTreeStore; - }> { - const combinedDiff = createCombinedStateDiff(block.transactions); - - const inMemoryStore = new CachedMerkleTreeStore(merkleTreeStore); - const tree = new RollupMerkleTree(inMemoryStore); - const blockHashInMemoryStore = new CachedMerkleTreeStore( - blockHashTreeStore - ); - const blockHashTree = new BlockHashMerkleTree(blockHashInMemoryStore); - - await inMemoryStore.preloadKeys(Object.keys(combinedDiff).map(BigInt)); - - // In case the diff is empty, we preload key 0 in order to - // retrieve the root, which we need later - if (Object.keys(combinedDiff).length === 0) { - await inMemoryStore.preloadKey(0n); - } - - // TODO This can be optimized a lot (we are only interested in the root at this step) - await blockHashInMemoryStore.preloadKey(block.height.toBigInt()); - - Object.entries(combinedDiff).forEach(([key, state]) => { - const treeValue = state !== undefined ? Poseidon.hash(state) : Field(0); - tree.setLeaf(BigInt(key), treeValue); - }); - - const stateRoot = tree.getRoot(); - const fromBlockHashRoot = blockHashTree.getRoot(); - - const state: BlockProverState = { - stateRoot, - transactionsHash: block.transactionsHash, - networkStateHash: block.networkState.during.hash(), - eternalTransactionsHash: block.toEternalTransactionsHash, - blockHashRoot: fromBlockHashRoot, - incomingMessagesHash: block.toMessagesHash, - }; + public async executeAfterBlockHook( + args: AfterBlockHookArguments, + inputNetworkState: NetworkState, + asyncStateService: AsyncStateService + ) { + const cachedStateService = new CachedStateService(asyncStateService); + this.stateServiceProvider.setCurrentStateService(cachedStateService); - // TODO Set StateProvider for @state access to state + // Execute afterBlock hooks const context = { - networkState: block.networkState.during, + networkState: inputNetworkState, transaction: RuntimeTransaction.dummyTransaction(), }; @@ -124,39 +96,148 @@ export class BlockResultService { async () => await this.blockHooks.reduce>( async (networkState, hook) => - await hook.afterBlock(await networkState, state), - Promise.resolve(block.networkState.during) + await hook.afterBlock(await networkState, args), + Promise.resolve(inputNetworkState) ), context ); - const { stateTransitions, methodResult } = executionResult; + this.stateServiceProvider.popCurrentStateService(); + await cachedStateService.applyStateTransitions( + executionResult.stateTransitions + ); + + return { + executionResult, + cachedStateService, + }; + } + + /** Update the block hash tree with this block */ + private async insertIntoBlockHashTree( + block: Block, + blockHashTreeStore: AsyncMerkleTreeStore + ) { + const 
blockHashInMemoryStore = new CachedMerkleTreeStore( + blockHashTreeStore + ); + + // TODO This can be optimized a lot (we are only interested in the root at this step) + await blockHashInMemoryStore.preloadKey(block.height.toBigInt()); + + const blockHashTree = new BlockHashMerkleTree(blockHashInMemoryStore); - // Update the block hash tree with this block blockHashTree.setLeaf( block.height.toBigInt(), new BlockHashTreeEntry({ - blockHash: Poseidon.hash([block.height, state.transactionsHash]), + block: { + index: block.height, + transactionListHash: block.transactionsHash, + }, closed: Bool(true), }).hash() ); const blockHashWitness = blockHashTree.getWitness(block.height.toBigInt()); const newBlockHashRoot = blockHashTree.getRoot(); + return { + blockHashWitness, + blockHashRoot: newBlockHashRoot, + cachedBlockHashTreeStore: blockHashInMemoryStore, + }; + } + + public async applyStateDiff( + store: CachedMerkleTreeStore, + stateDiff: StateRecord + ): Promise { + await store.preloadKeys(Object.keys(stateDiff).map(BigInt)); + + // In case the diff is empty, we preload key 0 in order to + // retrieve the root, which we need later + if (Object.keys(stateDiff).length === 0) { + await store.preloadKey(0n); + } + + const tree = new RollupMerkleTree(store); + + Object.entries(stateDiff).forEach(([key, state]) => { + const treeValue = state !== undefined ? Poseidon.hash(state) : Field(0); + tree.setLeaf(BigInt(key), treeValue); + }); + + return tree; + } + + public async generateMetadataForNextBlock( + block: Block, + merkleTreeStore: AsyncMerkleTreeStore, + blockHashTreeStore: AsyncMerkleTreeStore, + stateService: AsyncStateService + ): Promise<{ + result: BlockResult; + treeStore: CachedMerkleTreeStore; + blockHashTreeStore: CachedMerkleTreeStore; + stateService: CachedStateService; + }> { + const combinedDiff = createCombinedStateDiff( + block.transactions, + block.beforeBlockStateTransitions + ); + + const inMemoryStore = new CachedMerkleTreeStore(merkleTreeStore); + + const tree = await this.applyStateDiff(inMemoryStore, combinedDiff); + + const witnessedStateRoot = tree.getRoot(); + + const { blockHashWitness, blockHashRoot, cachedBlockHashTreeStore } = + await this.insertIntoBlockHashTree(block, blockHashTreeStore); + + const { + executionResult: { stateTransitions, methodResult }, + cachedStateService, + } = await this.executeAfterBlockHook( + { + blockHashRoot, + stateRoot: witnessedStateRoot, + incomingMessagesHash: block.toMessagesHash, + transactionsHash: block.transactionsHash, + eternalTransactionsHash: block.toEternalTransactionsHash, + }, + block.networkState.during, + stateService + ); + + // Apply afterBlock STs to the tree + const tree2 = await this.applyStateDiff( + inMemoryStore, + collectStateDiff( + stateTransitions.map((stateTransition) => + UntypedStateTransition.fromStateTransition(stateTransition) + ) + ) + ); + + const stateRoot = tree2.getRoot(); + return { result: { afterNetworkState: methodResult, + // This is the state root after the last tx and the afterBlock hook stateRoot: stateRoot.toBigInt(), - blockHashRoot: newBlockHashRoot.toBigInt(), + witnessedRoots: [witnessedStateRoot.toBigInt()], + blockHashRoot: blockHashRoot.toBigInt(), blockHashWitness, - blockStateTransitions: stateTransitions.map((st) => + afterBlockStateTransitions: stateTransitions.map((st) => UntypedStateTransition.fromStateTransition(st) ), blockHash: block.hash.toBigInt(), }, treeStore: inMemoryStore, - blockHashTreeStore: blockHashInMemoryStore, + blockHashTreeStore: 
cachedBlockHashTreeStore, + stateService: cachedStateService, }; } } diff --git a/packages/sequencer/src/protocol/production/sequencing/TransactionExecutionService.ts b/packages/sequencer/src/protocol/production/sequencing/TransactionExecutionService.ts index 922826e44..d013c9a0c 100644 --- a/packages/sequencer/src/protocol/production/sequencing/TransactionExecutionService.ts +++ b/packages/sequencer/src/protocol/production/sequencing/TransactionExecutionService.ts @@ -1,6 +1,7 @@ +import assert from "node:assert"; + import { container, inject, injectable, Lifecycle, scoped } from "tsyringe"; import { - BlockProverExecutionData, NetworkState, Protocol, ProtocolModulesRecord, @@ -12,19 +13,34 @@ import { MandatoryProtocolModulesRecord, reduceStateTransitions, StateTransition, + BlockProver, + BlockProverProgrammable, + BeforeTransactionHookArguments, + AfterTransactionHookArguments, + BlockProverState, + MethodPublicOutput, + toBeforeTransactionHookArgument, + toAfterTransactionHookArgument, } from "@proto-kit/protocol"; -import { Field } from "o1js"; +import { Bool, Field } from "o1js"; import { AreProofsEnabled, log, mapSequential } from "@proto-kit/common"; import { MethodParameterEncoder, Runtime, RuntimeModule, RuntimeModulesRecord, + toEventsHash, + toStateTransitionsHash, } from "@proto-kit/module"; +// eslint-disable-next-line import/no-extraneous-dependencies +import zip from "lodash/zip"; import { PendingTransaction } from "../../../mempool/PendingTransaction"; import { CachedStateService } from "../../../state/state/CachedStateService"; -import { TransactionExecutionResult } from "../../../storage/model/Block"; +import { + StateTransitionBatch, + TransactionExecutionResult, +} from "../../../storage/model/Block"; import { UntypedStateTransition } from "../helpers/UntypedStateTransition"; const errors = { @@ -39,6 +55,14 @@ export type RuntimeContextReducedExecutionResult = Pick< "stateTransitions" | "status" | "statusMessage" | "stackTrace" | "events" >; +export type BlockTrackers = Pick< + BlockProverState, + | "transactionList" + | "eternalTransactionsList" + | "incomingMessages" + | "blockHashRoot" +>; + function getAreProofsEnabledFromModule( module: RuntimeModule ): AreProofsEnabled { @@ -153,6 +177,8 @@ function traceSTs(msg: string, stateTransitions: StateTransition[]) { export class TransactionExecutionService { private readonly transactionHooks: ProvableTransactionHook[]; + private readonly blockProver: BlockProverProgrammable; + public constructor( @inject("Runtime") private readonly runtime: Runtime, @inject("Protocol") @@ -164,6 +190,8 @@ export class TransactionExecutionService { this.transactionHooks = protocol.dependencyContainer.resolveAll( "ProvableTransactionHook" ); + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions + this.blockProver = (protocol.blockProver as BlockProver).zkProgrammable; } private async executeRuntimeMethod( @@ -186,76 +214,109 @@ export class TransactionExecutionService { executionContext.afterMethod(); } - private async executeProtocolHooks( - runtimeContextInputs: RuntimeMethodExecutionData, - blockContextInputs: BlockProverExecutionData, + private async executeProtocolHooks< + T extends BeforeTransactionHookArguments | AfterTransactionHookArguments, + >( + hookArguments: T, + method: ( + module: ProvableTransactionHook, + args: T + ) => Promise, + hookName: string, runSimulated = false ) { - return await executeWithExecutionContext( + const result = await executeWithExecutionContext( async () => await 
this.wrapHooksForContext(async () => { await mapSequential( this.transactionHooks, async (transactionHook) => { - await transactionHook.onTransaction(blockContextInputs); + await method(transactionHook, hookArguments); } ); }), - runtimeContextInputs, + { + transaction: hookArguments.transaction, + networkState: hookArguments.networkState, + }, runSimulated ); + + if (!result.status.toBoolean()) { + const error = new Error( + `Protocol hooks not executable: ${result.statusMessage ?? "unknown"}` + ); + log.debug("Protocol hook error stack trace:", result.stackTrace); + // Propagate stack trace from the assertion + throw error; + } + + traceSTs(`${hookName} STs:`, result.stateTransitions); + + return result; + } + + private buildSTBatches( + transitions: StateTransition[][], + runtimeStatus: Bool + ): StateTransitionBatch[] { + const statuses = [true, runtimeStatus.toBoolean(), false]; + const reducedTransitions = transitions.map((batch) => + reduceStateTransitions(batch).map((transition) => + UntypedStateTransition.fromStateTransition(transition) + ) + ); + + assert.equal(reducedTransitions.length, 3); + + return zip(reducedTransitions, statuses).map( + ([stateTransitions, applied]) => ({ + stateTransitions: stateTransitions!, + applied: applied!, + }) + ); } public async createExecutionTrace( asyncStateService: CachedStateService, tx: PendingTransaction, - networkState: NetworkState - ): Promise { + networkState: NetworkState, + state: BlockTrackers + ): Promise<[BlockTrackers, TransactionExecutionResult]> { // TODO Use RecordingStateService -> async asProver needed const recordingStateService = new CachedStateService(asyncStateService); const { method, args, module } = await decodeTransaction(tx, this.runtime); - // Disable proof generation for tracing + // Disable proof generation for sequencing the runtime + // TODO Is that even needed? const appChain = getAreProofsEnabledFromModule(module); const previousProofsEnabled = appChain.areProofsEnabled; appChain.setProofsEnabled(false); const signedTransaction = tx.toProtocolTransaction(); - const blockContextInputs: BlockProverExecutionData = { - networkState, - transaction: signedTransaction.transaction, - signature: signedTransaction.signature, - }; const runtimeContextInputs = { - transaction: blockContextInputs.transaction, - networkState: blockContextInputs.networkState, + transaction: signedTransaction.transaction, + networkState, }; // The following steps generate and apply the correct STs with the right values this.stateServiceProvider.setCurrentStateService(recordingStateService); - const protocolResult = await this.executeProtocolHooks( - runtimeContextInputs, - blockContextInputs + // Execute beforeTransaction hooks + const beforeTxArguments = toBeforeTransactionHookArgument( + signedTransaction, + networkState, + state + ); + const beforeTxHookResult = await this.executeProtocolHooks( + beforeTxArguments, + async (hook, hookArgs) => await hook.beforeTransaction(hookArgs), + "beforeTx" ); - if (!protocolResult.status.toBoolean()) { - const error = new Error( - `Protocol hooks not executable: ${ - protocolResult.statusMessage ?? 
"unknown" - }` - ); - log.debug("Protocol hook error stack trace:", protocolResult.stackTrace); - // Propagate stack trace from the assertion - throw error; - } - - traceSTs("PSTs:", protocolResult.stateTransitions); - - // Apply protocol STs await recordingStateService.applyStateTransitions( - protocolResult.stateTransitions + beforeTxHookResult.stateTransitions ); const runtimeResult = await this.executeRuntimeMethod( @@ -273,6 +334,39 @@ export class TransactionExecutionService { ); } + // Add runtime to commitments + const newState = this.blockProver.addTransactionToBundle( + state, + Bool(tx.isMessage), + signedTransaction.transaction + ); + + // Execute afterTransaction hook + const afterTxArguments = toAfterTransactionHookArgument( + signedTransaction, + networkState, + newState, + new MethodPublicOutput({ + status: runtimeResult.status, + networkStateHash: networkState.hash(), + isMessage: Bool(tx.isMessage), + transactionHash: tx.hash(), + eventsHash: toEventsHash(runtimeResult.events), + stateTransitionsHash: toStateTransitionsHash( + runtimeResult.stateTransitions + ), + }) + ); + + const afterTxHookResult = await this.executeProtocolHooks( + afterTxArguments, + async (hook, hookArgs) => await hook.afterTransaction(hookArgs), + "afterTx" + ); + await recordingStateService.applyStateTransitions( + afterTxHookResult.stateTransitions + ); + await recordingStateService.mergeIntoParent(); // Reset global stateservice @@ -281,22 +375,27 @@ export class TransactionExecutionService { // Reset proofs enabled appChain.setProofsEnabled(previousProofsEnabled); + // Extract sequencing results const events = extractEvents(runtimeResult); + const stateTransitions = this.buildSTBatches( + [ + beforeTxHookResult.stateTransitions, + runtimeResult.stateTransitions, + afterTxHookResult.stateTransitions, + ], + runtimeResult.status + ); - return { - tx, - status: runtimeResult.status, - statusMessage: runtimeResult.statusMessage, - - stateTransitions: runtimeResult.stateTransitions.map((st) => - UntypedStateTransition.fromStateTransition(st) - ), - - protocolTransitions: protocolResult.stateTransitions.map((st) => - UntypedStateTransition.fromStateTransition(st) - ), - - events, - }; + return [ + state, + { + tx, + status: runtimeResult.status, + statusMessage: runtimeResult.statusMessage, + + stateTransitions, + events, + }, + ]; } } diff --git a/packages/sequencer/src/protocol/production/tasks/NewBlockTask.ts b/packages/sequencer/src/protocol/production/tasks/NewBlockTask.ts index e9cd713fe..460d0c337 100644 --- a/packages/sequencer/src/protocol/production/tasks/NewBlockTask.ts +++ b/packages/sequencer/src/protocol/production/tasks/NewBlockTask.ts @@ -9,8 +9,9 @@ import { StateTransitionProvable, BlockHashMerkleTreeWitness, MandatoryProtocolModulesRecord, + WitnessedRootWitness, } from "@proto-kit/protocol"; -import { Proof } from "o1js"; +import { Bool, Proof } from "o1js"; import { ProvableMethodExecutionContext, CompileRegistry, @@ -18,12 +19,12 @@ import { import { Task, TaskSerializer } from "../../../worker/flow/Task"; import { ProofTaskSerializer } from "../../../helpers/utils"; -import { PreFilledStateService } from "../../../state/prefilled/PreFilledStateService"; import { TaskWorkerModule } from "../../../worker/worker/TaskWorkerModule"; import { PairingDerivedInput } from "../flow/ReductionTaskFlow"; -import { TaskStateRecord } from "../TransactionTraceService"; +import type { TaskStateRecord } from "../tracing/BlockTracingService"; import { NewBlockProvingParametersSerializer } from 
"./serializers/NewBlockProvingParametersSerializer"; +import { executeWithPrefilledStateService } from "./TransactionProvingTask"; type BlockProof = Proof; @@ -31,7 +32,10 @@ export interface NewBlockProverParameters { publicInput: BlockProverPublicInput; networkState: NetworkState; blockWitness: BlockHashMerkleTreeWitness; - startingState: TaskStateRecord; + deferSTProof: Bool; + afterBlockRootWitness: WitnessedRootWitness; + startingStateBeforeHook: TaskStateRecord; + startingStateAfterHook: TaskStateRecord; } export type NewBlockProvingParameters = PairingDerivedInput< @@ -84,43 +88,37 @@ export class NewBlockTask ); } - private async executeWithPrefilledStateService( - startingState: TaskStateRecord, - callback: () => Promise - ): Promise { - const prefilledStateService = new PreFilledStateService({ - ...startingState, - }); - this.protocol.stateServiceProvider.setCurrentStateService( - prefilledStateService + public async compute(input: NewBlockProvingParameters): Promise { + const { input1, input2, params: parameters } = input; + const { + networkState, + blockWitness, + startingStateBeforeHook, + startingStateAfterHook, + publicInput, + deferSTProof, + afterBlockRootWitness, + } = parameters; + + await this.blockProver.proveBlock( + publicInput, + networkState, + blockWitness, + input1, + deferSTProof, + afterBlockRootWitness, + input2 ); - const returnValue = await callback(); - - this.protocol.stateServiceProvider.popCurrentStateService(); - - return returnValue; - } + await executeWithPrefilledStateService( + this.protocol.stateServiceProvider, + [startingStateBeforeHook, startingStateAfterHook], + async () => {} + ); - public async compute(input: NewBlockProvingParameters): Promise { - // TODO I left the task arg for the ST Proof in, until it will be reworked - // with the new ST Prover - const { input2, params: parameters } = input; - const { networkState, blockWitness, startingState, publicInput } = - parameters; - - await this.executeWithPrefilledStateService(startingState, async () => { - await this.blockProver.proveBlock( - publicInput, - networkState, - blockWitness, - // input1, - input2 - ); - }); - - return await this.executeWithPrefilledStateService( - startingState, + return await executeWithPrefilledStateService( + this.protocol.stateServiceProvider, + [startingStateBeforeHook, startingStateAfterHook], async () => await this.executionContext.current().result.prove() ); diff --git a/packages/sequencer/src/protocol/production/tasks/RuntimeProvingTask.ts b/packages/sequencer/src/protocol/production/tasks/RuntimeProvingTask.ts index 990f9784b..3fe0756d3 100644 --- a/packages/sequencer/src/protocol/production/tasks/RuntimeProvingTask.ts +++ b/packages/sequencer/src/protocol/production/tasks/RuntimeProvingTask.ts @@ -17,7 +17,7 @@ import { ProofTaskSerializer } from "../../../helpers/utils"; import { TaskWorkerModule } from "../../../worker/worker/TaskWorkerModule"; import { PreFilledStateService } from "../../../state/prefilled/PreFilledStateService"; import { PendingTransaction } from "../../../mempool/PendingTransaction"; -import { TaskStateRecord } from "../TransactionTraceService"; +import { TaskStateRecord } from "../tracing/BlockTracingService"; import { RuntimeProofParametersSerializer } from "./serializers/RuntimeProofParametersSerializer"; @@ -29,8 +29,6 @@ export interface RuntimeProofParameters { state: TaskStateRecord; } -export type JSONEncodableState = Record; - @injectable() @scoped(Lifecycle.ContainerScoped) export class RuntimeProvingTask diff --git 
a/packages/sequencer/src/protocol/production/tasks/StateTransitionReductionTask.ts b/packages/sequencer/src/protocol/production/tasks/StateTransitionReductionTask.ts index 6b2612f2d..940f1faaa 100644 --- a/packages/sequencer/src/protocol/production/tasks/StateTransitionReductionTask.ts +++ b/packages/sequencer/src/protocol/production/tasks/StateTransitionReductionTask.ts @@ -31,8 +31,9 @@ export class StateTransitionReductionTask public constructor( @inject("Protocol") - private readonly protocol: Protocol< - MandatoryProtocolModulesRecord & ProtocolModulesRecord + private readonly protocol: Pick< + Protocol, + "stateTransitionProver" >, private readonly executionContext: ProvableMethodExecutionContext, private readonly compileRegistry: CompileRegistry diff --git a/packages/sequencer/src/protocol/production/tasks/StateTransitionTask.ts b/packages/sequencer/src/protocol/production/tasks/StateTransitionTask.ts index 8c71adbeb..03303d3fa 100644 --- a/packages/sequencer/src/protocol/production/tasks/StateTransitionTask.ts +++ b/packages/sequencer/src/protocol/production/tasks/StateTransitionTask.ts @@ -1,10 +1,10 @@ import { inject, injectable, Lifecycle, scoped } from "tsyringe"; import { + AppliedStateTransitionBatchState, MandatoryProtocolModulesRecord, + MerkleWitnessBatch, Protocol, ProtocolModulesRecord, - ProvableStateTransition, - ProvableStateTransitionType, StateTransitionProof, StateTransitionProvable, StateTransitionProvableBatch, @@ -26,10 +26,8 @@ import { StateTransitionParametersSerializer } from "./serializers/StateTransiti export interface StateTransitionProofParameters { publicInput: StateTransitionProverPublicInput; - stateTransitions: { - transition: ProvableStateTransition; - type: ProvableStateTransitionType; - }[]; + batch: StateTransitionProvableBatch; + batchState: AppliedStateTransitionBatchState; merkleWitnesses: RollupMerkleTreeWitness[]; } @@ -68,12 +66,11 @@ export class StateTransitionTask public async compute( input: StateTransitionProofParameters ): Promise { - const stBatch = input.stateTransitions.slice(); - const merkleWitnesses = input.merkleWitnesses.slice(); - - const output = await this.stateTransitionProver.runBatch( + const output = await this.stateTransitionProver.proveBatch( input.publicInput, - StateTransitionProvableBatch.fromMappings(stBatch, merkleWitnesses) + input.batch, + new MerkleWitnessBatch({ witnesses: input.merkleWitnesses.slice() }), + input.batchState ); log.debug("STTask public io:", { input: StateTransitionProverPublicInput.toJSON(input.publicInput), diff --git a/packages/sequencer/src/protocol/production/tasks/TransactionProvingTask.ts b/packages/sequencer/src/protocol/production/tasks/TransactionProvingTask.ts index 6b016666e..7a27b2f9c 100644 --- a/packages/sequencer/src/protocol/production/tasks/TransactionProvingTask.ts +++ b/packages/sequencer/src/protocol/production/tasks/TransactionProvingTask.ts @@ -1,19 +1,12 @@ import { BlockProof, BlockProvable, - BlockProverExecutionData, - BlockProverPublicInput, MandatoryProtocolModulesRecord, - MethodPublicOutput, Protocol, ProtocolModulesRecord, - RuntimeVerificationKeyAttestation, StateServiceProvider, - StateTransitionProof, - StateTransitionProvable, DynamicRuntimeProof, } from "@proto-kit/protocol"; -import { Proof } from "o1js"; import { Runtime } from "@proto-kit/module"; import { inject, injectable, Lifecycle, scoped } from "tsyringe"; import { @@ -22,28 +15,39 @@ import { } from "@proto-kit/common"; import { ProofTaskSerializer } from "../../../helpers/utils"; -import { 
PairingDerivedInput } from "../flow/ReductionTaskFlow"; import { TaskSerializer, Task } from "../../../worker/flow/Task"; import { PreFilledStateService } from "../../../state/prefilled/PreFilledStateService"; import { TaskWorkerModule } from "../../../worker/worker/TaskWorkerModule"; -import { TaskStateRecord } from "../TransactionTraceService"; +import type { TaskStateRecord } from "../tracing/BlockTracingService"; import { TransactionProvingTaskParameterSerializer } from "./serializers/TransactionProvingTaskParameterSerializer"; +import { + TransactionProvingTaskParameters, + TransactionProvingType, +} from "./serializers/types/TransactionProvingTypes"; + +export async function executeWithPrefilledStateService( + stateServiceProvider: StateServiceProvider, + startingStates: TaskStateRecord[], + callback: () => Promise +): Promise { + startingStates + .slice() + .reverse() + .forEach((startingState) => { + stateServiceProvider.setCurrentStateService( + new PreFilledStateService({ + ...startingState, + }) + ); + }); -type RuntimeProof = Proof; + const returnValue = await callback(); -export interface BlockProverParameters { - publicInput: BlockProverPublicInput; - executionData: BlockProverExecutionData; - startingState: TaskStateRecord; - verificationKeyAttestation: RuntimeVerificationKeyAttestation; -} + stateServiceProvider.popCurrentStateService(); -export type TransactionProvingTaskParameters = PairingDerivedInput< - StateTransitionProof, - RuntimeProof, - BlockProverParameters ->; + return returnValue; +} @injectable() @scoped(Lifecycle.ContainerScoped) @@ -51,8 +55,6 @@ export class TransactionProvingTask extends TaskWorkerModule implements Task { - private readonly stateTransitionProver: StateTransitionProvable; - private readonly blockProver: BlockProvable; private readonly runtimeProofType = @@ -66,25 +68,18 @@ export class TransactionProvingTask MandatoryProtocolModulesRecord & ProtocolModulesRecord >, @inject("Runtime") private readonly runtime: Runtime, - @inject("StateServiceProvider") - private readonly stateServiceProvider: StateServiceProvider, private readonly executionContext: ProvableMethodExecutionContext, private readonly compileRegistry: CompileRegistry ) { super(); - this.stateTransitionProver = protocol.stateTransitionProver; this.blockProver = this.protocol.blockProver; } public inputSerializer(): TaskSerializer { - const stProofSerializer = new ProofTaskSerializer( - this.stateTransitionProver.zkProgrammable.zkProgram[0].Proof - ); const runtimeProofSerializer = new ProofTaskSerializer( this.runtimeProofType ); return new TransactionProvingTaskParameterSerializer( - stProofSerializer, runtimeProofSerializer ); } @@ -95,47 +90,37 @@ export class TransactionProvingTask ); } - private async executeWithPrefilledStateService( - startingState: TaskStateRecord, - callback: () => Promise - ): Promise { - const prefilledStateService = new PreFilledStateService({ - ...startingState, - }); - this.stateServiceProvider.setCurrentStateService(prefilledStateService); - - const returnValue = await callback(); - - this.stateServiceProvider.popCurrentStateService(); - - return returnValue; - } - public async compute( - input: PairingDerivedInput< - StateTransitionProof, - RuntimeProof, - BlockProverParameters - > + input: TransactionProvingTaskParameters ): Promise { - const stateTransitionProof = input.input1; - const runtimeProofDynamic = DynamicRuntimeProof.fromProof(input.input2); - - await this.executeWithPrefilledStateService( - input.params.startingState, + await 
executeWithPrefilledStateService( + this.protocol.stateServiceProvider, + input.parameters.startingState, async () => { - await this.blockProver.proveTransaction( - input.params.publicInput, - stateTransitionProof, - runtimeProofDynamic, - input.params.executionData, - input.params.verificationKeyAttestation - ); + const { type, parameters } = input; + + const proof1 = DynamicRuntimeProof.fromProof(input.proof1); + + if (type === TransactionProvingType.SINGLE) { + await this.blockProver.proveTransaction( + parameters.publicInput, + proof1, + parameters.executionData + ); + } else { + await this.blockProver.proveTransactions( + parameters.publicInput, + proof1, + DynamicRuntimeProof.fromProof(input.proof2), + parameters.executionData + ); + } } ); - return await this.executeWithPrefilledStateService( - input.params.startingState, + return await executeWithPrefilledStateService( + this.protocol.stateServiceProvider, + input.parameters.startingState, async () => await this.executionContext.current().result.prove() ); diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/DecodedStateSerializer.ts b/packages/sequencer/src/protocol/production/tasks/serializers/DecodedStateSerializer.ts index eb753c3e7..475a223be 100644 --- a/packages/sequencer/src/protocol/production/tasks/serializers/DecodedStateSerializer.ts +++ b/packages/sequencer/src/protocol/production/tasks/serializers/DecodedStateSerializer.ts @@ -1,7 +1,8 @@ import { Field } from "o1js"; -import { TaskStateRecord } from "../../TransactionTraceService"; -import { JSONEncodableState } from "../RuntimeProvingTask"; +import type { TaskStateRecord } from "../../tracing/BlockTracingService"; + +export type JSONEncodableState = Record; export class DecodedStateSerializer { public static fromJSON(json: JSONEncodableState): TaskStateRecord { diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/NewBlockProvingParametersSerializer.ts b/packages/sequencer/src/protocol/production/tasks/serializers/NewBlockProvingParametersSerializer.ts index f996350a5..c8a1f2640 100644 --- a/packages/sequencer/src/protocol/production/tasks/serializers/NewBlockProvingParametersSerializer.ts +++ b/packages/sequencer/src/protocol/production/tasks/serializers/NewBlockProvingParametersSerializer.ts @@ -8,15 +8,19 @@ import { StateTransitionProof, StateTransitionProverPublicInput, StateTransitionProverPublicOutput, + WitnessedRootWitness, } from "@proto-kit/protocol"; +import { Bool } from "o1js"; import type { NewBlockProverParameters } from "../NewBlockTask"; import { TaskSerializer } from "../../../../worker/flow/Task"; -import { JSONEncodableState } from "../RuntimeProvingTask"; import { ProofTaskSerializer } from "../../../../helpers/utils"; import { PairingDerivedInput } from "../../flow/ReductionTaskFlow"; -import { DecodedStateSerializer } from "./DecodedStateSerializer"; +import { + DecodedStateSerializer, + JSONEncodableState, +} from "./DecodedStateSerializer"; interface JsonType { input1: string; @@ -25,7 +29,10 @@ interface JsonType { publicInput: ReturnType; networkState: ReturnType; blockWitness: ReturnType; - startingState: JSONEncodableState; + startingStateBeforeHook: JSONEncodableState; + startingStateAfterHook: JSONEncodableState; + deferSTProof: boolean; + afterBlockRootWitness: ReturnType; }; } @@ -63,8 +70,18 @@ export class NewBlockProvingParametersSerializer input.params.blockWitness ), - startingState: DecodedStateSerializer.toJSON( - input.params.startingState + startingStateBeforeHook: 
DecodedStateSerializer.toJSON( + input.params.startingStateBeforeHook + ), + + startingStateAfterHook: DecodedStateSerializer.toJSON( + input.params.startingStateAfterHook + ), + + deferSTProof: input.params.deferSTProof.toBoolean(), + + afterBlockRootWitness: WitnessedRootWitness.toJSON( + input.params.afterBlockRootWitness + ), }, } satisfies JsonType); @@ -90,8 +107,18 @@ export class NewBlockProvingParametersSerializer BlockHashMerkleTreeWitness.fromJSON(jsonObject.params.blockWitness) ), - startingState: DecodedStateSerializer.fromJSON( - jsonObject.params.startingState + startingStateBeforeHook: DecodedStateSerializer.fromJSON( + jsonObject.params.startingStateBeforeHook + ), + + startingStateAfterHook: DecodedStateSerializer.fromJSON( + jsonObject.params.startingStateAfterHook + ), + + deferSTProof: Bool(jsonObject.params.deferSTProof), + + afterBlockRootWitness: WitnessedRootWitness.fromJSON( + jsonObject.params.afterBlockRootWitness + ), + }, + }; diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/RuntimeProofParametersSerializer.ts b/packages/sequencer/src/protocol/production/tasks/serializers/RuntimeProofParametersSerializer.ts index 5a4301d3d..102d89c09 100644 --- a/packages/sequencer/src/protocol/production/tasks/serializers/RuntimeProofParametersSerializer.ts +++ b/packages/sequencer/src/protocol/production/tasks/serializers/RuntimeProofParametersSerializer.ts @@ -1,12 +1,13 @@ import { NetworkState, ReturnType } from "@proto-kit/protocol"; -import { Field } from "o1js"; import { TaskSerializer } from "../../../../worker/flow/Task"; import { PendingTransaction } from "../../../../mempool/PendingTransaction"; -import type { +import type { RuntimeProofParameters } from "../RuntimeProvingTask"; + +import { + DecodedStateSerializer, JSONEncodableState, - RuntimeProofParameters, -} from "../RuntimeProvingTask"; +} from "./DecodedStateSerializer"; export class RuntimeProofParametersSerializer implements TaskSerializer @@ -15,13 +16,7 @@ export class RuntimeProofParametersSerializer const jsonReadyObject = { tx: parameters.tx.toJSON(), networkState: NetworkState.toJSON(parameters.networkState), - - state: Object.fromEntries( - Object.entries(parameters.state).map(([key, value]) => [ - key, - value?.map((v) => v.toString()), - ]) - ), + state: DecodedStateSerializer.toJSON(parameters.state), }; return JSON.stringify(jsonReadyObject); } @@ -40,12 +35,7 @@ export class RuntimeProofParametersSerializer NetworkState.fromJSON(jsonReadyObject.networkState) ), - state: Object.fromEntries( - Object.entries(jsonReadyObject.state).map(([key, values]) => [ - key, - values?.map((encodedField) => Field(encodedField)), - ]) - ), + state: DecodedStateSerializer.fromJSON(jsonReadyObject.state), }; } } diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/StateTransitionParametersSerializer.ts b/packages/sequencer/src/protocol/production/tasks/serializers/StateTransitionParametersSerializer.ts index e06cbf91e..dd7966730 100644 --- a/packages/sequencer/src/protocol/production/tasks/serializers/StateTransitionParametersSerializer.ts +++ b/packages/sequencer/src/protocol/production/tasks/serializers/StateTransitionParametersSerializer.ts @@ -1,21 +1,18 @@ import { - ProvableStateTransition, - ProvableStateTransitionType, + AppliedStateTransitionBatchState, + StateTransitionProvableBatch, StateTransitionProverPublicInput, } from "@proto-kit/protocol"; import { RollupMerkleTreeWitness } from "@proto-kit/common"; -import { Bool } from "o1js"; import { TaskSerializer } 
from "../../../../worker/flow/Task"; import type { StateTransitionProofParameters } from "../StateTransitionTask"; interface StateTransitionParametersJSON { publicInput: ReturnType; - stateTransitions: { - transition: ReturnType; - type: boolean; - }[]; + batch: ReturnType; merkleWitnesses: ReturnType[]; + batchState: ReturnType; } export class StateTransitionParametersSerializer @@ -27,16 +24,15 @@ export class StateTransitionParametersSerializer parameters.publicInput ), - stateTransitions: parameters.stateTransitions.map((st) => { - return { - transition: ProvableStateTransition.toJSON(st.transition), - type: st.type.type.toBoolean(), - }; - }), + batch: StateTransitionProvableBatch.toJSON(parameters.batch), merkleWitnesses: parameters.merkleWitnesses.map((witness) => RollupMerkleTreeWitness.toJSON(witness) ), + + batchState: AppliedStateTransitionBatchState.toJSON( + parameters.batchState + ), } satisfies StateTransitionParametersJSON); } @@ -49,20 +45,16 @@ export class StateTransitionParametersSerializer parsed.publicInput ), - stateTransitions: parsed.stateTransitions.map((st) => { - return { - transition: new ProvableStateTransition( - ProvableStateTransition.fromJSON(st.transition) - ), - - type: new ProvableStateTransitionType({ type: Bool(st.type) }), - }; - }), + batch: StateTransitionProvableBatch.fromJSON(parsed.batch), merkleWitnesses: parsed.merkleWitnesses.map( (witness) => new RollupMerkleTreeWitness(RollupMerkleTreeWitness.fromJSON(witness)) ), + + batchState: new AppliedStateTransitionBatchState( + AppliedStateTransitionBatchState.fromJSON(parsed.batchState) + ), }; } } diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/TransactionProvingTaskParameterSerializer.ts b/packages/sequencer/src/protocol/production/tasks/serializers/TransactionProvingTaskParameterSerializer.ts index 7963280f7..da655d863 100644 --- a/packages/sequencer/src/protocol/production/tasks/serializers/TransactionProvingTaskParameterSerializer.ts +++ b/packages/sequencer/src/protocol/production/tasks/serializers/TransactionProvingTaskParameterSerializer.ts @@ -1,97 +1,220 @@ import { - BlockProverExecutionData, BlockProverPublicInput, + BlockProverTransactionArguments, MethodPublicOutput, + NetworkState, ReturnType, - StateTransitionProverPublicInput, - StateTransitionProverPublicOutput, + RuntimeTransaction, } from "@proto-kit/protocol"; +import { JsonProof, Signature } from "o1js"; import { TaskSerializer } from "../../../../worker/flow/Task"; import { ProofTaskSerializer } from "../../../../helpers/utils"; -import type { TransactionProvingTaskParameters } from "../TransactionProvingTask"; -import type { JSONEncodableState } from "../RuntimeProvingTask"; -import { DecodedStateSerializer } from "./DecodedStateSerializer"; +import { + TransactionProvingTaskParameters, + TransactionProvingType, +} from "./types/TransactionProvingTypes"; +import { + DecodedStateSerializer, + JSONEncodableState, +} from "./DecodedStateSerializer"; import { RuntimeVerificationKeyAttestationSerializer } from "./RuntimeVerificationKeyAttestationSerializer"; +export type BlockProverTransactionArgumentsJSON = { + transaction: ReturnType; + signature: ReturnType; + verificationKeyAttestation: ReturnType< + typeof RuntimeVerificationKeyAttestationSerializer.toJSON + >; +}; + +export type SingleExecutionDataJSON = { + transaction: BlockProverTransactionArgumentsJSON; + networkState: ReturnType; +}; + +export type MultiExecutionDataJSON = { + transaction1: BlockProverTransactionArgumentsJSON; + transaction2: 
BlockProverTransactionArgumentsJSON; + networkState: ReturnType; +}; + +export type TransactionProverTaskParametersJSON< + ExecutionData extends SingleExecutionDataJSON | MultiExecutionDataJSON, +> = { + startingState: JSONEncodableState[]; + publicInput: ReturnType; + executionData: ExecutionData; +}; + +export type TransactionProvingTaskParametersJSON = + | { + type: TransactionProvingType.SINGLE; + proof1: JsonProof; + parameters: TransactionProverTaskParametersJSON; + } + | { + type: TransactionProvingType.MULTI; + proof1: JsonProof; + proof2: JsonProof; + parameters: TransactionProverTaskParametersJSON; + }; + export class TransactionProvingTaskParameterSerializer implements TaskSerializer { public constructor( - private readonly stProofSerializer: ProofTaskSerializer< - StateTransitionProverPublicInput, - StateTransitionProverPublicOutput - >, private readonly runtimeProofSerializer: ProofTaskSerializer< - undefined, + void, MethodPublicOutput > ) {} - toJSON(input: TransactionProvingTaskParameters): string { - const jsonReadyObject = { - input1: this.stProofSerializer.toJSON(input.input1), - input2: this.runtimeProofSerializer.toJSON(input.input2), - - params: { - publicInput: BlockProverPublicInput.toJSON(input.params.publicInput), - - executionData: BlockProverExecutionData.toJSON( - input.params.executionData + private blockProverArgumentsToJson( + args: BlockProverTransactionArguments + ): BlockProverTransactionArgumentsJSON { + return { + transaction: RuntimeTransaction.toJSON(args.transaction), + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + signature: Signature.toJSON(args.signature), + verificationKeyAttestation: + RuntimeVerificationKeyAttestationSerializer.toJSON( + args.verificationKeyAttestation ), + }; + } - startingState: DecodedStateSerializer.toJSON( - input.params.startingState + private blockProverArgumentsFromJson( + args: BlockProverTransactionArgumentsJSON + ): BlockProverTransactionArguments { + return { + transaction: new RuntimeTransaction( + RuntimeTransaction.fromJSON(args.transaction) + ), + signature: Signature.fromJSON(args.signature), + verificationKeyAttestation: + RuntimeVerificationKeyAttestationSerializer.fromJSON( + args.verificationKeyAttestation ), - - verificationKeyAttestation: - RuntimeVerificationKeyAttestationSerializer.toJSON( - input.params.verificationKeyAttestation - ), - }, }; - return JSON.stringify(jsonReadyObject); } - async fromJSON(json: string): Promise { - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - const jsonReadyObject: { - input1: string; - input2: string; - params: { - publicInput: ReturnType; - executionData: ReturnType; - startingState: JSONEncodableState; - verificationKeyAttestation: ReturnType< - typeof RuntimeVerificationKeyAttestationSerializer.toJSON - >; - }; - } = JSON.parse(json); + public toJSON(input: TransactionProvingTaskParameters): string { + let taskParamsJson: TransactionProvingTaskParametersJSON; - return { - input1: await this.stProofSerializer.fromJSON(jsonReadyObject.input1), - input2: await this.runtimeProofSerializer.fromJSON( - jsonReadyObject.input2 + const { type, parameters } = input; + + const partialParameters = { + publicInput: BlockProverPublicInput.toJSON(parameters.publicInput), + + startingState: parameters.startingState.map((stateRecord) => + DecodedStateSerializer.toJSON(stateRecord) ), + }; - params: { - publicInput: BlockProverPublicInput.fromJSON( - jsonReadyObject.params.publicInput - ), + // The reason we can't just use the structs 
toJSON is that the VerificationKey + // toJSON and fromJSON isn't consistent -> i.e. the serialization doesn't work + // the same both ways. We fix that in our custom serializer + if (type === TransactionProvingType.SINGLE) { + const { executionData } = parameters; + const executionDataJson: SingleExecutionDataJSON = { + networkState: NetworkState.toJSON(executionData.networkState), + transaction: this.blockProverArgumentsToJson(executionData.transaction), + }; - executionData: BlockProverExecutionData.fromJSON( - jsonReadyObject.params.executionData + taskParamsJson = { + type, + proof1: this.runtimeProofSerializer.toJSONProof(input.proof1), + parameters: { + ...partialParameters, + executionData: executionDataJson, + }, + }; + } else { + const { executionData } = parameters; + const executionDataJson: MultiExecutionDataJSON = { + networkState: NetworkState.toJSON(executionData.networkState), + transaction1: this.blockProverArgumentsToJson( + executionData.transaction1 + ), + transaction2: this.blockProverArgumentsToJson( + executionData.transaction2 ), + }; + + taskParamsJson = { + type, + proof1: this.runtimeProofSerializer.toJSONProof(input.proof1), + proof2: this.runtimeProofSerializer.toJSONProof(input.proof2), + parameters: { + ...partialParameters, + executionData: executionDataJson, + }, + }; + } + + return JSON.stringify(taskParamsJson); + } + + public async fromJSON( + json: string + ): Promise { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + const jsonReadyObject: TransactionProvingTaskParametersJSON = + JSON.parse(json); - startingState: DecodedStateSerializer.fromJSON( - jsonReadyObject.params.startingState + const { type, parameters } = jsonReadyObject; + + const partialParameters = { + publicInput: BlockProverPublicInput.fromJSON(parameters.publicInput), + + startingState: parameters.startingState.map((stateRecord) => + DecodedStateSerializer.fromJSON(stateRecord) + ), + }; + + if (type === TransactionProvingType.SINGLE) { + return { + type, + proof1: await this.runtimeProofSerializer.fromJSONProof( + jsonReadyObject.proof1 ), + parameters: { + ...partialParameters, + executionData: { + transaction: this.blockProverArgumentsFromJson( + parameters.executionData.transaction + ), + networkState: new NetworkState( + NetworkState.fromJSON(parameters.executionData.networkState) + ), + }, + }, + }; + } - verificationKeyAttestation: - RuntimeVerificationKeyAttestationSerializer.fromJSON( - jsonReadyObject.params.verificationKeyAttestation + return { + type, + proof1: await this.runtimeProofSerializer.fromJSONProof( + jsonReadyObject.proof1 + ), + proof2: await this.runtimeProofSerializer.fromJSONProof( + jsonReadyObject.proof2 + ), + parameters: { + ...partialParameters, + executionData: { + transaction1: this.blockProverArgumentsFromJson( + parameters.executionData.transaction1 + ), + transaction2: this.blockProverArgumentsFromJson( + parameters.executionData.transaction2 + ), + networkState: new NetworkState( + NetworkState.fromJSON(parameters.executionData.networkState) ), + }, }, }; } diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/types/TransactionProvingTypes.ts b/packages/sequencer/src/protocol/production/tasks/serializers/types/TransactionProvingTypes.ts new file mode 100644 index 000000000..b091abdc2 --- /dev/null +++ b/packages/sequencer/src/protocol/production/tasks/serializers/types/TransactionProvingTypes.ts @@ -0,0 +1,39 @@ +import { + BlockProverMultiTransactionExecutionData, + BlockProverPublicInput, + 
BlockProverSingleTransactionExecutionData, + MethodPublicOutput, +} from "@proto-kit/protocol"; +import { Proof } from "o1js"; + +import type { TaskStateRecord } from "../../../tracing/BlockTracingService"; + +export type RuntimeProof = Proof; + +export enum TransactionProvingType { + SINGLE, + MULTI, +} + +export interface TransactionProverTaskParameters< + ExecutionData extends + | BlockProverSingleTransactionExecutionData + | BlockProverMultiTransactionExecutionData, +> { + publicInput: BlockProverPublicInput; + executionData: ExecutionData; + startingState: TaskStateRecord[]; +} + +export type TransactionProvingTaskParameters = + | { + type: TransactionProvingType.SINGLE; + parameters: TransactionProverTaskParameters; + proof1: RuntimeProof; + } + | { + type: TransactionProvingType.MULTI; + parameters: TransactionProverTaskParameters; + proof1: RuntimeProof; + proof2: RuntimeProof; + }; diff --git a/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts b/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts new file mode 100644 index 000000000..f7a46c143 --- /dev/null +++ b/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts @@ -0,0 +1,108 @@ +import { yieldSequential } from "@proto-kit/common"; +import { + AppliedBatchHashList, + MinaActionsHashList, + TransactionHashList, + WitnessedRootHashList, +} from "@proto-kit/protocol"; +import { Field } from "o1js"; +import { injectable } from "tsyringe"; + +import { CachedMerkleTreeStore } from "../../../state/merkle/CachedMerkleTreeStore"; +import { StateTransitionProofParameters } from "../tasks/StateTransitionTask"; +import { BlockWithResult } from "../../../storage/model/Block"; + +import { + BlockTrace, + BlockTracingService, + BlockTracingState, +} from "./BlockTracingService"; +import { StateTransitionTracingService } from "./StateTransitionTracingService"; + +type BatchTracingState = Omit; + +export type BatchTrace = { + blocks: BlockTrace[]; + stateTransitionTrace: StateTransitionProofParameters[]; +}; + +@injectable() +export class BatchTracingService { + public constructor( + private readonly blockTracingService: BlockTracingService, + private readonly stateTransitionTracingService: StateTransitionTracingService + ) {} + + private createBatchState(block: BlockWithResult): BatchTracingState { + return { + pendingSTBatches: new AppliedBatchHashList(), + witnessedRoots: new WitnessedRootHashList(), + stateRoot: Field(block.block.fromStateRoot), + eternalTransactionsList: new TransactionHashList( + block.block.fromEternalTransactionsHash + ), + incomingMessages: new MinaActionsHashList(block.block.fromMessagesHash), + networkState: block.block.networkState.before, + }; + } + + public async traceBlocks(blocks: BlockWithResult[]) { + const batchState = this.createBatchState(blocks[0]); + + // Trace blocks + const numBlocks = blocks.length; + const [, blockTraces] = await yieldSequential( + blocks, + async (state, block, index) => { + const blockProverState: BlockTracingState = { + ...state, + transactionList: new TransactionHashList(), + }; + const [newState, trace] = await this.blockTracingService.traceBlock( + blockProverState, + block, + index === numBlocks - 1 + ); + return [newState, trace]; + }, + batchState + ); + + return blockTraces; + } + + public async traceStateTransitions( + blocks: BlockWithResult[], + merkleTreeStore: CachedMerkleTreeStore + ) { + const batches = this.stateTransitionTracingService.extractSTBatches(blocks); + + return await 
this.stateTransitionTracingService.createMerkleTrace( + merkleTreeStore, + batches + ); + } + + public async traceBatch( + blocks: BlockWithResult[], + merkleTreeStore: CachedMerkleTreeStore + ): Promise { + if (blocks.length === 0) { + return { blocks: [], stateTransitionTrace: [] }; + } + + // Traces the STs and the blocks in parallel, however not in separate processes + // Therefore, we only optimize the idle time for async operations like DB reads + const [blockTraces, stateTransitionTrace] = await Promise.all([ + // Trace blocks + this.traceBlocks(blocks), + // Trace STs + this.traceStateTransitions(blocks, merkleTreeStore), + ]); + + return { + blocks: blockTraces, + stateTransitionTrace, + }; + } +} diff --git a/packages/sequencer/src/protocol/production/tracing/BlockTracingService.ts b/packages/sequencer/src/protocol/production/tracing/BlockTracingService.ts new file mode 100644 index 000000000..80c5e70fe --- /dev/null +++ b/packages/sequencer/src/protocol/production/tracing/BlockTracingService.ts @@ -0,0 +1,144 @@ +import { + BlockProverPublicInput, + BlockProverState, + WitnessedRootWitness, +} from "@proto-kit/protocol"; +import { Bool, Field } from "o1js"; +import { toStateTransitionsHash } from "@proto-kit/module"; +import { yieldSequential } from "@proto-kit/common"; +// eslint-disable-next-line import/no-extraneous-dependencies +import chunk from "lodash/chunk"; +import { injectable } from "tsyringe"; + +import { BlockWithResult } from "../../../storage/model/Block"; +import type { NewBlockProverParameters } from "../tasks/NewBlockTask"; + +import { + collectStartingState, + TransactionTrace, + TransactionTracingService, +} from "./TransactionTracingService"; + +export type TaskStateRecord = Record; + +export type BlockTracingState = Pick< + BlockProverState, + | "witnessedRoots" + | "stateRoot" + | "pendingSTBatches" + | "networkState" + | "transactionList" + | "eternalTransactionsList" + | "incomingMessages" +>; + +export type BlockTrace = { + blockParams: NewBlockProverParameters; + transactions: TransactionTrace[]; + // Only for debugging and logging + height: string; +}; + +@injectable() +export class BlockTracingService { + public constructor( + private readonly transactionTracing: TransactionTracingService + ) {} + + public async traceBlock( + state: BlockTracingState, + block: BlockWithResult, + includeSTProof: boolean + ): Promise<[BlockTracingState, BlockTrace]> { + const publicInput: BlockProverPublicInput = new BlockProverPublicInput({ + stateRoot: state.stateRoot, + blockNumber: block.block.height, + blockHashRoot: block.block.fromBlockHashRoot, + eternalTransactionsHash: block.block.fromEternalTransactionsHash, + incomingMessagesHash: block.block.fromMessagesHash, + transactionsHash: Field(0), + networkStateHash: block.block.networkState.before.hash(), + witnessedRootsHash: state.witnessedRoots.commitment, + pendingSTBatchesHash: state.pendingSTBatches.commitment, + }); + + const startingStateBeforeHook = collectStartingState( + block.block.beforeBlockStateTransitions + ); + + const blockTrace = { + publicInput, + networkState: block.block.networkState.before, + deferSTProof: Bool(!includeSTProof), + blockWitness: block.result.blockHashWitness, + startingStateBeforeHook, + } satisfies Partial; + + state.pendingSTBatches.push({ + batchHash: toStateTransitionsHash( + block.block.beforeBlockStateTransitions + ), + applied: Bool(true), + }); + state.networkState = block.block.networkState.during; + + const [afterState, transactionTraces] = await yieldSequential( + 
chunk(block.block.transactions, 2), + async (input, [transaction1, transaction2]) => { + const [output, trace] = + transaction2 !== undefined + ? await this.transactionTracing.createMultiTransactionTrace( + input, + transaction1, + transaction2 + ) + : await this.transactionTracing.createSingleTransactionTrace( + input, + transaction1 + ); + + return [output, trace]; + }, + state + ); + + const preimage = afterState.witnessedRoots + .getUnconstrainedValues() + .get() + .at(-2)?.preimage; + + const afterBlockRootWitness: WitnessedRootWitness = { + witnessedRoot: Field(block.result.witnessedRoots[0]), + preimage: preimage ?? Field(0), + }; + + if (afterState.pendingSTBatches.commitment.equals(0).not().toBoolean()) { + state.witnessedRoots.witnessRoot( + { + appliedBatchListState: afterState.pendingSTBatches.commitment, + root: afterBlockRootWitness.witnessedRoot, + }, + afterBlockRootWitness.preimage, + state.pendingSTBatches.commitment.equals(0).not() + ); + } + + const startingStateAfterHook = collectStartingState( + block.result.afterBlockStateTransitions + ); + state.networkState = block.result.afterNetworkState; + + return [ + afterState, + { + blockParams: { + ...blockTrace, + startingStateAfterHook, + afterBlockRootWitness, + }, + transactions: transactionTraces, + height: block.block.height.toString(), + }, + ]; + } +} diff --git a/packages/sequencer/src/protocol/production/tracing/StateTransitionTracingService.ts b/packages/sequencer/src/protocol/production/tracing/StateTransitionTracingService.ts new file mode 100644 index 000000000..9e64cbc16 --- /dev/null +++ b/packages/sequencer/src/protocol/production/tracing/StateTransitionTracingService.ts @@ -0,0 +1,210 @@ +import { Bool, Field } from "o1js"; +import { mapSequential, RollupMerkleTree } from "@proto-kit/common"; +import { injectable } from "tsyringe"; +import { + AppliedBatchHashList, + AppliedStateTransitionBatchState, + DefaultProvableHashList, + ProvableStateTransition, + StateTransitionProvableBatch, + StateTransitionProverPublicInput, + StateTransitionType, + WitnessedRoot, +} from "@proto-kit/protocol"; + +import { distinctByString } from "../../../helpers/utils"; +import { BlockWithResult } from "../../../storage/model/Block"; +import { UntypedStateTransition } from "../helpers/UntypedStateTransition"; +import { CachedMerkleTreeStore } from "../../../state/merkle/CachedMerkleTreeStore"; +import { StateTransitionProofParameters } from "../tasks/StateTransitionTask"; +import { SyncCachedMerkleTreeStore } from "../../../state/merkle/SyncCachedMerkleTreeStore"; + +export interface TracingStateTransitionBatch { + stateTransitions: UntypedStateTransition[]; + applied: boolean; + witnessRoot: boolean; +} + +@injectable() +export class StateTransitionTracingService { + private allKeys(stateTransitions: { path: Field }[]): Field[] { + // We have to do the distinct with strings because + // array.indexOf() doesn't work with fields + return stateTransitions.map((st) => st.path).filter(distinctByString); + } + + public extractSTBatches( + blocks: BlockWithResult[] + ): TracingStateTransitionBatch[] { + return blocks.reduce((previous, block) => { + const batches = [ + { + stateTransitions: block.block.beforeBlockStateTransitions, + applied: true, + }, + ...block.block.transactions.flatMap((tx) => tx.stateTransitions), + ].map((batch) => ({ ...batch, witnessRoot: false })); + + const batchBeforeWitnessing = previous.concat(batches); + + // If no STs were emitted in the current block, we fall back to the previous block + // If there 
are no batches, we don't push a witness attestation + if (batchBeforeWitnessing.length > 0) { + batchBeforeWitnessing.at(-1)!.witnessRoot = true; + } + + return batchBeforeWitnessing.concat({ + stateTransitions: block.result.afterBlockStateTransitions, + applied: true, + witnessRoot: false, + }); + }, []); + } + + public async createMerkleTrace( + merkleStore: CachedMerkleTreeStore, + stateTransitions: TracingStateTransitionBatch[] + ) { + const batches = StateTransitionProvableBatch.fromBatches( + stateTransitions.map( + ({ + stateTransitions: batchStateTransitions, + applied, + witnessRoot, + }) => ({ + stateTransitions: batchStateTransitions.map((transition) => + transition.toProvable() + ), + applied: Bool(applied), + witnessRoot: Bool(witnessRoot), + }) + ) + ); + + return await this.traceTransitions(merkleStore, batches); + } + + public async traceTransitions( + merkleStore: CachedMerkleTreeStore, + batches: StateTransitionProvableBatch[] + ): Promise { + const keys = this.allKeys( + batches.flatMap((batch) => + batch.batch.map((transition) => transition.stateTransition) + ) + ); + + await merkleStore.preloadKeys(keys.map((key) => key.toBigInt())); + + let batchMerkleStore = new SyncCachedMerkleTreeStore(merkleStore); + + let tree = new RollupMerkleTree(batchMerkleStore); + const initialRoot = tree.getRoot(); + + const batchList = new AppliedBatchHashList(Field(0)); + let currentSTList = new DefaultProvableHashList(ProvableStateTransition); + const witnessedRootsList = new DefaultProvableHashList( + WitnessedRoot + ); + + let finalizedStateRoot = initialRoot; + let danglingStateRoot = initialRoot; + + return await mapSequential< + StateTransitionProvableBatch, + StateTransitionProofParameters + >(batches, async (batch) => { + const batchState = new AppliedStateTransitionBatchState({ + batchHash: currentSTList.commitment, + root: danglingStateRoot, + }); + const publicInput: StateTransitionProverPublicInput = { + batchesHash: batchList.commitment, + currentBatchStateHash: batchState.hashOrZero(), + root: finalizedStateRoot, + witnessedRootsHash: witnessedRootsList.commitment, + }; + + const witnesses = await mapSequential( + batch.batch, + async (transitionInfo) => { + const { stateTransition, type, witnessRoot } = transitionInfo; + + const merkleWitness = tree.getWitness( + stateTransition.path.toBigInt() + ); + + if (stateTransition.to.isSome.toBoolean()) { + tree.setLeaf( + stateTransition.path.toBigInt(), + stateTransition.to.value + ); + + danglingStateRoot = tree.getRoot(); + } + + currentSTList.push(stateTransition); + + if (type.isClosing().toBoolean()) { + let apply; + + if ( + type.type.equals(StateTransitionType.closeAndApply).toBoolean() + ) { + apply = true; + + finalizedStateRoot = danglingStateRoot; + + // We can reuse the batchMerkleStore here, since mergeIntoParent() + // resets the only state that the store has, therefore its equivalent + // to creating a new one + batchMerkleStore.mergeIntoParent(); + } else if ( + type.type + .equals(StateTransitionType.closeAndThrowAway) + .toBoolean() + ) { + apply = false; + + danglingStateRoot = finalizedStateRoot; + + batchMerkleStore = new SyncCachedMerkleTreeStore(merkleStore); + tree = new RollupMerkleTree(batchMerkleStore); + } else { + throw new Error("Unreachable"); + } + + batchList.push({ + batchHash: currentSTList.commitment, + applied: Bool(apply), + }); + + if (witnessRoot.toBoolean()) { + witnessedRootsList.push({ + root: finalizedStateRoot, + appliedBatchListState: batchList.commitment, + }); + } + + currentSTList 
= + new DefaultProvableHashList( + ProvableStateTransition + ); + } + + return [merkleWitness, witnessRoot] as const; + } + ); + + return { + batch, + merkleWitnesses: witnesses.map(([merkleWitness]) => merkleWitness), + publicInput, + batchState, + witnessRoots: { + values: witnesses.map(([, witnessRoot]) => witnessRoot), + }, + }; + }); + } +} diff --git a/packages/sequencer/src/protocol/production/tracing/TransactionTracingService.ts b/packages/sequencer/src/protocol/production/tracing/TransactionTracingService.ts new file mode 100644 index 000000000..4b30745b5 --- /dev/null +++ b/packages/sequencer/src/protocol/production/tracing/TransactionTracingService.ts @@ -0,0 +1,236 @@ +import { + BlockProver, + BlockProverMultiTransactionExecutionData, + BlockProverProgrammable, + BlockProverPublicInput, + BlockProverSingleTransactionExecutionData, + BlockProverTransactionArguments, + MandatoryProtocolModulesRecord, + NetworkState, + Protocol, +} from "@proto-kit/protocol"; +import { Bool, Field } from "o1js"; +import { MAX_FIELD } from "@proto-kit/common"; +import { toStateTransitionsHash } from "@proto-kit/module"; +import { inject, injectable } from "tsyringe"; + +import { TransactionExecutionResult } from "../../../storage/model/Block"; +import { PendingTransaction } from "../../../mempool/PendingTransaction"; +import type { RuntimeProofParameters } from "../tasks/RuntimeProvingTask"; +import { + TransactionProverTaskParameters, + TransactionProvingType, +} from "../tasks/serializers/types/TransactionProvingTypes"; +import { UntypedStateTransition } from "../helpers/UntypedStateTransition"; +import { VerificationKeyService } from "../../runtime/RuntimeVerificationKeyService"; + +import type { BlockTracingState, TaskStateRecord } from "./BlockTracingService"; + +export type TransactionTrace = + | { + type: TransactionProvingType.SINGLE; + transaction: TransactionProverTaskParameters; + runtime: [RuntimeProofParameters]; + } + | { + type: TransactionProvingType.MULTI; + transaction: TransactionProverTaskParameters; + runtime: [RuntimeProofParameters, RuntimeProofParameters]; + }; + +export function collectStartingState( + stateTransitions: UntypedStateTransition[] +): TaskStateRecord { + const stateEntries = stateTransitions + // Filter distinct + .filter( + (st, index, array) => + array.findIndex((st2) => st2.path.toBigInt() === st.path.toBigInt()) === + index + ) + // Filter out STs that have isSome: false as precondition, because this means + // "state hasn't been set before" and has to correlate to a precondition on Field(0) + // and for that the state has to be undefined + .filter((st) => st.fromValue.isSome.toBoolean()) + .map((st) => [st.path.toString(), st.fromValue.value]); + + return Object.fromEntries(stateEntries); +} + +@injectable() +export class TransactionTracingService { + private readonly blockProver: BlockProverProgrammable; + + public constructor( + private readonly verificationKeyService: VerificationKeyService, + @inject("Protocol") protocol: Protocol + ) { + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions + this.blockProver = (protocol.blockProver as BlockProver).zkProgrammable; + } + + public async getTransactionData( + transaction: PendingTransaction + ): Promise { + const verificationKeyAttestation = + this.verificationKeyService.getAttestation( + transaction.methodId.toBigInt() + ); + + return { + transaction: transaction.toRuntimeTransaction(), + signature: transaction.signature, + verificationKeyAttestation, + }; + } + + private 
getTransactionProofPublicInput( + previousState: BlockTracingState + ): BlockProverPublicInput { + return { + stateRoot: previousState.stateRoot, + transactionsHash: previousState.transactionList.commitment, + eternalTransactionsHash: previousState.eternalTransactionsList.commitment, + incomingMessagesHash: previousState.incomingMessages.commitment, + networkStateHash: previousState.networkState.hash(), + witnessedRootsHash: previousState.witnessedRoots.commitment, + pendingSTBatchesHash: previousState.pendingSTBatches.commitment, + blockHashRoot: Field(0), + blockNumber: MAX_FIELD, + }; + } + + private appendTransactionToState( + previousState: BlockTracingState, + transaction: TransactionExecutionResult + ) { + // TODO Remove this call and instead reuse results from sequencing + const newState = this.blockProver.addTransactionToBundle( + previousState, + Bool(transaction.tx.isMessage), + transaction.tx.toRuntimeTransaction() + ); + + transaction.stateTransitions.forEach((batch) => { + newState.pendingSTBatches.push({ + applied: Bool(batch.applied), + batchHash: toStateTransitionsHash(batch.stateTransitions), + }); + }); + + return newState; + } + + private createRuntimeProofParams( + tx: TransactionExecutionResult, + networkState: NetworkState + ): RuntimeProofParameters { + const startingState = collectStartingState( + tx.stateTransitions[1].stateTransitions + ); + + return { + tx: tx.tx, + networkState, + state: startingState, + }; + } + + private async traceTransaction( + previousState: BlockTracingState, + transaction: TransactionExecutionResult + ) { + const beforeHookStartingState = collectStartingState( + transaction.stateTransitions[0].stateTransitions.flat() + ); + + const runtimeTrace1 = this.createRuntimeProofParams( + transaction, + previousState.networkState + ); + + const afterHookStartingState = collectStartingState( + transaction.stateTransitions[2].stateTransitions.flat() + ); + + const newState = this.appendTransactionToState(previousState, transaction); + + return { + state: newState, + runtime: runtimeTrace1, + startingState: [beforeHookStartingState, afterHookStartingState], + }; + } + + public async createSingleTransactionTrace( + previousState: BlockTracingState, + transaction: TransactionExecutionResult + ): Promise<[BlockTracingState, TransactionTrace]> { + const publicInput = this.getTransactionProofPublicInput(previousState); + + const { + state: newState, + startingState, + runtime, + } = await this.traceTransaction(previousState, transaction); + + const transactionTrace: TransactionProverTaskParameters = + { + executionData: { + transaction: await this.getTransactionData(transaction.tx), + networkState: previousState.networkState, + }, + startingState, + publicInput, + }; + + return [ + newState, + { + type: TransactionProvingType.SINGLE, + transaction: transactionTrace, + runtime: [runtime], + }, + ]; + } + + public async createMultiTransactionTrace( + previousState: BlockTracingState, + transaction1: TransactionExecutionResult, + transaction2: TransactionExecutionResult + ): Promise<[BlockTracingState, TransactionTrace]> { + const publicInput = this.getTransactionProofPublicInput(previousState); + + const { + state: tmpState, + startingState: startingState1, + runtime: runtime1, + } = await this.traceTransaction(previousState, transaction1); + + const { + state: resultState, + startingState: startingState2, + runtime: runtime2, + } = await this.traceTransaction(tmpState, transaction2); + + const transactionTrace: TransactionProverTaskParameters = + { + 
executionData: { + transaction1: await this.getTransactionData(transaction1.tx), + transaction2: await this.getTransactionData(transaction2.tx), + networkState: previousState.networkState, + }, + startingState: [...startingState1, ...startingState2], + publicInput, + }; + + return [ + resultState, + { + type: TransactionProvingType.MULTI, + transaction: transactionTrace, + runtime: [runtime1, runtime2], + }, + ]; + } +} diff --git a/packages/sequencer/src/protocol/production/trigger/BlockTrigger.ts b/packages/sequencer/src/protocol/production/trigger/BlockTrigger.ts index 97252e565..348699065 100644 --- a/packages/sequencer/src/protocol/production/trigger/BlockTrigger.ts +++ b/packages/sequencer/src/protocol/production/trigger/BlockTrigger.ts @@ -13,7 +13,6 @@ import { BlockQueue } from "../../../storage/repositories/BlockStorage"; import { SequencerModule } from "../../../sequencer/builder/SequencerModule"; import { SettlementModule } from "../../../settlement/SettlementModule"; import { Block, BlockWithResult } from "../../../storage/model/Block"; -import { BatchStorage } from "../../../storage/repositories/BatchStorage"; import { SettlementStorage } from "../../../storage/repositories/SettlementStorage"; /** @@ -46,7 +45,6 @@ export class BlockTriggerBase< protected readonly batchProducerModule: BatchProducerModule | undefined, protected readonly settlementModule: SettlementModule | undefined, protected readonly blockQueue: BlockQueue, - protected readonly batchQueue: BatchStorage, protected readonly settlementStorage: SettlementStorage | undefined ) { super(); @@ -57,7 +55,6 @@ export class BlockTriggerBase< if (blocks.length > 0) { const batch = await this.batchProducerModule?.createBatch(blocks); if (batch !== undefined) { - await this.batchQueue.pushBatch(batch); this.events.emit("batch-produced", batch); } return batch; diff --git a/packages/sequencer/src/protocol/production/trigger/ManualBlockTrigger.ts b/packages/sequencer/src/protocol/production/trigger/ManualBlockTrigger.ts index 386991250..d0bc9e312 100644 --- a/packages/sequencer/src/protocol/production/trigger/ManualBlockTrigger.ts +++ b/packages/sequencer/src/protocol/production/trigger/ManualBlockTrigger.ts @@ -8,7 +8,6 @@ import { BlockProducerModule } from "../sequencing/BlockProducerModule"; import { Block, BlockWithResult } from "../../../storage/model/Block"; import { BlockQueue } from "../../../storage/repositories/BlockStorage"; import { SettlementModule } from "../../../settlement/SettlementModule"; -import { BatchStorage } from "../../../storage/repositories/BatchStorage"; import { SettlementStorage } from "../../../storage/repositories/SettlementStorage"; import { BlockTrigger, BlockTriggerBase } from "./BlockTrigger"; @@ -27,8 +26,6 @@ export class ManualBlockTrigger settlementModule: SettlementModule | undefined, @inject("BlockQueue") blockQueue: BlockQueue, - @inject("BatchStorage") - batchStorage: BatchStorage, @injectOptional("SettlementStorage") settlementStorage: SettlementStorage | undefined ) { @@ -38,7 +35,6 @@ export class ManualBlockTrigger settlementModule, blockQueue, - batchStorage, settlementStorage ); } diff --git a/packages/sequencer/src/protocol/production/trigger/TimedBlockTrigger.ts b/packages/sequencer/src/protocol/production/trigger/TimedBlockTrigger.ts index 4b0300be0..e36a80f60 100644 --- a/packages/sequencer/src/protocol/production/trigger/TimedBlockTrigger.ts +++ b/packages/sequencer/src/protocol/production/trigger/TimedBlockTrigger.ts @@ -9,7 +9,6 @@ import { BlockQueue } from 
"../../../storage/repositories/BlockStorage"; import { BlockProducerModule } from "../sequencing/BlockProducerModule"; import { SettlementModule } from "../../../settlement/SettlementModule"; import { SettlementStorage } from "../../../storage/repositories/SettlementStorage"; -import { BatchStorage } from "../../../storage/repositories/BatchStorage"; import { BlockEvents, BlockTrigger, BlockTriggerBase } from "./BlockTrigger"; @@ -50,8 +49,6 @@ export class TimedBlockTrigger settlementModule: SettlementModule | undefined, @inject("BlockQueue") blockQueue: BlockQueue, - @inject("BatchStorage") - batchStorage: BatchStorage, @injectOptional("SettlementStorage") settlementStorage: SettlementStorage | undefined, @inject("Mempool") @@ -62,7 +59,6 @@ export class TimedBlockTrigger batchProducerModule, settlementModule, blockQueue, - batchStorage, settlementStorage ); } diff --git a/packages/sequencer/src/settlement/SettlementModule.ts b/packages/sequencer/src/settlement/SettlementModule.ts index a4e34220c..0f8f2b2a0 100644 --- a/packages/sequencer/src/settlement/SettlementModule.ts +++ b/packages/sequencer/src/settlement/SettlementModule.ts @@ -28,6 +28,7 @@ import { AreProofsEnabled, DependencyFactory, } from "@proto-kit/common"; +// eslint-disable-next-line import/no-extraneous-dependencies import truncate from "lodash/truncate"; import { diff --git a/packages/sequencer/src/state/merkle/CachedMerkleTreeStore.ts b/packages/sequencer/src/state/merkle/CachedMerkleTreeStore.ts index 27c3ae6e1..677668abd 100644 --- a/packages/sequencer/src/state/merkle/CachedMerkleTreeStore.ts +++ b/packages/sequencer/src/state/merkle/CachedMerkleTreeStore.ts @@ -134,6 +134,8 @@ export class CachedMerkleTreeStore this.parent.writeNodes(writes); await this.parent.commit(); + + // Reset write cache this.resetWrittenNodes(); } diff --git a/packages/sequencer/src/storage/inmemory/InMemoryBlockStorage.ts b/packages/sequencer/src/storage/inmemory/InMemoryBlockStorage.ts index cf332bb44..9a14f7d8c 100644 --- a/packages/sequencer/src/storage/inmemory/InMemoryBlockStorage.ts +++ b/packages/sequencer/src/storage/inmemory/InMemoryBlockStorage.ts @@ -11,7 +11,6 @@ import type { BlockWithMaybeResult, BlockWithResult, } from "../model/Block"; -import { BlockWithPreviousResult } from "../../protocol/production/BatchProducerModule"; import { BatchStorage } from "../repositories/BatchStorage"; @injectable() @@ -65,7 +64,7 @@ export class InMemoryBlockStorage return result; } - public async getNewBlocks(): Promise { + public async getNewBlocks(): Promise { const latestBatch = await this.batchStorage.getLatestBatch(); let cursor = 0; @@ -89,11 +88,8 @@ export class InMemoryBlockStorage } return slice.map((block, index) => ({ - block: { - block, - result: results[index + 1]!, - }, - lastBlockResult: results[index], + block, + result: results[index + 1]!, })); } diff --git a/packages/sequencer/src/storage/model/Block.ts b/packages/sequencer/src/storage/model/Block.ts index ef7c09943..8f1da00a4 100644 --- a/packages/sequencer/src/storage/model/Block.ts +++ b/packages/sequencer/src/storage/model/Block.ts @@ -10,30 +10,41 @@ import { RollupMerkleTree } from "@proto-kit/common"; import { PendingTransaction } from "../../mempool/PendingTransaction"; import { UntypedStateTransition } from "../../protocol/production/helpers/UntypedStateTransition"; +export interface StateTransitionBatch { + stateTransitions: UntypedStateTransition[]; + applied: boolean; +} + export interface TransactionExecutionResult { tx: PendingTransaction; - 
stateTransitions: UntypedStateTransition[]; - protocolTransitions: UntypedStateTransition[]; + stateTransitions: StateTransitionBatch[]; status: Bool; statusMessage?: string; events: { eventName: string; data: Field[] }[]; } +// TODO Why is Block using Fields, but BlockResult bigints? Align that towards the best option + export interface Block { hash: Field; + previousBlockHash: Field | undefined; height: Field; networkState: { before: NetworkState; during: NetworkState; }; + transactions: TransactionExecutionResult[]; transactionsHash: Field; - toEternalTransactionsHash: Field; + fromEternalTransactionsHash: Field; fromBlockHashRoot: Field; fromMessagesHash: Field; + fromStateRoot: Field; + toEternalTransactionsHash: Field; toMessagesHash: Field; - previousBlockHash: Field | undefined; + + beforeBlockStateTransitions: UntypedStateTransition[]; } // eslint-disable-next-line @typescript-eslint/no-redeclare @@ -49,10 +60,11 @@ export const Block = { export interface BlockResult { blockHash: bigint; + witnessedRoots: [bigint]; stateRoot: bigint; blockHashRoot: bigint; afterNetworkState: NetworkState; - blockStateTransitions: UntypedStateTransition[]; + afterBlockStateTransitions: UntypedStateTransition[]; blockHashWitness: BlockHashMerkleTreeWitness; } @@ -68,6 +80,11 @@ export interface BlockWithMaybeResult { // eslint-disable-next-line @typescript-eslint/no-redeclare export const BlockWithResult = { + // toBlockProverState: ({ block, result }: BlockWithResult) => ({ + // stateRoot: result.stateRoot, + // + // } satisfies BlockProverStateCommitments), + createEmpty: () => ({ block: { @@ -84,7 +101,9 @@ export const BlockWithResult = { }, fromBlockHashRoot: Field(BlockHashMerkleTree.EMPTY_ROOT), fromMessagesHash: Field(0), + fromStateRoot: Field(RollupMerkleTree.EMPTY_ROOT), toMessagesHash: ACTIONS_EMPTY_HASH, + beforeBlockStateTransitions: [], previousBlockHash: undefined, }, @@ -92,9 +111,10 @@ export const BlockWithResult = { afterNetworkState: NetworkState.empty(), stateRoot: RollupMerkleTree.EMPTY_ROOT, blockHashRoot: BlockHashMerkleTree.EMPTY_ROOT, - blockStateTransitions: [], + afterBlockStateTransitions: [], blockHashWitness: BlockHashMerkleTree.WITNESS.dummy(), blockHash: 0n, + witnessedRoots: [RollupMerkleTree.EMPTY_ROOT], }, }) satisfies BlockWithResult, }; diff --git a/packages/sequencer/src/storage/repositories/BlockStorage.ts b/packages/sequencer/src/storage/repositories/BlockStorage.ts index 2347162ff..4803ba5a5 100644 --- a/packages/sequencer/src/storage/repositories/BlockStorage.ts +++ b/packages/sequencer/src/storage/repositories/BlockStorage.ts @@ -1,5 +1,4 @@ -import { BlockWithPreviousResult } from "../../protocol/production/BatchProducerModule"; -import { +import type { Block, BlockResult, BlockWithMaybeResult, @@ -9,7 +8,7 @@ import { export interface BlockQueue { pushBlock: (block: Block) => Promise; pushResult: (result: BlockResult) => Promise; - getNewBlocks: () => Promise; + getNewBlocks: () => Promise; getLatestBlockAndResult: () => Promise; } diff --git a/packages/sequencer/src/worker/worker/FlowTaskWorker.ts b/packages/sequencer/src/worker/worker/FlowTaskWorker.ts index 9dbe2a70d..447ca974d 100644 --- a/packages/sequencer/src/worker/worker/FlowTaskWorker.ts +++ b/packages/sequencer/src/worker/worker/FlowTaskWorker.ts @@ -61,8 +61,8 @@ export class FlowTaskWorker[]> const payload = error instanceof Error ? 
error.message : JSON.stringify(error); - log.info("Error in worker (detailed trace): "); - log.info(error); + log.error("Error in worker (detailed trace): "); + log.error(error); return { status: "error", diff --git a/packages/sequencer/src/worker/worker/LocalTaskWorkerModule.ts b/packages/sequencer/src/worker/worker/LocalTaskWorkerModule.ts index 0619918c4..5b55fddc8 100644 --- a/packages/sequencer/src/worker/worker/LocalTaskWorkerModule.ts +++ b/packages/sequencer/src/worker/worker/LocalTaskWorkerModule.ts @@ -136,7 +136,7 @@ export class VanillaTaskWorkerModules { RuntimeProvingTask, TransactionProvingTask, BlockReductionTask, - BlockBuildingTask: NewBlockTask, + NewBlockTask, CircuitCompilerTask, WorkerRegistrationTask, } satisfies TaskWorkerModulesRecord; @@ -154,8 +154,8 @@ export class VanillaTaskWorkerModules { StateTransitionTask: {}, RuntimeProvingTask: {}, TransactionProvingTask: {}, - BlockBuildingTask: {}, BlockReductionTask: {}, + NewBlockTask: {}, StateTransitionReductionTask: {}, SettlementProvingTask: {}, CircuitCompilerTask: {}, diff --git a/packages/sequencer/test/integration/BlockProduction.test.ts b/packages/sequencer/test/integration/BlockProduction.test.ts index 0e8077103..3dea83e6e 100644 --- a/packages/sequencer/test/integration/BlockProduction.test.ts +++ b/packages/sequencer/test/integration/BlockProduction.test.ts @@ -172,7 +172,7 @@ describe("block production", () => { }); it("should produce a dummy block proof", async () => { - expect.assertions(27); + expect.assertions(26); const privateKey = PrivateKey.random(); const publicKey = privateKey.toPublicKey(); @@ -192,8 +192,13 @@ describe("block production", () => { expect(block!.transactions[0].status.toBoolean()).toBe(true); expect(block!.transactions[0].statusMessage).toBeUndefined(); - expect(block!.transactions[0].stateTransitions).toHaveLength(1); - expect(block!.transactions[0].protocolTransitions).toHaveLength(2); + expect(block!.transactions[0].stateTransitions).toHaveLength(3); + expect( + block!.transactions[0].stateTransitions[0].stateTransitions + ).toHaveLength(2); + expect( + block!.transactions[0].stateTransitions[1].stateTransitions + ).toHaveLength(1); const latestBlockWithResult = await sequencer .resolve("BlockQueue") @@ -224,12 +229,13 @@ describe("block production", () => { balanceModule.balances.keyType, publicKey ); - const newState = await test.getState(balancesPath, "batch"); + // TODO + // const newState = await test.getState(balancesPath, "batch"); const newUnprovenState = await test.getState(balancesPath, "block"); - expect(newState).toBeDefined(); + // expect(newState).toBeDefined(); expect(newUnprovenState).toBeDefined(); - expect(UInt64.fromFields(newState!).toString()).toStrictEqual("100"); + // expect(UInt64.fromFields(newState!).toString()).toStrictEqual("100"); expect(UInt64.fromFields(newUnprovenState!).toString()).toStrictEqual( "100" ); @@ -241,7 +247,7 @@ describe("block production", () => { accountModule.accountState.keyType, publicKey ); - const newAccountState = await test.getState(accountStatePath, "batch"); + const newAccountState = await test.getState(accountStatePath, "block"); expect(newAccountState).toBeDefined(); expect(AccountState.fromFields(newAccountState!).nonce.toBigInt()).toBe(1n); @@ -266,7 +272,7 @@ describe("block production", () => { expect(batch!.blockHashes).toHaveLength(1); expect(batch!.proof.proof).toBe(MOCK_PROOF); - const state2 = await test.getState(balancesPath, "batch"); + const state2 = await test.getState(balancesPath, "block"); 
expect(state2).toBeDefined(); expect(UInt64.fromFields(state2!)).toStrictEqual(UInt64.from(200)); @@ -325,8 +331,12 @@ describe("block production", () => { expect(block!.transactions[0].status.toBoolean()).toBe(true); expect(block!.transactions[0].statusMessage).toBeUndefined(); - expect(block!.transactions[0].stateTransitions).toHaveLength(1); - expect(block!.transactions[0].protocolTransitions).toHaveLength(2); + expect( + block!.transactions[0].stateTransitions[0].stateTransitions + ).toHaveLength(2); + expect( + block!.transactions[0].stateTransitions[1].stateTransitions + ).toHaveLength(1); await test.produceBlock(); @@ -381,6 +391,8 @@ describe("block production", () => { const numberTxs = 3; it("should produce block with multiple transaction", async () => { + log.setLevel("TRACE"); + expect.assertions(6 + 4 * numberTxs); const privateKey = PrivateKey.random(); @@ -405,7 +417,8 @@ describe("block production", () => { expect(block!.transactions[index].status.toBoolean()).toBe(true); expect(block!.transactions[index].statusMessage).toBe(undefined); - const transitions = block!.transactions[index].stateTransitions; + const transitions = + block!.transactions[index].stateTransitions[1].stateTransitions; const fromBalance = increment * index; expect(transitions[0].fromValue.value[0].toBigInt()).toStrictEqual( @@ -427,7 +440,7 @@ describe("block production", () => { balanceModule.balances.keyType, publicKey ); - const newState = await test.getState(balancesPath, "batch"); + const newState = await test.getState(balancesPath, "block"); expect(newState).toBeDefined(); expect(UInt64.fromFields(newState!)).toStrictEqual( @@ -438,6 +451,8 @@ describe("block production", () => { it("should produce a block with a mix of failing and succeeding transactions and empty blocks", async () => { expect.assertions(7); + log.setLevel("TRACE"); + const pk1 = PrivateKey.random(); const pk2 = PrivateKey.random(); @@ -456,6 +471,8 @@ describe("block production", () => { await test.produceBlock(); const batch = await test.produceBatch(); + console.log("Pt1"); + expect(block).toBeDefined(); expect(batch!.blockHashes).toHaveLength(2); @@ -467,7 +484,7 @@ describe("block production", () => { balanceModule.balances.keyType, pk1.toPublicKey() ); - const newState1 = await test.getState(balancesPath1, "batch"); + const newState1 = await test.getState(balancesPath1, "block"); expect(newState1).toBeUndefined(); @@ -476,7 +493,7 @@ describe("block production", () => { balanceModule.balances.keyType, pk2.toPublicKey() ); - const newState2 = await test.getState(balancesPath2, "batch"); + const newState2 = await test.getState(balancesPath2, "block"); expect(newState2).toBeDefined(); expect(UInt64.fromFields(newState2!)).toStrictEqual(UInt64.from(100)); @@ -490,7 +507,7 @@ describe("block production", () => { // TODO Test with batch that only consists of empty blocks - it.skip.each([ + it.each([ [2, 1, 1], [1, 2, 1], [1, 1, 2], @@ -579,7 +596,7 @@ describe("block production", () => { expect(batch!.proof.proof).toBe(MOCK_PROOF); const supplyPath = Path.fromProperty("Balance", "totalSupply"); - const newState = await test.getState(supplyPath, "batch"); + const newState = await test.getState(supplyPath, "block"); expect(newState).toBeDefined(); expect(UInt64.fromFields(newState!)).toStrictEqual( @@ -595,7 +612,7 @@ describe("block production", () => { pk2 ); - const newBalance = await test.getState(balancesPath, "batch"); + const newBalance = await test.getState(balancesPath, "block"); expect(newBalance).toBeDefined(); 
expect(UInt64.fromFields(newBalance!)).toStrictEqual(UInt64.from(200)); @@ -618,8 +635,12 @@ describe("block production", () => { expect(block!.transactions[0].status.toBoolean()).toBe(true); expect(block!.transactions[0].statusMessage).toBeUndefined(); - expect(block!.transactions[0].stateTransitions).toHaveLength(0); - expect(block!.transactions[0].protocolTransitions).toHaveLength(2); + expect( + block!.transactions[0].stateTransitions[0].stateTransitions + ).toHaveLength(2); + expect( + block!.transactions[0].stateTransitions[1].stateTransitions + ).toHaveLength(0); const batch = await test.produceBatch(); diff --git a/packages/sequencer/test/integration/StorageIntegration.test.ts b/packages/sequencer/test/integration/StorageIntegration.test.ts index 5dc4cb36f..9fa4543b6 100644 --- a/packages/sequencer/test/integration/StorageIntegration.test.ts +++ b/packages/sequencer/test/integration/StorageIntegration.test.ts @@ -150,10 +150,9 @@ describe.each([["InMemory", InMemoryDatabase]])( expect(blocks).toHaveLength(1); - const { lastBlockResult, block } = blocks[0]; + const { block } = blocks[0]; - expect(lastBlockResult).toBeUndefined(); - expect(block.block.hash.toBigInt()).toStrictEqual( + expect(block.hash.toBigInt()).toStrictEqual( generatedBlock.hash.toBigInt() ); @@ -161,7 +160,7 @@ describe.each([["InMemory", InMemoryDatabase]])( "BlockStorage" ) as HistoricalBlockStorage & BlockStorage; const block2 = await blockStorage.getBlockAt( - Number(blocks[0].block.block.height.toString()) + Number(blocks[0].block.height.toString()) ); expectDefined(block2); @@ -170,8 +169,8 @@ describe.each([["InMemory", InMemoryDatabase]])( ); const stateDiff = collectStateDiff( - block.block.transactions.flatMap((tx) => - tx.stateTransitions.concat(tx.protocolTransitions) + block.transactions.flatMap((tx) => + tx.stateTransitions.flatMap((batch) => batch.stateTransitions) ) ); diff --git a/packages/sequencer/test/integration/mocks/ProtocolStateTestHook.ts b/packages/sequencer/test/integration/mocks/ProtocolStateTestHook.ts index 33ce0163e..b47b625bb 100644 --- a/packages/sequencer/test/integration/mocks/ProtocolStateTestHook.ts +++ b/packages/sequencer/test/integration/mocks/ProtocolStateTestHook.ts @@ -1,10 +1,12 @@ import { - BlockProverExecutionData, + AfterTransactionHookArguments, + BeforeTransactionHookArguments, protocolState, ProvableTransactionHook, StateMap, } from "@proto-kit/protocol"; import { Field } from "o1js"; +import { noop } from "@proto-kit/common"; /** * A hook used to test protocolstate inside the blockproduction tests @@ -12,8 +14,8 @@ import { Field } from "o1js"; export class ProtocolStateTestHook extends ProvableTransactionHook { @protocolState() methodIdInvocations = StateMap.from(Field, Field); - public async onTransaction( - executionData: BlockProverExecutionData + public async beforeTransaction( + executionData: BeforeTransactionHookArguments ): Promise { const { methodId } = executionData.transaction; const invocations = await this.methodIdInvocations.get(methodId); @@ -22,4 +24,8 @@ export class ProtocolStateTestHook extends ProvableTransactionHook { invocations.orElse(Field(0)).add(1) ); } + + public async afterTransaction(execution: AfterTransactionHookArguments) { + noop(); + } } diff --git a/packages/sequencer/test/production/tracing/StateTransitionTracingService.test.ts b/packages/sequencer/test/production/tracing/StateTransitionTracingService.test.ts new file mode 100644 index 000000000..37932aea0 --- /dev/null +++ 
b/packages/sequencer/test/production/tracing/StateTransitionTracingService.test.ts @@ -0,0 +1,366 @@ +import "reflect-metadata"; + +import { + AppliedBatchHashList, + Option, + ProtocolConstants, + StateTransition, + StateTransitionProver, + WitnessedRootHashList, +} from "@proto-kit/protocol"; +import { Bool, Field } from "o1js"; +import { mapSequential, RollupMerkleTree } from "@proto-kit/common"; +import { toStateTransitionsHash } from "@proto-kit/module"; + +import { + CachedMerkleTreeStore, + InMemoryAsyncMerkleTreeStore, + UntypedStateTransition, + StateTransitionTracingService, + TracingStateTransitionBatch, + StateTransitionProofParameters, +} from "../../../src"; + +function createST(obj: { + path: string; + from: string | undefined; + to: string | undefined; +}): UntypedStateTransition { + const st = StateTransition.fromTo( + Field(obj.path), + Option.from(Bool(obj.from !== undefined), Field(obj.from ?? "0"), Field), + Option.from(Bool(obj.to !== undefined), Field(obj.to ?? "0"), Field) + ); + if (obj.from === undefined) { + st.from.forceSome(); + } + return UntypedStateTransition.fromStateTransition(st); +} + +function createSTSimple( + path: string, + from: string | undefined, + to: string | undefined = undefined +) { + return createST({ + path, + from, + to, + }); +} + +async function applyBatchesToTree( + batches: TracingStateTransitionBatch[], + cached: CachedMerkleTreeStore +) { + const sts = batches + .filter((x) => x.applied) + .flatMap(({ stateTransitions }) => stateTransitions); + + const tree = new RollupMerkleTree(cached); + + await mapSequential(sts, async (st) => { + await cached.preloadKey(st.path.toBigInt()); + + if (st.to.isSome.toBoolean()) { + tree.setLeaf(st.path.toBigInt(), st.to.treeValue); + } + }); + + return tree; +} + +// async function prepareContainerForFlow() { +// const SequencerC = Sequencer.from({ +// modules: { +// TaskQueue: LocalTaskQueue, +// LocalTaskWorkerModule: LocalTaskWorkerModule.from({ +// StateTransitionTask, +// StateTransitionReductionTask, +// }), +// }, +// }); +// +// const c = container.createChildContainer(); +// +// c.register("Protocol", { +// useFactory: () => { +// const protocol = new (Protocol.from({ +// modules: VanillaProtocolModules.mandatoryModules({}), +// }))(); +// protocol.configure({ +// ...VanillaProtocolModules.mandatoryConfig(), +// }); +// protocol.create(() => c.createChildContainer()); +// +// return protocol; +// }, +// }); +// c.register("AreProofsEnabled", { +// useClass: InMemoryAreProofsEnabled, +// }); +// +// const sequencer = new SequencerC(); +// sequencer.configure({ +// LocalTaskWorkerModule: { +// StateTransitionTask: {}, +// StateTransitionReductionTask: {}, +// }, +// TaskQueue: {}, +// }); +// sequencer.create(() => c.createChildContainer()); +// await sequencer.start(); +// +// return sequencer; +// } + +const service = new StateTransitionTracingService(); + +describe("StateTransitionTracingService", () => { + const cases: { + batch: TracingStateTransitionBatch[]; + numSTs: number; + }[] = [ + { + batch: [ + { + witnessRoot: false, + applied: true, + stateTransitions: [createSTSimple("1", undefined, "1")], + }, + { + witnessRoot: true, + applied: true, + stateTransitions: [createSTSimple("100", undefined, "100")], + }, + { + witnessRoot: false, + applied: false, + stateTransitions: [], + }, + { + witnessRoot: true, + applied: true, + stateTransitions: [ + createSTSimple("2", undefined, "2"), + createSTSimple("3", undefined, "3"), + ], + }, + { + witnessRoot: false, + applied: true, + 
stateTransitions: [createSTSimple("2", "2", "4")], + }, + ], + numSTs: 5, + }, + ]; + + describe.each(cases)("root accumulator", ({ batch, numSTs }) => { + it("should match", () => {}); + }); + + describe.each(cases)("tracing two chunks of STs", ({ batch, numSTs }) => { + const store = new InMemoryAsyncMerkleTreeStore(); + const cached = new CachedMerkleTreeStore(store); + + let trace: StateTransitionProofParameters[]; + + beforeAll(async () => { + trace = await service.createMerkleTrace(cached, batch); + }); + + it("trace should have correct length", async () => { + expect(trace).toHaveLength( + Math.ceil(numSTs / ProtocolConstants.stateTransitionProverBatchSize) + ); + }); + + it("should set second publicInput correctly", async () => { + const tree = await applyBatchesToTree( + batch.slice(0, 4), + new CachedMerkleTreeStore(store) + ); + + expect(trace[1].publicInput.root.toString()).toStrictEqual( + tree.getRoot().toString() + ); + + const batchList = new AppliedBatchHashList(); + batchList.push({ + batchHash: toStateTransitionsHash(batch[0].stateTransitions), + applied: Bool(true), + }); + batchList.push({ + batchHash: toStateTransitionsHash(batch[1].stateTransitions), + applied: Bool(true), + }); + + const tempBatchListHash = batchList.commitment; + + batchList.push({ + batchHash: toStateTransitionsHash(batch[2].stateTransitions), + applied: Bool(true), + }); + batchList.push({ + batchHash: toStateTransitionsHash(batch[3].stateTransitions), + applied: Bool(true), + }); + + expect(trace[1].publicInput.batchesHash.toString()).toStrictEqual( + batchList.commitment.toString() + ); + + const witnessedRootsList = new WitnessedRootHashList(); + const tempTree = await applyBatchesToTree( + batch.slice(0, 2), + new CachedMerkleTreeStore(store) + ); + + witnessedRootsList.push({ + root: tempTree.getRoot(), + appliedBatchListState: tempBatchListHash, + }); + witnessedRootsList.push({ + root: tree.getRoot(), + appliedBatchListState: batchList.commitment, + }); + + expect(trace[1].publicInput.witnessedRootsHash.toString()).toStrictEqual( + witnessedRootsList.commitment.toString() + ); + }); + }); + + describe("tracing two separate sequences", () => { + const store = new InMemoryAsyncMerkleTreeStore(); + const cached = new CachedMerkleTreeStore(store); + + let trace1: StateTransitionProofParameters[]; + let trace2: StateTransitionProofParameters[]; + let tree1: RollupMerkleTree; + + const batches: TracingStateTransitionBatch[][] = [ + [ + { + witnessRoot: false, + applied: true, + stateTransitions: [createSTSimple("1", undefined, "1")], + }, + { + witnessRoot: true, + applied: true, + stateTransitions: [createSTSimple("100", undefined, "100")], + }, + ], + [ + { + witnessRoot: false, + applied: true, + stateTransitions: [createSTSimple("1", "1", "2")], + }, + ], + ]; + + beforeAll(async () => { + trace1 = await service.createMerkleTrace(cached, batches[0]); + + const cached2 = new CachedMerkleTreeStore(store); + tree1 = await applyBatchesToTree(batches[0], cached2); + + trace2 = await service.createMerkleTrace(cached, batches[1]); + }); + + it("check rootAccumulator is zero", () => { + expect(trace1[0].publicInput.witnessedRootsHash.toString()).toBe("0"); + expect(trace2[0].publicInput.witnessedRootsHash.toString()).toBe("0"); + }); + + it("check currentBatchHash is zero", () => { + expect(trace1[0].publicInput.currentBatchStateHash.toString()).toBe("0"); + expect(trace2[0].publicInput.currentBatchStateHash.toString()).toBe("0"); + }); + + it("check batchesHash is zero", () => { + 
expect(trace1[0].publicInput.batchesHash.toString()).toBe("0"); + expect(trace2[0].publicInput.batchesHash.toString()).toBe("0"); + }); + + it("check matching PI root", async () => { + expect(tree1.getRoot().toString()).toStrictEqual( + trace2[0].publicInput.root.toString() + ); + }); + }); + + describe("should trace correctly", () => { + const store = new InMemoryAsyncMerkleTreeStore(); + const cached = new CachedMerkleTreeStore(store); + + const batches: TracingStateTransitionBatch[] = [ + { + witnessRoot: false, + applied: true, + stateTransitions: [createSTSimple("1", undefined, "1")], + }, + { + witnessRoot: true, + applied: true, + stateTransitions: [createSTSimple("100", undefined, "100")], + }, + { + witnessRoot: false, + applied: true, + stateTransitions: [createSTSimple("100", "100", "200")], + }, + ]; + + let trace: StateTransitionProofParameters[]; + + beforeAll(async () => { + trace = await service.createMerkleTrace(cached, batches); + }); + + it("check trace well-formed", () => { + expect(trace).toHaveLength(1); + + batches.forEach(({ witnessRoot }, index) => { + expect( + trace[ + Math.floor(index / ProtocolConstants.stateTransitionProverBatchSize) + ].batch.batch[index].witnessRoot.toBoolean() + ).toBe(witnessRoot); + }); + }); + + it("check if batch is provable", async () => { + const prover = new StateTransitionProver().zkProgrammable; + + await mapSequential(trace, async (batch) => { + const result = await prover.proveBatch( + batch.publicInput, + batch.batch, + { witnesses: batch.merkleWitnesses }, + batch.batchState + ); + + expect(result).toBeDefined(); + + // Check that root matches + const tree = new RollupMerkleTree(cached); + expect(result.root.toString()).toStrictEqual(tree.getRoot().toString()); + }); + }); + + it("check that STs have been applied to the tree store", async () => { + const tracedTree = new RollupMerkleTree(cached); + + const cached2 = new CachedMerkleTreeStore(store); + const tree = await applyBatchesToTree(batches, cached2); + + expect(tracedTree.getRoot().toString()).toStrictEqual( + tree.getRoot().toString() + ); + }); + }); +}); diff --git a/packages/sequencer/test/settlement/Settlement.test.ts b/packages/sequencer/test/settlement/Settlement.test.ts index fd7ee60c5..00c61f3a1 100644 --- a/packages/sequencer/test/settlement/Settlement.test.ts +++ b/packages/sequencer/test/settlement/Settlement.test.ts @@ -4,7 +4,7 @@ import { MinaBaseLayerConfig } from "../../src"; import { settlementTestFn } from "./Settlement"; -describe.each(["mock-proofs", "signed"] as const)( +describe.each(["mock-proofs" /*, "signed"*/] as const)( "Settlement contracts: local blockchain - %s", (type) => { const network: MinaBaseLayerConfig = { diff --git a/packages/sequencer/test/settlement/Settlement.ts b/packages/sequencer/test/settlement/Settlement.ts index 69cef5ed7..d580c8ea1 100644 --- a/packages/sequencer/test/settlement/Settlement.ts +++ b/packages/sequencer/test/settlement/Settlement.ts @@ -1,9 +1,9 @@ -/* eslint-disable no-inner-declarations */ import { expectDefined, mapSequential, TypedClass, RollupMerkleTree, + sleep, } from "@proto-kit/common"; import { VanillaProtocolModules } from "@proto-kit/library"; import { Runtime } from "@proto-kit/module"; @@ -77,713 +77,715 @@ export const settlementTestFn = ( }, timeout: number = 120_000 ) => { - // eslint-disable-next-line no-lone-blocks - { - let testAccounts: PrivateKey[] = []; - - const sequencerKey = PrivateKey.random(); - const settlementKey = PrivateKey.random(); - const dispatchKey = PrivateKey.random(); 
- const minaBridgeKey = PrivateKey.random(); - // Only needed for tests with a custom token - const tokenBridgeKey = - tokenConfig === undefined ? minaBridgeKey : PrivateKey.random(); - const tokenOwnerKey = { - tokenOwner: PrivateKey.random(), - admin: PrivateKey.random(), - }; - const tokenOwner = - tokenConfig !== undefined - ? // eslint-disable-next-line new-cap - new tokenConfig.tokenOwner(tokenOwnerKey.tokenOwner.toPublicKey()) - : undefined; - - let trigger: ManualBlockTrigger; - let settlementModule: SettlementModule; - let bridgingModule: BridgingModule; - let blockQueue: BlockQueue; - - let feeStrategy: FeeStrategy; - - let blockSerializer: BlockProofSerializer; - - const bridgedTokenId = - tokenConfig === undefined ? TokenId.default : tokenOwner!.deriveTokenId(); - - function setupAppChain() { - const runtime = Runtime.from({ - modules: { - Balances, - Withdrawals, - }, - }); + let testAccounts: PrivateKey[] = []; + + const sequencerKey = PrivateKey.random(); + const settlementKey = PrivateKey.random(); + const dispatchKey = PrivateKey.random(); + const minaBridgeKey = PrivateKey.random(); + // Only needed for tests with a custom token + const tokenBridgeKey = + tokenConfig === undefined ? minaBridgeKey : PrivateKey.random(); + const tokenOwnerKey = { + tokenOwner: PrivateKey.random(), + admin: PrivateKey.random(), + }; + const tokenOwner = + tokenConfig !== undefined + ? // eslint-disable-next-line new-cap + new tokenConfig.tokenOwner(tokenOwnerKey.tokenOwner.toPublicKey()) + : undefined; + + let trigger: ManualBlockTrigger; + let settlementModule: SettlementModule; + let bridgingModule: BridgingModule; + let blockQueue: BlockQueue; + + let feeStrategy: FeeStrategy; + + let blockSerializer: BlockProofSerializer; + + const bridgedTokenId = + tokenConfig === undefined ? 
TokenId.default : tokenOwner!.deriveTokenId(); + + function setupAppChain() { + const runtime = Runtime.from({ + modules: { + Balances, + Withdrawals, + }, + }); - // eslint-disable-next-line @typescript-eslint/dot-notation - SettlementUtils.prototype["isSignedSettlement"] = () => - settlementType === "signed"; + // eslint-disable-next-line @typescript-eslint/dot-notation + SettlementUtils.prototype["isSignedSettlement"] = () => + settlementType === "signed"; - const sequencer = testingSequencerFromModules( - { - BaseLayer: MinaBaseLayer, - SettlementModule: SettlementModule, - OutgoingMessageQueue: WithdrawalQueue, - }, - { - SettlementProvingTask, - } - ); + const sequencer = testingSequencerFromModules( + { + BaseLayer: MinaBaseLayer, + SettlementModule: SettlementModule, + OutgoingMessageQueue: WithdrawalQueue, + }, + { + SettlementProvingTask, + } + ); - const appchain = AppChain.from({ - Runtime: runtime, - Sequencer: sequencer, - - Protocol: Protocol.from({ - modules: { - ...VanillaProtocolModules.mandatoryModules({}), - SettlementContractModule: SettlementContractModule.with({ - FungibleToken: FungibleTokenContractModule, - FungibleTokenAdmin: FungibleTokenAdminContractModule, - }), - }, - }), + const appchain = AppChain.from({ + Runtime: runtime, + Sequencer: sequencer, + Protocol: Protocol.from({ modules: { - Signer: InMemorySigner, - TransactionSender: InMemoryTransactionSender, - QueryTransportModule: StateServiceQueryModule, - NetworkStateTransportModule: BlockStorageNetworkStateModule, + ...VanillaProtocolModules.mandatoryModules({}), + SettlementContractModule: SettlementContractModule.with({ + FungibleToken: FungibleTokenContractModule, + FungibleTokenAdmin: FungibleTokenAdminContractModule, + }), }, - }); + }), - appchain.configure({ - Runtime: { - Balances: { - totalSupply: UInt64.from(1000), - }, - Withdrawals: {}, + modules: { + Signer: InMemorySigner, + TransactionSender: InMemoryTransactionSender, + QueryTransportModule: StateServiceQueryModule, + NetworkStateTransportModule: BlockStorageNetworkStateModule, + }, + }); + + appchain.configure({ + Runtime: { + Balances: { + totalSupply: UInt64.from(1000), }, + Withdrawals: {}, + }, - Sequencer: { - Database: {}, - BlockTrigger: {}, - Mempool: {}, - BatchProducerModule: {}, - LocalTaskWorkerModule: VanillaTaskWorkerModules.defaultConfig(), - OutgoingMessageQueue: {}, - BaseLayer: baseLayerConfig, - BlockProducerModule: {}, - FeeStrategy: {}, - SettlementModule: { - feepayer: sequencerKey, - }, - SequencerStartupModule: {}, + Sequencer: { + Database: {}, + BlockTrigger: {}, + Mempool: {}, + BatchProducerModule: {}, + LocalTaskWorkerModule: VanillaTaskWorkerModules.defaultConfig(), + OutgoingMessageQueue: {}, + BaseLayer: baseLayerConfig, + BlockProducerModule: {}, + FeeStrategy: {}, + SettlementModule: { + feepayer: sequencerKey, + }, + SequencerStartupModule: {}, - TaskQueue: { - simulatedDuration: 0, - }, + TaskQueue: { + simulatedDuration: 0, }, - Protocol: { - StateTransitionProver: {}, - BlockHeight: {}, - AccountState: {}, - BlockProver: {}, - LastStateRoot: {}, - SettlementContractModule: { - SettlementContract: {}, - BridgeContract: { - withdrawalStatePath: "Withdrawals.withdrawals", - withdrawalEventName: "withdrawal", - }, - DispatchContract: { - incomingMessagesMethods: { - deposit: "Balances.deposit", - }, + }, + Protocol: { + StateTransitionProver: {}, + BlockHeight: {}, + AccountState: {}, + BlockProver: {}, + LastStateRoot: {}, + SettlementContractModule: { + SettlementContract: {}, + BridgeContract: { + 
withdrawalStatePath: "Withdrawals.withdrawals", + withdrawalEventName: "withdrawal", + }, + DispatchContract: { + incomingMessagesMethods: { + deposit: "Balances.deposit", }, - FungibleToken: {}, - FungibleTokenAdmin: {}, }, + FungibleToken: {}, + FungibleTokenAdmin: {}, }, - TransactionSender: {}, - QueryTransportModule: {}, - Signer: { - signer: sequencerKey, - }, - NetworkStateTransportModule: {}, - }); - - return appchain; - } + }, + TransactionSender: {}, + QueryTransportModule: {}, + Signer: { + signer: sequencerKey, + }, + NetworkStateTransportModule: {}, + }); - let appChain: ReturnType; - - async function createBatch( - withTransactions: boolean, - customNonce: number = 0, - txs: PendingTransaction[] = [] - ) { - const mempool = appChain.sequencer.resolve("Mempool") as PrivateMempool; - if (withTransactions) { - const key = testAccounts[0]; - const tx = createTransaction({ - runtime: appChain.runtime, - method: ["Balances", "mint"], - privateKey: key, - args: [bridgedTokenId, key.toPublicKey(), UInt64.from(1e9 * 100)], - nonce: customNonce, - }); + return appchain; + } - await mempool.add(tx); - } - await mapSequential(txs, async (tx) => { - await mempool.add(tx); + let appChain: ReturnType; + + async function createBatch( + withTransactions: boolean, + customNonce: number = 0, + txs: PendingTransaction[] = [] + ) { + const mempool = appChain.sequencer.resolve("Mempool") as PrivateMempool; + if (withTransactions) { + const key = testAccounts[0]; + const tx = createTransaction({ + runtime: appChain.runtime, + method: ["Balances", "mint"], + privateKey: key, + args: [bridgedTokenId, key.toPublicKey(), UInt64.from(1e9 * 100)], + nonce: customNonce, }); - const result = await trigger.produceBlockAndBatch(); - const [block, batch] = result; - - console.log( - `block ${block?.height.toString()} ${block?.fromMessagesHash.toString()} -> ${block?.toMessagesHash.toString()}` - ); - const proof = await blockSerializer - .getBlockProofSerializer() - .fromJSONProof(batch!.proof); - console.log( - `block ${proof.publicInput.incomingMessagesHash} -> ${proof.publicOutput.incomingMessagesHash}` - ); - - return result; + await mempool.add(tx); } + await mapSequential(txs, async (tx) => { + await mempool.add(tx); + }); - beforeAll(async () => { - appChain = setupAppChain(); - - await appChain.start( - settlementType === "proven", - container.createChildContainer() - ); + const result = await trigger.produceBlockAndBatch(); + const [block, batch] = result; - settlementModule = appChain.sequencer.resolve( - "SettlementModule" - ) as SettlementModule; - bridgingModule = appChain.sequencer.resolve( - "BridgingModule" - ) as BridgingModule; + console.log( + `block ${block?.height.toString()} ${block?.fromMessagesHash.toString()} -> ${block?.toMessagesHash.toString()}` + ); + const proof = await blockSerializer + .getBlockProofSerializer() + .fromJSONProof(batch!.proof); + console.log( + `block ${proof.publicInput.incomingMessagesHash} -> ${proof.publicOutput.incomingMessagesHash}` + ); - trigger = - appChain.sequencer.dependencyContainer.resolve( - "BlockTrigger" - ); - blockQueue = appChain.sequencer.resolve("BlockQueue") as BlockQueue; - feeStrategy = appChain.sequencer.resolve("FeeStrategy") as FeeStrategy; + return result; + } - blockSerializer = - appChain.sequencer.dependencyContainer.resolve(BlockProofSerializer); + beforeAll(async () => { + console.log("Setup"); + await sleep(100); + appChain = setupAppChain(); + console.log("Start"); + await sleep(100); - const accountService = 
appChain.sequencer.dependencyContainer.resolve( - MinaBlockchainAccounts - ); - const accs = await accountService.getFundedAccounts(3); - testAccounts = accs.slice(1); + await appChain.start( + settlementType === "proven", + container.createChildContainer() + ); - console.log( - `Funding ${sequencerKey.toPublicKey().toBase58()} from ${accs[0].toPublicKey().toBase58()}` - ); + settlementModule = appChain.sequencer.resolve( + "SettlementModule" + ) as SettlementModule; + bridgingModule = appChain.sequencer.resolve( + "BridgingModule" + ) as BridgingModule; - await accountService.fundAccountFrom( - accs[0], - sequencerKey.toPublicKey(), - 20 * 1e9 + trigger = + appChain.sequencer.dependencyContainer.resolve( + "BlockTrigger" ); - }, timeout); - - afterAll(() => { - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - SettlementSmartContractBase.args = undefined as any; - }); + blockQueue = appChain.sequencer.resolve("BlockQueue") as BlockQueue; + feeStrategy = appChain.sequencer.resolve("FeeStrategy") as FeeStrategy; - let nonceCounter = 0; - let user0Nonce = 0; - let acc0L2Nonce = 0; + blockSerializer = + appChain.sequencer.dependencyContainer.resolve(BlockProofSerializer); - it( - "should deploy", - async () => { - // Deploy contract - await settlementModule.deploy( - settlementKey, - dispatchKey, - minaBridgeKey, - { - nonce: nonceCounter, - } - ); - - nonceCounter += 2; - - console.log("Deployed"); - }, - timeout * 2 + const accountService = appChain.sequencer.dependencyContainer.resolve( + MinaBlockchainAccounts ); + console.log("GetFunded"); + await sleep(100); + const accs = await accountService.getFundedAccounts(3); + testAccounts = accs.slice(1); - if (tokenConfig !== undefined) { - it( - "should deploy custom token owner", - async () => { - const permissions = - settlementType === "signed" - ? 
new SignedSettlementPermissions() - : new ProvenSettlementPermissions(); - - const tx = await Mina.transaction( - { - sender: sequencerKey.toPublicKey(), - memo: "Deploy custom token", - nonce: nonceCounter++, - fee: feeStrategy.getFee(), - }, - async () => { - AccountUpdate.fundNewAccount(sequencerKey.toPublicKey(), 2); - - const admin = new FungibleTokenAdmin( - tokenOwnerKey.admin.toPublicKey() - ); - await admin.deploy({ - verificationKey: undefined, - adminPublicKey: sequencerKey.toPublicKey(), - }); - admin.self.account.permissions.set( - permissions.bridgeContractToken() - ); - - await tokenOwner!.deploy({ - verificationKey: undefined, - src: "", - symbol: "TEST", - }); - tokenOwner!.self.account.permissions.set( - permissions.bridgeContractToken() - ); - } - ); - console.log(tx.toPretty()); + console.log( + `Funding ${sequencerKey.toPublicKey().toBase58()} from ${accs[0].toPublicKey().toBase58()}` + ); - settlementModule.utils.signTransaction( - tx, - [sequencerKey, tokenOwnerKey.tokenOwner, tokenOwnerKey.admin], - [tokenOwnerKey.tokenOwner, tokenOwnerKey.admin] - ); + console.log("FundAccount"); + await sleep(100); + await accountService.fundAccountFrom( + accs[0], + sequencerKey.toPublicKey(), + 20 * 1e9 + ); - await appChain.sequencer - .resolveOrFail("TransactionSender", MinaTransactionSender) - .proveAndSendTransaction(tx, "included"); - }, - timeout - ); + console.log("beforeAll finished"); + }, timeout); - it( - "should initialize custom token", - async () => { - const tx = await Mina.transaction( - { - sender: sequencerKey.toPublicKey(), - memo: "Initialized custom token owner", - nonce: nonceCounter++, - fee: feeStrategy.getFee(), - }, - async () => { - AccountUpdate.fundNewAccount(sequencerKey.toPublicKey(), 1); - - await tokenOwner!.initialize( - tokenOwnerKey.admin.toPublicKey(), - UInt8.from(9), - Bool(false) - ); - } - ); - console.log(tx.toPretty()); - settlementModule.utils.signTransaction( - tx, - [sequencerKey, tokenOwnerKey.tokenOwner, tokenOwnerKey.admin], - [tokenOwnerKey.tokenOwner, tokenOwnerKey.admin] - ); + afterAll(async () => { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + SettlementSmartContractBase.args = undefined as any; - await appChain.sequencer - .resolveOrFail("TransactionSender", MinaTransactionSender) - .proveAndSendTransaction(tx, "included"); - }, - timeout - ); + await appChain.close(); - it( - "should mint custom tokens", - async () => { - await settlementModule.utils.fetchContractAccounts( - { - address: tokenOwner!.address, - tokenId: tokenOwner!.tokenId, - }, - { - address: tokenOwner!.address, - tokenId: tokenOwner!.deriveTokenId(), - } - ); + console.log("Afterall"); + }); - const tx = await Mina.transaction( - { - sender: sequencerKey.toPublicKey(), - memo: "Mint custom token", - nonce: nonceCounter++, - fee: feeStrategy.getFee(), - }, - async () => { - AccountUpdate.fundNewAccount(sequencerKey.toPublicKey(), 1); - - await tokenOwner!.mint( - testAccounts[0].toPublicKey(), - UInt64.from(100e9) - ); - // tokenOwner!.self.body.incrementNonce = Bool(false); - } - ); - settlementModule.utils.signTransaction( - tx, - [sequencerKey], - [tokenOwnerKey.tokenOwner, tokenOwnerKey.admin] - ); + let nonceCounter = 0; + let user0Nonce = 0; + let acc0L2Nonce = 0; - await appChain.sequencer - .resolveOrFail("TransactionSender", MinaTransactionSender) - .proveAndSendTransaction(tx, "included"); - }, - timeout - ); + it( + "should deploy", + async () => { + // Deploy contract + await settlementModule.deploy(settlementKey, 
dispatchKey, minaBridgeKey, { + nonce: nonceCounter, + }); - it( - "should deploy custom token bridge", - async () => { - await settlementModule.deployTokenBridge( - tokenOwner!, - tokenOwnerKey.tokenOwner, - tokenBridgeKey, - { - nonce: nonceCounter++, - } - ); - console.log( - `Token bridge address: ${tokenBridgeKey.toPublicKey().toBase58()} @ ${tokenOwner!.deriveTokenId().toString()}` - ); + nonceCounter += 2; - expect(tokenOwner!.deriveTokenId().toString()).toStrictEqual( - bridgedTokenId.toString() - ); - }, - timeout - ); - } + console.log("Deployed"); + }, + timeout * 2 + ); + if (tokenConfig !== undefined) { it( - "should settle", + "should deploy custom token owner", async () => { - try { - const [, batch] = await createBatch(true); - acc0L2Nonce++; + const permissions = + settlementType === "signed" + ? new SignedSettlementPermissions() + : new ProvenSettlementPermissions(); - const input = BlockProverPublicInput.fromFields( - batch!.proof.publicInput.map((x) => Field(x)) - ); - expect(input.stateRoot.toBigInt()).toStrictEqual( - RollupMerkleTree.EMPTY_ROOT - ); - - const lastBlock = await blockQueue.getLatestBlockAndResult(); + const tx = await Mina.transaction( + { + sender: sequencerKey.toPublicKey(), + memo: "Deploy custom token", + nonce: nonceCounter++, + fee: feeStrategy.getFee(), + }, + async () => { + AccountUpdate.fundNewAccount(sequencerKey.toPublicKey(), 2); - await trigger.settle(batch!); - nonceCounter++; + const admin = new FungibleTokenAdmin( + tokenOwnerKey.admin.toPublicKey() + ); + await admin.deploy({ + verificationKey: undefined, + adminPublicKey: sequencerKey.toPublicKey(), + }); + admin.self.account.permissions.set( + permissions.bridgeContractToken() + ); - // TODO Check Smartcontract tx layout (call to dispatch with good preconditions, etc) + await tokenOwner!.deploy({ + verificationKey: undefined, + src: "", + symbol: "TEST", + }); + tokenOwner!.self.account.permissions.set( + permissions.bridgeContractToken() + ); + } + ); + console.log(tx.toPretty()); - console.log("Block settled"); + settlementModule.utils.signTransaction( + tx, + [sequencerKey, tokenOwnerKey.tokenOwner, tokenOwnerKey.admin], + [tokenOwnerKey.tokenOwner, tokenOwnerKey.admin] + ); - const { settlement } = settlementModule.getContracts(); - expectDefined(lastBlock); - expectDefined(lastBlock.result); - expect(settlement.networkStateHash.get().toBigInt()).toStrictEqual( - lastBlock!.result.afterNetworkState.hash().toBigInt() - ); - expect(settlement.stateRoot.get().toBigInt()).toStrictEqual( - lastBlock!.result.stateRoot - ); - expect(settlement.blockHashRoot.get().toBigInt()).toStrictEqual( - lastBlock!.result.blockHashRoot - ); - } catch (e) { - console.error(e); - throw e; - } + await appChain.sequencer + .resolveOrFail("TransactionSender", MinaTransactionSender) + .proveAndSendTransaction(tx, "included"); }, timeout ); it( - "should include deposit", + "should initialize custom token", async () => { - try { - const { settlement, dispatch } = settlementModule.getContracts(); - const bridge = new BridgeContract( - tokenBridgeKey.toPublicKey(), - bridgedTokenId - ); - - const userKey = testAccounts[0]; - - const depositAmount = 10n * BigInt(1e9); + const tx = await Mina.transaction( + { + sender: sequencerKey.toPublicKey(), + memo: "Initialized custom token owner", + nonce: nonceCounter++, + fee: feeStrategy.getFee(), + }, + async () => { + AccountUpdate.fundNewAccount(sequencerKey.toPublicKey(), 1); - const contractBalanceBefore = bridge.account.balance.get(); - const 
userL2BalanceBefore = - await appChain.query.runtime.Balances.balances.get( - BalancesKey.from(bridgedTokenId, userKey.toPublicKey()) + await tokenOwner!.initialize( + tokenOwnerKey.admin.toPublicKey(), + UInt8.from(9), + Bool(false) ); + } + ); + console.log(tx.toPretty()); + settlementModule.utils.signTransaction( + tx, + [sequencerKey, tokenOwnerKey.tokenOwner, tokenOwnerKey.admin], + [tokenOwnerKey.tokenOwner, tokenOwnerKey.admin] + ); - const tree = await TokenBridgeTree.buildTreeFromEvents(dispatch); - const index = tree.getIndex(bridgedTokenId); - const attestation = new TokenBridgeAttestation({ - index: Field(index), - witness: tree.getWitness(index), - }); - - const tx = await Mina.transaction( - { - sender: userKey.toPublicKey(), - fee: 0.01 * 1e9, - nonce: user0Nonce++, - memo: "deposit", - }, - async () => { - const au = AccountUpdate.createSigned( - userKey.toPublicKey(), - bridgedTokenId - ); - au.balance.subInPlace(UInt64.from(depositAmount)); - - await dispatch.deposit( - UInt64.from(depositAmount), - bridgedTokenId, - tokenBridgeKey.toPublicKey(), - attestation, - userKey.toPublicKey() - ); - - if (tokenConfig !== undefined) { - await tokenOwner!.approveAccountUpdates([au, dispatch.self]); - } - } - ); - - settlementModule.utils.signTransaction( - tx, - [userKey], - [tokenOwnerKey.tokenOwner, dispatchKey] - ); - - await appChain.sequencer - .resolveOrFail("TransactionSender", MinaTransactionSender) - .proveAndSendTransaction(tx, "included"); - - const actions = await Mina.fetchActions(dispatch.address); - const balanceDiff = bridge.account.balance - .get() - .sub(contractBalanceBefore); - - expect(actions).toHaveLength(1); - expect(balanceDiff.toBigInt()).toBe(depositAmount); - - const [, batch] = await createBatch(false); - - console.log("Settling"); - - await trigger.settle(batch!); - nonceCounter++; - - const [, batch2] = await createBatch(false); - - const networkstateHash = Mina.activeInstance.getAccount( - settlement.address - ); - console.log("On-chain values"); - console.log( - networkstateHash.zkapp!.appState.map((x) => x.toString()) - ); - - console.log( - `Empty Network State ${NetworkState.empty().hash().toString()}` - ); - console.log(batch!.toNetworkState.hash().toString()); - console.log(batch2!.fromNetworkState.hash().toString()); - - expect(batch!.toNetworkState.hash().toString()).toStrictEqual( - batch2!.fromNetworkState.hash().toString() - ); - - expect(batch2!.blockHashes).toHaveLength(1); + await appChain.sequencer + .resolveOrFail("TransactionSender", MinaTransactionSender) + .proveAndSendTransaction(tx, "included"); + }, + timeout + ); - await trigger.settle(batch2!); - nonceCounter++; + it( + "should mint custom tokens", + async () => { + await settlementModule.utils.fetchContractAccounts( + { + address: tokenOwner!.address, + tokenId: tokenOwner!.tokenId, + }, + { + address: tokenOwner!.address, + tokenId: tokenOwner!.deriveTokenId(), + } + ); - const balance = await appChain.query.runtime.Balances.balances.get( - BalancesKey.from(bridgedTokenId, userKey.toPublicKey()) - ); + const tx = await Mina.transaction( + { + sender: sequencerKey.toPublicKey(), + memo: "Mint custom token", + nonce: nonceCounter++, + fee: feeStrategy.getFee(), + }, + async () => { + AccountUpdate.fundNewAccount(sequencerKey.toPublicKey(), 1); - expectDefined(balance); + await tokenOwner!.mint( + testAccounts[0].toPublicKey(), + UInt64.from(100e9) + ); + // tokenOwner!.self.body.incrementNonce = Bool(false); + } + ); + settlementModule.utils.signTransaction( + tx, + 
[sequencerKey], + [tokenOwnerKey.tokenOwner, tokenOwnerKey.admin] + ); - const l2balanceDiff = balance.sub( - userL2BalanceBefore ?? UInt64.from(0) - ); - expect(l2balanceDiff.toBigInt()).toStrictEqual(depositAmount); - } catch (e) { - console.error(e); - throw e; - } + await appChain.sequencer + .resolveOrFail("TransactionSender", MinaTransactionSender) + .proveAndSendTransaction(tx, "included"); }, timeout ); it( - "should process withdrawal", + "should deploy custom token bridge", async () => { - const bridgingContract = - await bridgingModule.getBridgeContract(bridgedTokenId); + await settlementModule.deployTokenBridge( + tokenOwner!, + tokenOwnerKey.tokenOwner, + tokenBridgeKey, + { + nonce: nonceCounter++, + } + ); + console.log( + `Token bridge address: ${tokenBridgeKey.toPublicKey().toBase58()} @ ${tokenOwner!.deriveTokenId().toString()}` + ); - const userKey = testAccounts[0]; + expect(tokenOwner!.deriveTokenId().toString()).toStrictEqual( + bridgedTokenId.toString() + ); + }, + timeout + ); + } - const withdrawAmount = 10 * 1e9; + it( + "should settle", + async () => { + try { + const [, batch] = await createBatch(true); + acc0L2Nonce++; - const withdrawalTx = createTransaction({ - runtime: appChain.runtime, - method: ["Withdrawals", "withdraw"], - args: [ - userKey.toPublicKey(), - UInt64.from(withdrawAmount), - bridgedTokenId, - ], - nonce: acc0L2Nonce + 1, - privateKey: userKey, - }); - const [block, batch] = await createBatch(true, acc0L2Nonce, [ - withdrawalTx, - ]); - acc0L2Nonce += 2; + const input = BlockProverPublicInput.fromFields( + batch!.proof.publicInput.map((x) => Field(x)) + ); + expect(input.stateRoot.toBigInt()).toStrictEqual( + RollupMerkleTree.EMPTY_ROOT + ); - console.log("Test networkstate"); - console.log(NetworkState.toJSON(block!.networkState.during)); - console.log(NetworkState.toJSON(batch!.toNetworkState)); + const lastBlock = await blockQueue.getLatestBlockAndResult(); await trigger.settle(batch!); nonceCounter++; - const txs = await bridgingModule.sendRollupTransactions({ - nonce: nonceCounter, - bridgingContractPrivateKey: tokenBridgeKey, - tokenOwnerPrivateKey: tokenOwnerKey.tokenOwner, - tokenOwner: tokenOwner, - }); - - nonceCounter += 2; + // TODO Check Smartcontract tx layout (call to dispatch with good preconditions, etc) - expect(txs).toHaveLength(1); + console.log("Block settled"); - if (baseLayerConfig.network.type !== "local") { - await fetchAccount({ - publicKey: userKey.toPublicKey(), - tokenId: bridgingContract.deriveTokenId(), - }); - } - const account = Mina.getAccount( - userKey.toPublicKey(), - bridgingContract.deriveTokenId() + const { settlement } = settlementModule.getContracts(); + expectDefined(lastBlock); + expectDefined(lastBlock.result); + expect(settlement.networkStateHash.get().toBigInt()).toStrictEqual( + lastBlock!.result.afterNetworkState.hash().toBigInt() ); - - expect(account.balance.toBigInt()).toStrictEqual( - BigInt(withdrawAmount) + expect(settlement.stateRoot.get().toBigInt()).toStrictEqual( + lastBlock!.result.stateRoot + ); + expect(settlement.blockHashRoot.get().toBigInt()).toStrictEqual( + lastBlock!.result.blockHashRoot + ); + } catch (e) { + console.error(e); + throw e; + } + }, + timeout + ); + + it( + "should include deposit", + async () => { + try { + const { settlement, dispatch } = settlementModule.getContracts(); + const bridge = new BridgeContract( + tokenBridgeKey.toPublicKey(), + bridgedTokenId ); - }, - timeout * 2 - ); - - it( - "should be able to redeem withdrawal", - async () => { - const 
bridgingContract = - await bridgingModule.getBridgeContract(bridgedTokenId); const userKey = testAccounts[0]; - // Mina token test case - if (baseLayerConfig.network.type !== "local") { - await fetchAccount({ - publicKey: userKey.toPublicKey(), - tokenId: bridgedTokenId, - }); - } - const balanceBefore = Mina.getAccount( - userKey.toPublicKey(), - bridgedTokenId - ).balance.toBigInt(); + const depositAmount = 10n * BigInt(1e9); + + const contractBalanceBefore = bridge.account.balance.get(); + const userL2BalanceBefore = + await appChain.query.runtime.Balances.balances.get( + BalancesKey.from(bridgedTokenId, userKey.toPublicKey()) + ); - const amount = BigInt(1e9 * 10); + const tree = await TokenBridgeTree.buildTreeFromEvents(dispatch); + const index = tree.getIndex(bridgedTokenId); + const attestation = new TokenBridgeAttestation({ + index: Field(index), + witness: tree.getWitness(index), + }); - const fee = feeStrategy.getFee(); const tx = await Mina.transaction( { sender: userKey.toPublicKey(), + fee: 0.01 * 1e9, nonce: user0Nonce++, - fee, - memo: "Redeem withdrawal", + memo: "deposit", }, async () => { - const mintAU = AccountUpdate.createSigned( + const au = AccountUpdate.createSigned( userKey.toPublicKey(), bridgedTokenId ); - mintAU.balance.addInPlace(amount); - await bridgingContract.redeem(mintAU); + au.balance.subInPlace(UInt64.from(depositAmount)); + + await dispatch.deposit( + UInt64.from(depositAmount), + bridgedTokenId, + tokenBridgeKey.toPublicKey(), + attestation, + userKey.toPublicKey() + ); - // Approve AUs if necessary if (tokenConfig !== undefined) { - await tokenOwner!.approveAccountUpdate(bridgingContract.self); + await tokenOwner!.approveAccountUpdates([au, dispatch.self]); } } ); - const signed = settlementModule.utils.signTransaction( + settlementModule.utils.signTransaction( tx, [userKey], - [tokenBridgeKey, tokenOwnerKey.tokenOwner] + [tokenOwnerKey.tokenOwner, dispatchKey] ); await appChain.sequencer .resolveOrFail("TransactionSender", MinaTransactionSender) - .proveAndSendTransaction(signed, "included"); + .proveAndSendTransaction(tx, "included"); - if (baseLayerConfig.network.type !== "local") { - await fetchAccount({ - publicKey: userKey.toPublicKey(), - tokenId: bridgedTokenId, - }); - } - const balanceAfter = Mina.getAccount( - userKey.toPublicKey(), - bridgedTokenId - ).balance.toBigInt(); + const actions = await Mina.fetchActions(dispatch.address); + const balanceDiff = bridge.account.balance + .get() + .sub(contractBalanceBefore); + + expect(actions).toHaveLength(1); + expect(balanceDiff.toBigInt()).toBe(depositAmount); + + const [, batch] = await createBatch(false); + + console.log("Settling"); - // tx fee - const minaFees = BigInt(fee); + await trigger.settle(batch!); + nonceCounter++; + + const [, batch2] = await createBatch(false); - expect(balanceAfter - balanceBefore).toBe( - amount - (tokenConfig === undefined ? 
minaFees : 0n) + const networkstateHash = Mina.activeInstance.getAccount( + settlement.address ); - }, - timeout - ); - } + console.log("On-chain values"); + console.log(networkstateHash.zkapp!.appState.map((x) => x.toString())); + + console.log( + `Empty Network State ${NetworkState.empty().hash().toString()}` + ); + console.log(batch!.toNetworkState.hash().toString()); + console.log(batch2!.fromNetworkState.hash().toString()); + + expect(batch!.toNetworkState.hash().toString()).toStrictEqual( + batch2!.fromNetworkState.hash().toString() + ); + + expect(batch2!.blockHashes).toHaveLength(1); + + await trigger.settle(batch2!); + nonceCounter++; + + const balance = await appChain.query.runtime.Balances.balances.get( + BalancesKey.from(bridgedTokenId, userKey.toPublicKey()) + ); + + expectDefined(balance); + + const l2balanceDiff = balance.sub( + userL2BalanceBefore ?? UInt64.from(0) + ); + expect(l2balanceDiff.toBigInt()).toStrictEqual(depositAmount); + } catch (e) { + console.error(e); + throw e; + } + }, + timeout + ); + + it( + "should process withdrawal", + async () => { + const bridgingContract = + await bridgingModule.getBridgeContract(bridgedTokenId); + + const userKey = testAccounts[0]; + + const withdrawAmount = 10 * 1e9; + + const withdrawalTx = createTransaction({ + runtime: appChain.runtime, + method: ["Withdrawals", "withdraw"], + args: [ + userKey.toPublicKey(), + UInt64.from(withdrawAmount), + bridgedTokenId, + ], + nonce: acc0L2Nonce + 1, + privateKey: userKey, + }); + const [block, batch] = await createBatch(true, acc0L2Nonce, [ + withdrawalTx, + ]); + acc0L2Nonce += 2; + + console.log("Test networkstate"); + console.log(NetworkState.toJSON(block!.networkState.during)); + console.log(NetworkState.toJSON(batch!.toNetworkState)); + + await trigger.settle(batch!); + nonceCounter++; + + const txs = await bridgingModule.sendRollupTransactions({ + nonce: nonceCounter, + bridgingContractPrivateKey: tokenBridgeKey, + tokenOwnerPrivateKey: tokenOwnerKey.tokenOwner, + tokenOwner: tokenOwner, + }); + + nonceCounter += 2; + + expect(txs).toHaveLength(1); + + if (baseLayerConfig.network.type !== "local") { + await fetchAccount({ + publicKey: userKey.toPublicKey(), + tokenId: bridgingContract.deriveTokenId(), + }); + } + const account = Mina.getAccount( + userKey.toPublicKey(), + bridgingContract.deriveTokenId() + ); + + expect(account.balance.toBigInt()).toStrictEqual(BigInt(withdrawAmount)); + }, + timeout * 2 + ); + + it( + "should be able to redeem withdrawal", + async () => { + const bridgingContract = + await bridgingModule.getBridgeContract(bridgedTokenId); + + const userKey = testAccounts[0]; + + // Mina token test case + if (baseLayerConfig.network.type !== "local") { + await fetchAccount({ + publicKey: userKey.toPublicKey(), + tokenId: bridgedTokenId, + }); + } + const balanceBefore = Mina.getAccount( + userKey.toPublicKey(), + bridgedTokenId + ).balance.toBigInt(); + + const amount = BigInt(1e9 * 10); + + const fee = feeStrategy.getFee(); + const tx = await Mina.transaction( + { + sender: userKey.toPublicKey(), + nonce: user0Nonce++, + fee, + memo: "Redeem withdrawal", + }, + async () => { + const mintAU = AccountUpdate.createSigned( + userKey.toPublicKey(), + bridgedTokenId + ); + mintAU.balance.addInPlace(amount); + await bridgingContract.redeem(mintAU); + + // Approve AUs if necessary + if (tokenConfig !== undefined) { + await tokenOwner!.approveAccountUpdate(bridgingContract.self); + } + } + ); + + const signed = settlementModule.utils.signTransaction( + tx, + [userKey], + 
[tokenBridgeKey, tokenOwnerKey.tokenOwner] + ); + + await appChain.sequencer + .resolveOrFail("TransactionSender", MinaTransactionSender) + .proveAndSendTransaction(signed, "included"); + + if (baseLayerConfig.network.type !== "local") { + await fetchAccount({ + publicKey: userKey.toPublicKey(), + tokenId: bridgedTokenId, + }); + } + const balanceAfter = Mina.getAccount( + userKey.toPublicKey(), + bridgedTokenId + ).balance.toBigInt(); + + // tx fee + const minaFees = BigInt(fee); + + expect(balanceAfter - balanceBefore).toBe( + amount - (tokenConfig === undefined ? minaFees : 0n) + ); + }, + timeout + ); }; /* eslint-enable no-inner-declarations */ diff --git a/packages/sequencer/test/state/state/CachedStateService.test.ts b/packages/sequencer/test/state/state/CachedStateService.test.ts index e4fda9fb2..d076c154b 100644 --- a/packages/sequencer/test/state/state/CachedStateService.test.ts +++ b/packages/sequencer/test/state/state/CachedStateService.test.ts @@ -25,8 +25,6 @@ describe("cachedStateService", () => { }); it("should preload through multiple layers of services", async () => { - await mask2.preloadKey(Field(5)); - const record = await mask2.get(Field(5)); expectDefined(record); @@ -51,11 +49,8 @@ describe("cachedStateService", () => { }); it("should delete correctly through multiple layers of services", async () => { - await mask2.preloadKey(Field(5)); - await mask2.set(Field(5), undefined); - await mask1.preloadKey(Field(5)); await expect(mask1.get(Field(5))).resolves.toHaveLength(2); await mask2.mergeIntoParent();