diff --git a/.github/workflows/pull-request-develop.yml b/.github/workflows/pull-request-develop.yml index 933c037b8..77ea12c2c 100644 --- a/.github/workflows/pull-request-develop.yml +++ b/.github/workflows/pull-request-develop.yml @@ -113,7 +113,7 @@ jobs: image: redis:6.2-alpine ports: - 6379:6379 - lighnet: + lightnet: image: o1labs/mina-local-network:compatible-latest-lightnet env: RUN_ARCHIVE_NODE: true @@ -126,6 +126,15 @@ jobs: - 8181:8181 # archive endpoints - 8282:8282 + minio: + # fixme: let's not depend on external unofficial image + image: lazybit/minio + ports: + - 9000:9000 + env: + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + options: --name=minio --health-cmd "curl http://localhost:9000/minio/health/live" steps: - uses: actions/checkout@v3 @@ -151,4 +160,4 @@ jobs: polling-interval-ms: 5000 - name: "Integration tests" - run: npm run test:integration \ No newline at end of file + run: npm run test:integration diff --git a/package-lock.json b/package-lock.json index f5626d39d..3d20a5e90 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8094,6 +8094,13 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/@zxing/text-encoding": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@zxing/text-encoding/-/text-encoding-0.9.0.tgz", + "integrity": "sha512-U/4aVJ2mxI0aDNI8Uq0wEhMgY+u4CNtEb0om3+y3+niDAsoTCOB33UF0sxpzqzdqXLqmvc+vZyAt4O8pPdfkwA==", + "license": "(Unlicense OR Apache-2.0)", + "optional": true + }, "node_modules/abbrev": { "version": "2.0.0", "license": "ISC", @@ -8600,7 +8607,6 @@ "version": "1.0.7", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", - "dev": true, "license": "MIT", "dependencies": { "possible-typed-array-names": "^1.0.0" @@ -8942,6 +8948,15 @@ "license": "MIT", "peer": true }, + "node_modules/block-stream2": { + "version": 
"2.1.0", + "resolved": "https://registry.npmjs.org/block-stream2/-/block-stream2-2.1.0.tgz", + "integrity": "sha512-suhjmLI57Ewpmq00qaygS8UgEq2ly2PCItenIyhMqVjo4t4pGzqMvfgJuX8iWTeSDdfSSqS6j38fL4ToNL7Pfg==", + "license": "MIT", + "dependencies": { + "readable-stream": "^3.4.0" + } + }, "node_modules/body": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/body/-/body-5.1.0.tgz", @@ -9051,6 +9066,12 @@ "node": ">=8" } }, + "node_modules/browser-or-node": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/browser-or-node/-/browser-or-node-2.1.1.tgz", + "integrity": "sha512-8CVjaLJGuSKMVTxJ2DpBl5XnlNDiT4cQFeuCJJrvJmts9YrTZDizTX7PjC2s6W4x+MBGZeEY6dGMrF04/6Hgqg==", + "license": "MIT" + }, "node_modules/browserslist": { "version": "4.23.1", "funding": [ @@ -9122,6 +9143,15 @@ "ieee754": "^1.1.13" } }, + "node_modules/buffer-crc32": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", + "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/buffer-from": { "version": "1.1.2", "license": "MIT" @@ -9347,7 +9377,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", - "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.0", @@ -10763,7 +10792,6 @@ }, "node_modules/define-data-property": { "version": "1.1.4", - "dev": true, "license": "MIT", "dependencies": { "es-define-property": "^1.0.0", @@ -12532,6 +12560,24 @@ "fast-decode-uri-component": "^1.0.1" } }, + "node_modules/fast-xml-parser": { + "version": "4.5.3", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz", + "integrity": 
"sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.1.1" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, "node_modules/fastq": { "version": "1.17.1", "license": "ISC", @@ -12850,7 +12896,6 @@ "version": "0.3.5", "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", - "dev": true, "license": "MIT", "dependencies": { "is-callable": "^1.2.7" @@ -14525,7 +14570,6 @@ }, "node_modules/has-property-descriptors": { "version": "1.0.2", - "dev": true, "license": "MIT", "dependencies": { "es-define-property": "^1.0.0" @@ -15881,6 +15925,22 @@ "node": ">=0.10.0" } }, + "node_modules/is-arguments": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.2.0.tgz", + "integrity": "sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-array-buffer": { "version": "3.0.5", "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", @@ -15980,7 +16040,6 @@ }, "node_modules/is-callable": { "version": "1.2.7", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -16370,7 +16429,6 @@ "version": "1.1.15", "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", - "dev": true, "license": "MIT", "dependencies": { "which-typed-array": "^1.1.16" @@ -20573,6 +20631,91 @@ 
"node": ">= 6" } }, + "node_modules/minio": { + "version": "8.0.6", + "resolved": "https://registry.npmjs.org/minio/-/minio-8.0.6.tgz", + "integrity": "sha512-sOeh2/b/XprRmEtYsnNRFtOqNRTPDvYtMWh+spWlfsuCV/+IdxNeKVUMKLqI7b5Dr07ZqCPuaRGU/rB9pZYVdQ==", + "license": "Apache-2.0", + "dependencies": { + "async": "^3.2.4", + "block-stream2": "^2.1.0", + "browser-or-node": "^2.1.1", + "buffer-crc32": "^1.0.0", + "eventemitter3": "^5.0.1", + "fast-xml-parser": "^4.4.1", + "ipaddr.js": "^2.0.1", + "lodash": "^4.17.21", + "mime-types": "^2.1.35", + "query-string": "^7.1.3", + "stream-json": "^1.8.0", + "through2": "^4.0.2", + "web-encoding": "^1.1.5", + "xml2js": "^0.5.0 || ^0.6.2" + }, + "engines": { + "node": "^16 || ^18 || >=20" + } + }, + "node_modules/minio/node_modules/decode-uri-component": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", + "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==", + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/minio/node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "license": "MIT" + }, + "node_modules/minio/node_modules/filter-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz", + "integrity": "sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/minio/node_modules/ipaddr.js": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.3.0.tgz", + "integrity": "sha512-Zv/pA+ciVFbCSBBjGfaKUya/CcGmUHzTydLMaTwrUUEM2DIEO3iZvueGxmacvmN50fGpGVKeTXpb2LcYQxeVdg==", + "license": 
"MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/minio/node_modules/query-string": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.3.tgz", + "integrity": "sha512-hh2WYhq4fi8+b+/2Kg9CEge4fDPvHS534aOOvOZeQ3+Vf2mCFsaFBYj0i+iXcAq6I9Vzp5fjMFBlONvayDC1qg==", + "license": "MIT", + "dependencies": { + "decode-uri-component": "^0.2.2", + "filter-obj": "^1.1.0", + "split-on-first": "^1.0.0", + "strict-uri-encode": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minio/node_modules/split-on-first": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", + "integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/minipass": { "version": "7.1.2", "license": "ISC", @@ -23307,7 +23450,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -24394,7 +24536,6 @@ }, "node_modules/readable-stream": { "version": "3.6.2", - "dev": true, "license": "MIT", "dependencies": { "inherits": "^2.0.3", @@ -24890,6 +25031,15 @@ "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", "optional": true }, + "node_modules/sax": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.4.tgz", + "integrity": "sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==", + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=11.0.0" + } + }, "node_modules/scheduler": { "version": "0.23.2", "license": 
"MIT", @@ -25052,7 +25202,6 @@ }, "node_modules/set-function-length": { "version": "1.2.2", - "dev": true, "license": "MIT", "dependencies": { "define-data-property": "^1.1.4", @@ -25754,15 +25903,38 @@ "node": ">= 0.4" } }, + "node_modules/stream-chain": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/stream-chain/-/stream-chain-2.2.5.tgz", + "integrity": "sha512-1TJmBx6aSWqZ4tx7aTpBDXK0/e2hhcNSTV8+CbFJtDjbb+I1mZ8lHit0Grw9GRT+6JbIrrDd8esncgBi8aBXGA==", + "license": "BSD-3-Clause" + }, + "node_modules/stream-json": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/stream-json/-/stream-json-1.9.1.tgz", + "integrity": "sha512-uWkjJ+2Nt/LO9Z/JyKZbMusL8Dkh97uUBTv3AJQ74y07lVahLY4eEFsPsE97pxYBwr8nnjMAIch5eqI0gPShyw==", + "license": "BSD-3-Clause", + "dependencies": { + "stream-chain": "^2.2.5" + } + }, "node_modules/streamsearch": { "version": "1.1.0", "engines": { "node": ">=10.0.0" } }, + "node_modules/strict-uri-encode": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz", + "integrity": "sha512-QwiXZgpRcKkhTj2Scnn++4PKtWsH0kpzZ62L2R6c/LUVYv7hVnZqcg2+sMuT6R7Jusu1vviK/MFsu6kNJfWlEQ==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, "node_modules/string_decoder": { "version": "1.3.0", - "dev": true, "license": "MIT", "dependencies": { "safe-buffer": "~5.2.0" @@ -26011,6 +26183,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strnum": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz", + "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, "node_modules/strong-log-transformer": { "version": "2.1.0", "dev": true, @@ -26534,7 +26718,6 @@ }, "node_modules/through2": { "version": "4.0.2", - "dev": true, 
"license": "MIT", "dependencies": { "readable-stream": "3" @@ -27639,6 +27822,19 @@ } } }, + "node_modules/util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "license": "MIT" @@ -27800,6 +27996,18 @@ "defaults": "^1.0.3" } }, + "node_modules/web-encoding": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/web-encoding/-/web-encoding-1.1.5.tgz", + "integrity": "sha512-HYLeVCdJ0+lBYV2FvNZmv3HJ2Nt0QYXqZojk3d9FJOLkwnuhzM9tmamh8d7HPM8QqjKH8DeHkFTx+CFlWpZZDA==", + "license": "MIT", + "dependencies": { + "util": "^0.12.3" + }, + "optionalDependencies": { + "@zxing/text-encoding": "0.9.0" + } + }, "node_modules/webidl-conversions": { "version": "3.0.1", "license": "BSD-2-Clause" @@ -27922,7 +28130,6 @@ "version": "1.1.19", "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", - "dev": true, "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", @@ -28221,6 +28428,28 @@ } } }, + "node_modules/xml2js": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz", + "integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==", + "license": "MIT", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": 
"sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "license": "MIT", + "engines": { + "node": ">=4.0" + } + }, "node_modules/xtend": { "version": "4.0.2", "license": "MIT", @@ -28440,6 +28669,7 @@ "version": "0.1.1-develop.833+397881ed", "license": "MIT", "dependencies": { + "cachedir": "^2.4.0", "lodash": "^4.17.21", "loglevel": "^1.8.1", "reflect-metadata": "^0.1.13", @@ -28455,6 +28685,15 @@ "tsyringe": "^4.10.0" } }, + "packages/common/node_modules/cachedir": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.4.0.tgz", + "integrity": "sha512-9EtFOZR8g22CL7BWjJ9BUx1+A/djkofnyW3aOXZORNW2kxoUpx2h+uN2cOqwPmFhnpVmxg+KW2OjOSgChTEvsQ==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "packages/deployment": { "name": "@proto-kit/deployment", "version": "0.1.1-develop.833+397881ed", @@ -28462,11 +28701,13 @@ "dependencies": { "@types/yargs": "^17.0.29", "loglevel": "^1.8.1", + "minio": "^8.0.6", "reflect-metadata": "^0.1.13", "yargs": "^17.7.2" }, "devDependencies": { - "@jest/globals": "^29.5.0" + "@jest/globals": "^29.5.0", + "cachedir": "^2.4.0" }, "peerDependencies": { "@proto-kit/common": "*", @@ -28478,6 +28719,16 @@ "tsyringe": "^4.10.0" } }, + "packages/deployment/node_modules/cachedir": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.4.0.tgz", + "integrity": "sha512-9EtFOZR8g22CL7BWjJ9BUx1+A/djkofnyW3aOXZORNW2kxoUpx2h+uN2cOqwPmFhnpVmxg+KW2OjOSgChTEvsQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "packages/explorer": { "name": "@proto-kit/explorer", "version": "0.1.0", @@ -28780,7 +29031,8 @@ "reflect-metadata": "^0.1.13" }, "devDependencies": { - "@jest/globals": "^29.5.0" + "@jest/globals": "^29.5.0", + "cachedir": "^2.4.0" }, "peerDependencies": { "@proto-kit/api": "*", @@ -28795,6 +29047,16 @@ "o1js": "^2.10.0", "tsyringe": "^4.10.0" } + }, + "packages/stack/node_modules/cachedir": { + 
"version": "2.4.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.4.0.tgz", + "integrity": "sha512-9EtFOZR8g22CL7BWjJ9BUx1+A/djkofnyW3aOXZORNW2kxoUpx2h+uN2cOqwPmFhnpVmxg+KW2OjOSgChTEvsQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } } } } diff --git a/packages/common/package.json b/packages/common/package.json index c94147b3a..2a3b8d49d 100644 --- a/packages/common/package.json +++ b/packages/common/package.json @@ -17,6 +17,7 @@ "access": "public" }, "dependencies": { + "cachedir": "^2.4.0", "lodash": "^4.17.21", "loglevel": "^1.8.1", "reflect-metadata": "^0.1.13", diff --git a/packages/common/src/cache/CacheManifest.ts b/packages/common/src/cache/CacheManifest.ts new file mode 100644 index 000000000..f01ae6cf5 --- /dev/null +++ b/packages/common/src/cache/CacheManifest.ts @@ -0,0 +1,57 @@ +import path from "node:path"; +import fs from "node:fs"; + +import { injectable, singleton } from "tsyringe"; +import cachedir from "cachedir"; + +@injectable() +@singleton() +export class CacheManifest { + public manifestFile(): string { + return path.format({ + dir: cachedir("o1js"), + name: "protokit-cache-manifest", + ext: "json", + }); + } + + manifestRead = false; + + manifest: string[] = []; + + private readManifest(): string[] { + const file = this.manifestFile(); + if (fs.existsSync(file)) { + return JSON.parse(fs.readFileSync(file).toString()); + } + return []; + } + + private ensureManifestRead() { + if (!this.manifestRead) { + this.manifest = this.readManifest(); + this.manifestRead = true; + } + } + + public getManifest() { + this.ensureManifestRead(); + + return this.manifest; + } + + public writeToManifest(program: string) { + this.ensureManifestRead(); + + if (!this.manifest.includes(program)) { + this.manifest.push(program); + fs.writeFileSync(this.manifestFile(), JSON.stringify(this.manifest)); + } + } + + public includes(program: string): boolean { + this.ensureManifestRead(); + + return 
this.manifest.includes(program); + } +} diff --git a/packages/common/src/cache/ProxyCache.ts b/packages/common/src/cache/ProxyCache.ts new file mode 100644 index 000000000..6c5a97f43 --- /dev/null +++ b/packages/common/src/cache/ProxyCache.ts @@ -0,0 +1,30 @@ +import { CacheHeader, Cache as O1Cache } from "o1js"; + +export class ProxyCache implements O1Cache { + private realCache = O1Cache.FileSystemDefault; + + private log: string[] = []; + + canWrite = true; + + debug = false; + + cacheDirectory = this.realCache.cacheDirectory; + + public read(header: CacheHeader): Uint8Array | undefined { + return this.realCache.read(header); + } + + public write(header: CacheHeader, value: Uint8Array): void { + this.log.push(header.persistentId); + return this.realCache.write(header, value); + } + + public getLog() { + return this.log; + } + + public startLog() { + this.log = []; + } +} diff --git a/packages/common/src/cache/RemoteCache.ts b/packages/common/src/cache/RemoteCache.ts new file mode 100644 index 000000000..9fc73e50d --- /dev/null +++ b/packages/common/src/cache/RemoteCache.ts @@ -0,0 +1,9 @@ +import { Readable } from "node:stream"; + +export interface RemoteCache { + storeObject(program: string, object: string, file: Readable): Promise; + + getObjects(program: string): Promise; + + readObject(program: string, object: string): Promise; +} diff --git a/packages/common/src/cache/RemoteCacheCompiler.ts b/packages/common/src/cache/RemoteCacheCompiler.ts new file mode 100644 index 000000000..71c98893a --- /dev/null +++ b/packages/common/src/cache/RemoteCacheCompiler.ts @@ -0,0 +1,131 @@ +import fs from "node:fs"; +import path from "node:path"; + +import { inject, injectable, Lifecycle, scoped } from "tsyringe"; +import cachedir from "cachedir"; + +import { log } from "../log"; +import { mapSequential } from "../utils"; +import { + CompileArtifact, + PlainZkProgram, +} from "../zkProgrammable/ZkProgrammable"; + +import { RemoteCache } from "./RemoteCache"; +import { 
ProxyCache } from "./ProxyCache"; +import { CacheManifest } from "./CacheManifest"; + +@injectable() +@scoped(Lifecycle.ContainerScoped) +export class RemoteCacheCompiler { + public constructor( + @inject("RemoteCache", { isOptional: true }) + private readonly remoteCache: RemoteCache | undefined, + private readonly manifest: CacheManifest + ) { + if (remoteCache === undefined) { + log.debug( + "No remote cache configured, only using local file system cache for circuits" + ); + } else { + log.debug("Using remote cache for circuit caching"); + } + } + + private getFileFromObjectName(object: string): string { + const dir = cachedir("o1js"); + + return path.format({ + dir, + base: object, + }); + } + + private async download(remoteCache: RemoteCache, name: string) { + const objects = await remoteCache.getObjects(name); + + log.debug(`Downloading ${objects.length} cached objects for ${name}`); + + await mapSequential(objects, async (object) => { + const readable = await remoteCache.readObject(name, object); + const file = this.getFileFromObjectName(object); + + const writeStream = fs.createWriteStream(file); + readable.pipe(writeStream); + + await new Promise((res) => { + writeStream.on("close", res); + }); + }); + } + + private async uploadFile( + remoteCache: RemoteCache, + program: string, + fileName: string + ) { + const file = path.resolve(cachedir("o1js"), fileName); + const readStream = fs.createReadStream(file); + await remoteCache.storeObject(program, fileName, readStream); + } + + private async upload( + remoteCache: RemoteCache, + name: string, + identifiers: string[] + ) { + await mapSequential(identifiers, async (identifier) => { + await this.uploadFile(remoteCache, name, identifier); + await this.uploadFile(remoteCache, name, `${identifier}.header`); + }); + } + + private isSRSFile(file: string): boolean { + return file.includes("srs-") || file.includes("lagrange-"); + } + + private async compileWithRemoteCache( + remoteCache: RemoteCache, + program: Pick 
+ ) { + const { name } = program; + + if (!this.manifest.includes("srs")) { + await this.download(remoteCache, "srs"); + } + + if (!this.manifest.includes(name)) { + await this.download(remoteCache, name); + } + + const cache = new ProxyCache(); + cache.startLog(); + + const result = await program.compile({ + cache, + }); + + const files = cache.getLog(); + log.debug("Uploading files", files); + + const srsFiles = files.filter((file) => this.isSRSFile(file)); + await this.upload(remoteCache, "srs", srsFiles); + this.manifest.writeToManifest("srs"); + + const circuitFiles = files.filter((file) => !this.isSRSFile(file)); + await this.upload(remoteCache, name, circuitFiles); + this.manifest.writeToManifest(name); + + return result; + } + + public async compileWithCache( + program: Pick + ): Promise { + if (this.remoteCache !== undefined) { + return await this.compileWithRemoteCache(this.remoteCache, program); + } else { + return await program.compile(); + } + } +} diff --git a/packages/common/src/compiling/AtomicCompileHelper.ts b/packages/common/src/compiling/AtomicCompileHelper.ts index 8d881941f..47f4272af 100644 --- a/packages/common/src/compiling/AtomicCompileHelper.ts +++ b/packages/common/src/compiling/AtomicCompileHelper.ts @@ -1,3 +1,5 @@ +import { inject, injectable } from "tsyringe"; + import { AreProofsEnabled, CompileArtifact, @@ -6,6 +8,7 @@ import { import { isSubtypeOfName } from "../utils"; import { TypedClass } from "../types"; import { log } from "../log"; +import { RemoteCacheCompiler } from "../cache/RemoteCacheCompiler"; export type ArtifactRecord = Record; @@ -14,8 +17,13 @@ export type CompileTarget = { compile: () => Promise; }; +@injectable() export class AtomicCompileHelper { - public constructor(private readonly areProofsEnabled: AreProofsEnabled) {} + public constructor( + @inject("AreProofsEnabled") + private readonly areProofsEnabled: AreProofsEnabled, + private readonly remoteCacheCompiler: RemoteCacheCompiler + ) {} private 
compilationPromises: { [key: string]: Promise; @@ -44,7 +52,8 @@ export class AtomicCompileHelper { ) ) { log.time(`Compiling ${name}`); - this.compilationPromises[name] = contract.compile(); + this.compilationPromises[name] = + this.remoteCacheCompiler.compileWithCache(contract); newPromise = true; } else { log.debug(`Compiling ${name} - mock`); diff --git a/packages/common/src/compiling/CompileRegistry.ts b/packages/common/src/compiling/CompileRegistry.ts index a85766ffe..86e249bfa 100644 --- a/packages/common/src/compiling/CompileRegistry.ts +++ b/packages/common/src/compiling/CompileRegistry.ts @@ -1,9 +1,6 @@ -import { inject, injectable, singleton } from "tsyringe"; +import { injectable, singleton } from "tsyringe"; -import { - AreProofsEnabled, - CompileArtifact, -} from "../zkProgrammable/ZkProgrammable"; +import { CompileArtifact } from "../zkProgrammable/ZkProgrammable"; import { ArtifactRecord, @@ -19,14 +16,7 @@ import { @injectable() @singleton() export class CompileRegistry { - public constructor( - @inject("AreProofsEnabled") - private readonly areProofsEnabled: AreProofsEnabled - ) { - this.compiler = new AtomicCompileHelper(this.areProofsEnabled); - } - - private compiler: AtomicCompileHelper; + public constructor(private readonly compiler: AtomicCompileHelper) {} private artifacts: ArtifactRecord = {}; diff --git a/packages/common/src/index.ts b/packages/common/src/index.ts index fa82cc1f2..57cf8778d 100644 --- a/packages/common/src/index.ts +++ b/packages/common/src/index.ts @@ -29,3 +29,7 @@ export * from "./compiling/AtomicCompileHelper"; export * from "./compiling/CompileRegistry"; export * from "./compiling/CompilableModule"; export * from "./compiling/services/ChildVerificationKeyService"; +export * from "./cache/RemoteCache"; +export * from "./cache/RemoteCacheCompiler"; +export * from "./cache/CacheManifest"; +export * from "./cache/ProxyCache"; diff --git a/packages/common/src/trees/sparse/RollupMerkleTree.ts 
b/packages/common/src/trees/sparse/RollupMerkleTree.ts index 1974e7078..c701a091d 100644 --- a/packages/common/src/trees/sparse/RollupMerkleTree.ts +++ b/packages/common/src/trees/sparse/RollupMerkleTree.ts @@ -7,6 +7,17 @@ import { TypedClass } from "../../types"; import { MerkleTreeStore } from "./MerkleTreeStore"; import { InMemoryMerkleTreeStorage } from "./InMemoryMerkleTreeStorage"; +/** + * More efficient version of `maybeSwapBad` which + * reuses an intermediate variable + */ +export function maybeSwap(b: Bool, x: Field, y: Field): [Field, Field] { + const m = b.toField().mul(x.sub(y)); // b*(x - y) + const x1 = y.add(m); // y + b*(x - y) + const y2 = x.sub(m); // x - b*(x - y) = x + b*(y - x) + return [x1, y2]; +} + export class StructTemplate extends Struct({ path: Provable.Array(Field, 0), isLeft: Provable.Array(Bool, 0), @@ -22,6 +33,11 @@ export interface AbstractMerkleWitness extends StructTemplate { */ calculateRoot(hash: Field): Field; + calculateRootIncrement( + index: Field, + leaf: Field + ): [Field, AbstractMerkleWitness]; + /** * Calculates the index of the leaf node that belongs to this Witness. * @returns Index of the leaf. @@ -119,6 +135,24 @@ export interface AbstractMerkleTreeClass { * It also holds the Witness class under tree.WITNESS */ export function createMerkleTree(height: number): AbstractMerkleTreeClass { + function generateZeroes() { + const zeroes = [0n]; + for (let index = 1; index < height; index += 1) { + const previousLevel = Field(zeroes[index - 1]); + zeroes.push(Poseidon.hash([previousLevel, previousLevel]).toBigInt()); + } + return zeroes; + } + + let zeroCache: bigint[] | undefined = undefined; + + function getZeroes() { + if (zeroCache === undefined) { + zeroCache = generateZeroes(); + } + return zeroCache; + } + /** * The {@link RollupMerkleWitness} class defines a circuit-compatible base class * for [Merkle Witness'](https://computersciencewiki.org/index.php/Merkle_proof). 
@@ -147,7 +181,7 @@ export function createMerkleTree(height: number): AbstractMerkleTreeClass { for (let index = 1; index < n; ++index) { const isLeft = this.isLeft[index - 1]; - // eslint-disable-next-line @typescript-eslint/no-use-before-define + const [left, right] = maybeSwap(isLeft, hash, this.path[index - 1]); hash = Poseidon.hash([left, right]); } @@ -155,6 +189,66 @@ export function createMerkleTree(height: number): AbstractMerkleTreeClass { return hash; } + public calculateRootIncrement( + leafIndex: Field, + leaf: Field + ): [Field, RollupMerkleWitness] { + // This won't generate any constraints, since it's purely a computation on constants + const zero = getZeroes(); + + if (zero.length === 0) { + throw new Error("Zeroes not initialized"); + } + const zeroes = zero.map((x) => Field(x)); + + let hash = leaf; + const n = this.height(); + + let notDiverged = Bool(true); + const newPath = leafIndex.add(1).toBits(); + newPath.push(Bool(false)); + + const newSiblings: Field[] = []; + const newIsLefts: Bool[] = []; + + for (let index = 0; index < n - 1; ++index) { + const isLeft = this.isLeft[index]; + const sibling = this.path[index]; + + const newIsLeft = newPath[index].not(); + + // Bool(true) default for root level + let convergesNextLevel = Bool(true); + if (index < n - 2) { + convergesNextLevel = newPath[index + 1] + .equals(this.isLeft[index + 1]) + .not(); + } + + const nextSibling = Provable.if( + convergesNextLevel.and(notDiverged), + hash, + Provable.if(notDiverged, zeroes[index], sibling) + ); + + notDiverged = notDiverged.and(convergesNextLevel.not()); + + newSiblings.push(nextSibling); + newIsLefts.push(newIsLeft); + + const [left, right] = maybeSwap(isLeft, hash, sibling); + hash = Poseidon.hash([left, right]); + } + + return [ + hash, + new RollupMerkleWitness({ + isLeft: newIsLefts, + path: newSiblings, + }), + ]; + } + /** * Calculates the index of the leaf node that belongs to this Witness. * @returns Index of the leaf. 
@@ -215,6 +309,7 @@ export function createMerkleTree(height: number): AbstractMerkleTreeClass { }); } } + return class AbstractRollupMerkleTree implements AbstractMerkleTree { public static HEIGHT = height; @@ -238,13 +333,7 @@ export function createMerkleTree(height: number): AbstractMerkleTreeClass { public constructor(store: MerkleTreeStore) { this.store = store; - this.zeroes = [0n]; - for (let index = 1; index < AbstractRollupMerkleTree.HEIGHT; index += 1) { - const previousLevel = Field(this.zeroes[index - 1]); - this.zeroes.push( - Poseidon.hash([previousLevel, previousLevel]).toBigInt() - ); - } + this.zeroes = generateZeroes(); } public assertIndexRange(index: bigint) { @@ -414,14 +503,3 @@ export function createMerkleTree(height: number): AbstractMerkleTreeClass { export class RollupMerkleTree extends createMerkleTree(256) {} export class RollupMerkleTreeWitness extends RollupMerkleTree.WITNESS {} - -/** - * More efficient version of `maybeSwapBad` which - * reuses an intermediate variable - */ -export function maybeSwap(b: Bool, x: Field, y: Field): [Field, Field] { - const m = b.toField().mul(x.sub(y)); // b*(x - y) - const x1 = y.add(m); // y + b*(x - y) - const y2 = x.sub(m); // x - b*(x - y) = x + b*(y - x) - return [x1, y2]; -} diff --git a/packages/common/src/utils.ts b/packages/common/src/utils.ts index 0576aacb5..27a0a036c 100644 --- a/packages/common/src/utils.ts +++ b/packages/common/src/utils.ts @@ -87,7 +87,11 @@ export function yieldSequential( array, async ([state, collectedTargets], curr, index, arr) => { const [newState, addition] = await callbackfn(state, curr, index, arr); - return [newState, collectedTargets.concat(addition)]; + // The reason we wrap this in an array here is for a special case where Target is a tuple + // or array itself. 
In this case, js interprets by flattening the Value in the array + // (which it does when a function (like concat) uses a spread operator and the + // input is an array) + return [newState, collectedTargets.concat([addition])]; }, [initialValue, []] ); @@ -105,6 +109,18 @@ export function mapSequential( }, Promise.resolve([])); } +export function unzip(array: [A, B][]): [A[], B[]] { + const as = array.map(([a]) => a); + const bs = array.map(([, b]) => b); + return [as, bs]; +} + +export function assertSizeOneOrTwo(arr: T[]): asserts arr is [T] | [T, T] { + if (!(arr.length === 1 || arr.length === 2)) { + throw new Error("Given array not size 1 or 2"); + } +} + /** * Computes a dummy value for the given value type. * diff --git a/packages/common/src/zkProgrammable/ZkProgrammable.ts b/packages/common/src/zkProgrammable/ZkProgrammable.ts index 3106a6f92..1736af95f 100644 --- a/packages/common/src/zkProgrammable/ZkProgrammable.ts +++ b/packages/common/src/zkProgrammable/ZkProgrammable.ts @@ -1,4 +1,11 @@ -import { ZkProgram, FlexibleProvablePure, Proof, Field, Provable } from "o1js"; +import { + ZkProgram, + FlexibleProvablePure, + Proof, + Field, + Provable, + Cache as O1Cache, +} from "o1js"; import { Memoize } from "typescript-memoize"; import { log } from "../log"; @@ -30,7 +37,14 @@ export interface Verify { } export interface Compile { - (): Promise; + (options?: { + cache?: O1Cache; + forceRecompile?: boolean; + proofsEnabled?: boolean; + withRuntimeTables?: boolean; + numChunks?: number; + lazyMode?: boolean; + }): Promise; } export interface PlainZkProgram< @@ -93,10 +107,10 @@ export const MOCK_VERIFICATION_KEY = dummyVerificationKey(); export function compileToMockable( compile: Compile, { areProofsEnabled }: AreProofsEnabled -): () => Promise { - return async () => { +): Compile { + return async (...args) => { if (areProofsEnabled) { - return await compile(); + return await compile(...args); } return { diff --git 
a/packages/common/test/trees/MerkleTree.test.ts b/packages/common/test/trees/MerkleTree.test.ts index 221c8d70e..b2c673eb8 100644 --- a/packages/common/test/trees/MerkleTree.test.ts +++ b/packages/common/test/trees/MerkleTree.test.ts @@ -217,4 +217,35 @@ describe.each([4, 16, 256])("cachedMerkleTree - %s", (height) => { tree.getNode(0, index); }).toThrow("Index greater than maximum leaf number"); }); + + it("witness incrementing", () => { + tree.setLeaf(0n, Field(3256)); + tree.setLeaf(1n, Field(3256)); + tree.setLeaf(2n, Field(3256)); + + const witness = tree.getWitness(3n); + + const [root, newWitness] = witness.calculateRootIncrement( + Field(3), + Field(1234) + ); + tree.setLeaf(3n, Field(1234)); + + expect(tree.getRoot().toString()).toStrictEqual(root.toString()); + expect(newWitness.calculateIndex().toString()).toStrictEqual("4"); + + const [root2, newWitness2] = newWitness.calculateRootIncrement( + Field(4), + Field(4321) + ); + tree.setLeaf(4n, Field(4321)); + + expect(tree.getRoot().toString()).toStrictEqual(root2.toString()); + expect(newWitness2.calculateIndex().toString()).toStrictEqual("5"); + + const root3 = newWitness2.calculateRoot(Field(555)); + tree.setLeaf(5n, Field(555)); + + expect(tree.getRoot().toString()).toStrictEqual(root3.toString()); + }); }); diff --git a/packages/deployment/docker/docker-compose.yml b/packages/deployment/docker/docker-compose.yml index 999c284d3..3dc4cf715 100644 --- a/packages/deployment/docker/docker-compose.yml +++ b/packages/deployment/docker/docker-compose.yml @@ -10,4 +10,6 @@ include: - ./lightnet/docker-compose.yml - - ./monitoring/docker-compose.yml \ No newline at end of file + - ./monitoring/docker-compose.yml + + - ./minio/docker-compose.yml diff --git a/packages/deployment/docker/minio/docker-compose.yml b/packages/deployment/docker/minio/docker-compose.yml new file mode 100644 index 000000000..907a57a99 --- /dev/null +++ b/packages/deployment/docker/minio/docker-compose.yml @@ -0,0 +1,16 @@ +# Copyright 
Broadcom, Inc. All Rights Reserved. +# SPDX-License-Identifier: APACHE-2.0 + +services: + minio: + image: quay.io/minio/minio:RELEASE.2024-12-18T13-15-44Z + command: server --console-address ":9001" /mnt/data + ports: + - '9000:9000' + - '9001:9001' + volumes: + - '../data/minio:/mnt/data' + +volumes: + minio_data: + driver: local diff --git a/packages/deployment/package.json b/packages/deployment/package.json index 94f86fcac..360e6777e 100644 --- a/packages/deployment/package.json +++ b/packages/deployment/package.json @@ -10,7 +10,8 @@ "lint": "eslint ./src ./test", "test:file": "node --experimental-vm-modules --experimental-wasm-modules ../../node_modules/jest/bin/jest.js", "test": "npm run test:file -- ./test/**", - "test:watch": "npm run test:file -- ./test/** --watch" + "test:watch": "npm run test:file -- ./test/** --watch", + "test:integration": "npm run test:file -- ./test-integration/** --runInBand" }, "main": "dist/index.js", "publishConfig": { @@ -19,6 +20,7 @@ "dependencies": { "@types/yargs": "^17.0.29", "loglevel": "^1.8.1", + "minio": "^8.0.6", "reflect-metadata": "^0.1.13", "yargs": "^17.7.2" }, @@ -32,7 +34,8 @@ "tsyringe": "^4.10.0" }, "devDependencies": { - "@jest/globals": "^29.5.0" + "@jest/globals": "^29.5.0", + "cachedir": "^2.4.0" }, "gitHead": "397881ed5d8f98f5005bcd7be7f5a12b3bc6f956" } diff --git a/packages/deployment/src/cache/S3RemoteCache.ts b/packages/deployment/src/cache/S3RemoteCache.ts new file mode 100644 index 000000000..efeca6fc8 --- /dev/null +++ b/packages/deployment/src/cache/S3RemoteCache.ts @@ -0,0 +1,92 @@ +import { Readable } from "stream"; +import { finished } from "node:stream/promises"; + +import * as Minio from "minio"; +import { RemoteCache } from "@proto-kit/common"; +import { SequencerModule, sequencerModule } from "@proto-kit/sequencer"; + +export type S3Config = { + client: Minio.ClientOptions; + bucketName: string; +}; + +const PREFIX_DELIMITER = "/"; + +@sequencerModule() +export class S3RemoteCache + extends 
SequencerModule + implements RemoteCache +{ + client?: Minio.Client; + + private getObjectName(program: string, object: string): string { + return `${program}${PREFIX_DELIMITER}${object}`; + } + + private assertValidObjectName(name: string) { + if (name.includes("/")) { + throw new Error("Object name can't contain slashes (/)"); + } + } + + private async ensureBucketExists() { + const bucketExists = await this.client!.bucketExists( + this.config.bucketName + ); + + if (!bucketExists) { + await this.client!.makeBucket(this.config.bucketName); + } + } + + public async storeObject( + program: string, + object: string, + file: Readable + ): Promise { + this.assertValidObjectName(object); + this.assertValidObjectName(program); + + await this.client!.putObject( + this.config.bucketName, + this.getObjectName(program, object), + file, + undefined, + {} + ); + } + + public async getObjects(program: string): Promise { + this.assertValidObjectName(program); + + const stream = this.client!.listObjectsV2( + this.config.bucketName, + program + PREFIX_DELIMITER + ); + + const results: Minio.BucketItem[] = []; + stream.on("data", (data) => { + results.push(data); + }); + + await finished(stream); + + return results.map((result) => result.name!.split("/")[1]); + } + + public async readObject(program: string, object: string): Promise { + this.assertValidObjectName(object); + this.assertValidObjectName(program); + + return await this.client!.getObject( + this.config.bucketName, + this.getObjectName(program, object) + ); + } + + async start(): Promise { + this.client = new Minio.Client(this.config.client); + + await this.ensureBucketExists(); + } +} diff --git a/packages/deployment/src/index.ts b/packages/deployment/src/index.ts index 9f8b0a609..cbc5458d5 100644 --- a/packages/deployment/src/index.ts +++ b/packages/deployment/src/index.ts @@ -1,2 +1,3 @@ export * from "./queue/BullQueue"; export * from "./environment/Environment"; +export * from "./cache/S3RemoteCache"; diff --git 
a/packages/deployment/test-integration/s3.test.ts b/packages/deployment/test-integration/s3.test.ts new file mode 100644 index 000000000..7d7a52de7 --- /dev/null +++ b/packages/deployment/test-integration/s3.test.ts @@ -0,0 +1,65 @@ +import * as fs from "node:fs"; + +import Minio from "minio"; +import { Field, ZkProgram } from "o1js"; +import cachedir from "cachedir"; +import { CacheManifest, log, RemoteCacheCompiler } from "@proto-kit/common"; + +import { S3RemoteCache } from "../src/cache/S3RemoteCache"; + +const program = ZkProgram({ + name: "cache-test-zkprogram", + publicInput: Field, + publicOutput: Field, + methods: { + increment: { + privateInputs: [], + async method(input: Field) { + return { publicOutput: input.add(1).add(input) }; + }, + }, + }, +}); + +describe("s3", () => { + const config: Minio.ClientOptions = { + endPoint: "localhost", + port: 9000, + useSSL: false, + accessKey: "minioadmin", + secretKey: "minioadmin", + }; + + function clearCache() { + fs.rmSync(cachedir("o1js"), { force: true, recursive: true }); + fs.mkdirSync(cachedir("o1js")); + } + + beforeAll(() => { + clearCache(); + + log.setLevel("DEBUG"); + }); + + it("should upload artifacts on first compile", async () => { + const cache = new S3RemoteCache(); + cache.config = { + client: config, + bucketName: "cache-test-bucket", + }; + await cache.start(); + + const compiler = new RemoteCacheCompiler(cache, new CacheManifest()); + + await compiler.compileWithCache(program); + + const srsObjects = await cache.getObjects("srs"); + expect(srsObjects.length).toBe(6); + + const programObjects = await cache.getObjects("cache-test-zkprogram"); + expect(programObjects.length).toBe(8); + }, 100000); + + // Not a lot else we can test this way, since o1js keeps the prover, therefore recompiling + // doesn't do anything regarding the cache... 
+}); diff --git a/packages/deployment/test-integration/tsconfig.json b/packages/deployment/test-integration/tsconfig.json new file mode 100644 index 000000000..a0c8c946b --- /dev/null +++ b/packages/deployment/test-integration/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "./../../../tsconfig.json", + "compilerOptions": { + "experimentalDecorators": true + }, + "include": ["./**/*.ts", "./*.ts"] +} diff --git a/packages/module/src/method/runtimeMethod.ts b/packages/module/src/method/runtimeMethod.ts index d53a9439c..ffa8145af 100644 --- a/packages/module/src/method/runtimeMethod.ts +++ b/packages/module/src/method/runtimeMethod.ts @@ -25,11 +25,6 @@ const errors = { runtimeNotProvided: (name: string) => new Error(`Runtime was not provided for module: ${name}`), - methodInputsNotProvided: () => - new Error( - "Method execution inputs not provided, provide them via context.inputs" - ), - runtimeNameNotSet: () => new Error("Runtime name was not set"), fieldNotConstant: (name: string) => diff --git a/packages/persistance/test-integration/SequencerRestart.test.ts b/packages/persistance/test-integration/SequencerRestart.test.ts index 940bc40ca..2864424b9 100644 --- a/packages/persistance/test-integration/SequencerRestart.test.ts +++ b/packages/persistance/test-integration/SequencerRestart.test.ts @@ -40,7 +40,7 @@ describe("sequencer restart", () => { }; const teardown = async () => { - await appChain.sequencer.resolve("Database").close(); + await appChain.close(); }; beforeAll(async () => { diff --git a/packages/protocol/src/index.ts b/packages/protocol/src/index.ts index 8d8cf7d9d..34e794b3d 100644 --- a/packages/protocol/src/index.ts +++ b/packages/protocol/src/index.ts @@ -22,6 +22,7 @@ export * from "./prover/accumulators/StateTransitionReductionList"; export * from "./prover/accumulators/AppliedBatchHashList"; export * from "./prover/accumulators/WitnessedRootHashList"; export * from "./prover/accumulators/TransactionHashList"; +export * from 
"./prover/accumulators/BlockHashList"; export * from "./prover/block/BlockProver"; export * from "./prover/block/BlockProvable"; export * from "./prover/block/accummulators/RuntimeVerificationKeyTree"; diff --git a/packages/protocol/src/protocol/ProvableBlockHook.ts b/packages/protocol/src/protocol/ProvableBlockHook.ts index 7ba6dcd32..4de723a6f 100644 --- a/packages/protocol/src/protocol/ProvableBlockHook.ts +++ b/packages/protocol/src/protocol/ProvableBlockHook.ts @@ -3,47 +3,52 @@ import { NoConfig } from "@proto-kit/common"; import { NetworkState } from "../model/network/NetworkState"; import { - BlockProverState, BlockProverPublicInput, + BlockArguments, + BlockProverState, } from "../prover/block/BlockProvable"; import { TransitioningProtocolModule } from "./TransitioningProtocolModule"; export type ProvableHookBlockState = Pick< - BlockProverPublicInput, - | "transactionsHash" - | "eternalTransactionsHash" - | "incomingMessagesHash" - | "blockHashRoot" + BlockProverPublicInput & BlockArguments, + "eternalTransactionsHash" | "incomingMessagesHash" | "blockHashRoot" >; -export function toProvableHookBlockState( +export function toBeforeBlockHookArgument( state: Pick< BlockProverState, - | "transactionList" - | "eternalTransactionsList" - | "incomingMessages" - | "blockHashRoot" + "eternalTransactionsList" | "incomingMessages" | "blockHashRoot" > ) { - const { - transactionList, - eternalTransactionsList, - incomingMessages, - blockHashRoot, - } = state; + const { eternalTransactionsList, incomingMessages, blockHashRoot } = state; return { - transactionsHash: transactionList.commitment, eternalTransactionsHash: eternalTransactionsList.commitment, incomingMessagesHash: incomingMessages.commitment, blockHashRoot, }; } +export function toAfterBlockHookArgument( + state: Pick< + BlockProverState, + "eternalTransactionsList" | "incomingMessages" | "blockHashRoot" + >, + stateRoot: Field, + transactionsHash: Field +) { + return { + 
...toBeforeBlockHookArgument(state), + stateRoot, + transactionsHash, + }; +} + export interface BeforeBlockHookArguments extends ProvableHookBlockState {} export interface AfterBlockHookArguments extends BeforeBlockHookArguments { stateRoot: Field; + transactionsHash: Field; } // Purpose is to build transition from -> to network state diff --git a/packages/protocol/src/protocol/ProvableTransactionHook.ts b/packages/protocol/src/protocol/ProvableTransactionHook.ts index 68fa3bedf..43280ff3c 100644 --- a/packages/protocol/src/protocol/ProvableTransactionHook.ts +++ b/packages/protocol/src/protocol/ProvableTransactionHook.ts @@ -1,28 +1,28 @@ import { NoConfig } from "@proto-kit/common"; -import { Signature } from "o1js"; +import { Field, Signature } from "o1js"; import { RuntimeTransaction } from "../model/transaction/RuntimeTransaction"; import { NetworkState } from "../model/network/NetworkState"; import { MethodPublicOutput } from "../model/MethodPublicOutput"; import { - TransactionProverPublicInput, TransactionProverState, TransactionProverTransactionArguments, } from "../prover/transaction/TransactionProvable"; import { TransitioningProtocolModule } from "./TransitioningProtocolModule"; -export type ProvableHookTransactionState = Pick< - TransactionProverPublicInput, - "transactionsHash" | "eternalTransactionsHash" | "incomingMessagesHash" ->; +export type ProvableHookTransactionState = { + transactionsHash: Field; + eternalTransactionsHash: Field; + incomingMessagesHash: Field; +}; export function toProvableHookTransactionState( state: Pick< TransactionProverState, "transactionList" | "eternalTransactionsList" | "incomingMessages" > -) { +): ProvableHookTransactionState { const { transactionList, eternalTransactionsList, incomingMessages } = state; return { transactionsHash: transactionList.commitment, diff --git a/packages/protocol/src/prover/accumulators/BlockHashList.ts b/packages/protocol/src/prover/accumulators/BlockHashList.ts new file mode 100644 index 
000000000..b0e732e65 --- /dev/null +++ b/packages/protocol/src/prover/accumulators/BlockHashList.ts @@ -0,0 +1,117 @@ +import { Field, Struct } from "o1js"; + +import { DefaultProvableHashList } from "../../utils/ProvableHashList"; +import type { TransactionProverState } from "../transaction/TransactionProvable"; +import { NetworkState } from "../../model/network/NetworkState"; + +export class BundlePreimage extends Struct({ + preimage: Field, + fromStateTransitionsHash: Field, + fromWitnessedRootsHash: Field, +}) {} + +export class FieldTransition extends Struct({ + from: Field, + to: Field, +}) {} + +/** + * A bundle represents an ordered list of transactions and their evaluated effects. + * Specifically, this includes beforeTransaction, runtime and afterTransaction evaluation, + * but not block hooks. + */ +export class Bundle extends Struct({ + // Those are per-block trackers + networkStateHash: Field, + transactionsHash: Field, + + // Those are non-linear trackers that we assert later in the blockprover + pendingSTBatchesHash: FieldTransition, + witnessedRootsHash: FieldTransition, +}) {} + +/** + * This hash list collects an ordered list of Bundle instances. + * "Pushing" onto this list can mean either appending a new bundle or updating the + * bundle at the tip of this list, according to the following rules: + * The validated preimage (via checkLastBundleElement) is: + * - == commitment: A new bundle will be appended + * - something else: The preimage is the actual preimage, therefore as a operation, + * the old one will be popped (silently) and the updates bundle will be pushed, + * resulting in an semantic update of the tip. 
+ */ +export class BundleHashList extends DefaultProvableHashList { + public constructor( + commitment: Field = Field(0), + // TODO Refactor this into preimage and "auxiliary batch information" - this is confusing + public preimage?: BundlePreimage + ) { + super(Bundle, commitment); + } + + /** Verifies this list's preimage against the prover's state + * The main impact this function has is that it makes the preimage trusted + * i.e. we can safely use it to add to the bundle/open a new bundle + */ + public checkLastBundleElement( + state: TransactionProverState, + networkState: NetworkState + ) { + const { preimage, fromWitnessedRootsHash, fromStateTransitionsHash } = + this.preimage!; + + // Check and append to bundlelist + const lastElement = new Bundle({ + networkStateHash: networkState.hash(), + transactionsHash: state.transactionList.commitment, + pendingSTBatchesHash: { + from: fromStateTransitionsHash, + to: state.pendingSTBatches.commitment, + }, + witnessedRootsHash: { + from: fromWitnessedRootsHash, + to: state.witnessedRoots.commitment, + }, + }); + + const newBundle = this.commitment.equals(preimage); + this.witnessTip(preimage, lastElement) + .or(newBundle) + .assertTrue("Last element not valid"); + + newBundle + .implies(state.transactionList.isEmpty()) + .assertTrue("Transaction list not empty for new bundle"); + } + + /** + * This function pushes a new bundle onto this list or updates the bundle at + * the tip of this list, according to the rules of the preimage algorithms (see class docs) + */ + public addToBundle( + state: TransactionProverState, + networkState: NetworkState + ) { + const { preimage, fromWitnessedRootsHash, fromStateTransitionsHash } = + this.preimage!; + + const newElement = new Bundle({ + networkStateHash: networkState.hash(), + transactionsHash: state.transactionList.commitment, + pendingSTBatchesHash: { + from: fromStateTransitionsHash, + to: state.pendingSTBatches.commitment, + }, + witnessedRootsHash: { + from: 
fromWitnessedRootsHash, + to: state.witnessedRoots.commitment, + }, + }); + + // We always overwrite here, the invariant is that the preimage is + // either the actual preimage in case of addition to the existing bundle + // or the current commitment in case of a new bundle + this.commitment = preimage; + this.push(newElement); + } +} diff --git a/packages/protocol/src/prover/accumulators/WitnessedRootHashList.ts b/packages/protocol/src/prover/accumulators/WitnessedRootHashList.ts index 9a8250168..7226492da 100644 --- a/packages/protocol/src/prover/accumulators/WitnessedRootHashList.ts +++ b/packages/protocol/src/prover/accumulators/WitnessedRootHashList.ts @@ -52,7 +52,7 @@ export class WitnessedRootHashList extends DefaultProvableHashList Already covered in BlockProver + // (1) don't append if witnessedRoot == finalizedRoot -> Already covered in BlockProver // (2) don't append if preimage.push({ finalizedRoot, pendingSTBatchesHash }) == this.commitment const skipPush = preimageCheckList.commitment.equals(this.commitment); diff --git a/packages/protocol/src/prover/block/BlockProvable.ts b/packages/protocol/src/prover/block/BlockProvable.ts index dbcb311fe..c0396b177 100644 --- a/packages/protocol/src/prover/block/BlockProvable.ts +++ b/packages/protocol/src/prover/block/BlockProvable.ts @@ -1,4 +1,4 @@ -import { Bool, Field, Proof, Struct } from "o1js"; +import { Bool, Field, Proof, Provable, Struct } from "o1js"; import { CompilableModule, WithZkProgrammable } from "@proto-kit/common"; import { StateTransitionProof } from "../statetransition/StateTransitionProvable"; @@ -10,15 +10,68 @@ import { WitnessedRootHashList, WitnessedRootWitness, } from "../accumulators/WitnessedRootHashList"; -import { - TransactionProof, - TransactionProverState, - TransactionProverStateCommitments, -} from "../transaction/TransactionProvable"; +import { TransactionProof } from "../transaction/TransactionProvable"; +import { BundleHashList, FieldTransition } from 
"../accumulators/BlockHashList"; +import { NonMethods } from "../../utils/utils"; import { BlockHashMerkleTreeWitness } from "./accummulators/BlockHashMerkleTree"; -export class BlockProverState extends TransactionProverState { +export const BLOCK_ARGUMENT_BATCH_SIZE = 4; + +export class BlockArguments extends Struct({ + afterBlockRootWitness: WitnessedRootWitness, + transactionsHash: Field, + pendingSTBatchesHash: FieldTransition, + witnessedRootsHash: FieldTransition, + isDummy: Bool, +}) { + public static noop( + state: NonMethods>, + stateRoot: Field + ) { + return new BlockArguments({ + afterBlockRootWitness: { + witnessedRoot: stateRoot, + preimage: Field(0), + }, + transactionsHash: Field(0), + pendingSTBatchesHash: { + from: state.pendingSTBatches.commitment, + to: state.pendingSTBatches.commitment, + }, + witnessedRootsHash: { + from: state.witnessedRoots.commitment, + to: state.witnessedRoots.commitment, + }, + isDummy: Bool(true), + }); + } +} + +export class BlockArgumentsBatch extends Struct({ + batch: Provable.Array(BlockArguments, BLOCK_ARGUMENT_BATCH_SIZE), +}) {} + +export class BlockProverState { + /** + * The network state which gives access to values such as blockHeight + * This value is the same for the whole batch (L2 block) + */ + bundleList: BundleHashList; + + /** + * A variant of the transactionsHash that is never reset. + * Thought for usage in the sequence state mempool. 
+ * In comparison, transactionsHash restarts at 0 for every new block + */ + eternalTransactionsList: TransactionHashList; + + pendingSTBatches: AppliedBatchHashList; + + incomingMessages: MinaActionsHashList; + + witnessedRoots: WitnessedRootHashList; + /** * The current state root of the block prover */ @@ -32,50 +85,184 @@ export class BlockProverState extends TransactionProverState { blockNumber: Field; + blockWitness: BlockHashMerkleTreeWitness; + + networkState: NetworkState; + constructor(args: { - transactionList: TransactionHashList; networkState: NetworkState; eternalTransactionsList: TransactionHashList; pendingSTBatches: AppliedBatchHashList; - incomingMessages: MinaActionsHashList; - witnessedRoots: WitnessedRootHashList; stateRoot: Field; blockHashRoot: Field; blockNumber: Field; + bundleList: BundleHashList; + blockWitness: BlockHashMerkleTreeWitness; + witnessedRoots: WitnessedRootHashList; + incomingMessages: MinaActionsHashList; }) { - super(args); + this.bundleList = args.bundleList; + this.eternalTransactionsList = args.eternalTransactionsList; + this.pendingSTBatches = args.pendingSTBatches; this.stateRoot = args.stateRoot; this.blockHashRoot = args.blockHashRoot; this.blockNumber = args.blockNumber; + this.networkState = args.networkState; + this.blockWitness = args.blockWitness; + this.witnessedRoots = args.witnessedRoots; + this.incomingMessages = args.incomingMessages; } public toCommitments(): BlockProverPublicInput { return { - ...super.toCommitments(), + remainders: { + bundlesHash: this.bundleList.commitment, + pendingSTBatchesHash: this.pendingSTBatches.commitment, + witnessedRootsHash: this.witnessedRoots.commitment, + }, + eternalTransactionsHash: this.eternalTransactionsList.commitment, + incomingMessagesHash: this.incomingMessages.commitment, stateRoot: this.stateRoot, blockHashRoot: this.blockHashRoot, blockNumber: this.blockNumber, + networkStateHash: this.networkState.hash(), }; } - public static fromCommitments( + public static 
blockProverFromCommitments( publicInput: BlockProverPublicInput, - networkState: NetworkState + networkState: NetworkState, + blockWitness: BlockHashMerkleTreeWitness ): BlockProverState { return new BlockProverState({ - ...super.fromCommitments(publicInput, networkState), + bundleList: new BundleHashList(publicInput.remainders.bundlesHash), + eternalTransactionsList: new TransactionHashList( + publicInput.eternalTransactionsHash + ), + incomingMessages: new MinaActionsHashList( + publicInput.incomingMessagesHash + ), + pendingSTBatches: new AppliedBatchHashList( + publicInput.remainders.pendingSTBatchesHash + ), + witnessedRoots: new WitnessedRootHashList( + publicInput.remainders.witnessedRootsHash + ), stateRoot: publicInput.stateRoot, blockHashRoot: publicInput.blockHashRoot, blockNumber: publicInput.blockNumber, + networkState, + blockWitness, + }); + } + + public copy() { + return BlockProverState.fromFields(this.toFields()); + } + + public toFields() { + return [ + this.bundleList.commitment, + this.eternalTransactionsList.commitment, + this.pendingSTBatches.commitment, + this.incomingMessages.commitment, + this.witnessedRoots.commitment, + this.stateRoot, + this.blockHashRoot, + this.blockNumber, + ...NetworkState.toFields(this.networkState), + ...BlockHashMerkleTreeWitness.toFields(this.blockWitness), + ]; + } + + // TODO Unit test + public static fromFields(fields: Field[]) { + return new BlockProverState({ + bundleList: new BundleHashList(fields[0]), + eternalTransactionsList: new TransactionHashList(fields[1]), + pendingSTBatches: new AppliedBatchHashList(fields[2]), + incomingMessages: new MinaActionsHashList(fields[3]), + witnessedRoots: new WitnessedRootHashList(fields[4]), + stateRoot: fields[5], + blockHashRoot: fields[6], + blockNumber: fields[7], + networkState: new NetworkState(NetworkState.fromFields(fields.slice(8))), + blockWitness: new BlockHashMerkleTreeWitness( + BlockHashMerkleTreeWitness.fromFields( + fields.slice(8 + 
NetworkState.sizeInFields()) + ) + ), + }); + } + + public static choose( + condition: Bool, + a: BlockProverState, + b: BlockProverState + ) { + return new BlockProverState({ + bundleList: new BundleHashList( + Provable.if(condition, a.bundleList.commitment, b.bundleList.commitment) + ), + eternalTransactionsList: new TransactionHashList( + Provable.if( + condition, + a.eternalTransactionsList.commitment, + b.eternalTransactionsList.commitment + ) + ), + pendingSTBatches: new AppliedBatchHashList( + Provable.if( + condition, + a.pendingSTBatches.commitment, + b.pendingSTBatches.commitment + ) + ), + incomingMessages: new MinaActionsHashList( + Provable.if( + condition, + a.incomingMessages.commitment, + b.incomingMessages.commitment + ) + ), + witnessedRoots: new WitnessedRootHashList( + Provable.if( + condition, + a.witnessedRoots.commitment, + b.witnessedRoots.commitment + ) + ), + stateRoot: Provable.if(condition, a.stateRoot, b.stateRoot), + blockHashRoot: Provable.if(condition, a.blockHashRoot, b.blockHashRoot), + blockWitness: new BlockHashMerkleTreeWitness( + Provable.if( + condition, + BlockHashMerkleTreeWitness, + a.blockWitness, + b.blockWitness + ) + ), + blockNumber: Provable.if(condition, a.blockNumber, b.blockNumber), + networkState: new NetworkState( + Provable.if(condition, NetworkState, a.networkState, b.networkState) + ), }); } } export const BlockProverStateCommitments = { - ...TransactionProverStateCommitments, + remainders: { + // Commitment to the list of unprocessed (pending) batches of STs that need to be proven + pendingSTBatchesHash: Field, + witnessedRootsHash: Field, + bundlesHash: Field, + }, + eternalTransactionsHash: Field, + incomingMessagesHash: Field, stateRoot: Field, blockHashRoot: Field, blockNumber: Field, + networkStateHash: Field, }; export class BlockProverPublicInput extends Struct( @@ -84,13 +271,9 @@ export class BlockProverPublicInput extends Struct( export class BlockProverPublicOutput extends Struct({ 
...BlockProverStateCommitments, - closed: Bool, }) { - public equals(input: BlockProverPublicInput, closed: Bool): Bool { - const output2 = BlockProverPublicOutput.toFields({ - ...input, - closed, - }); + public equals(input: BlockProverPublicInput): Bool { + const output2 = BlockProverPublicOutput.toFields(input); const output1 = BlockProverPublicOutput.toFields(this); return output1 .map((value1, index) => value1.equals(output2[index])) @@ -103,14 +286,15 @@ export type BlockProof = Proof; export interface BlockProvable extends WithZkProgrammable, CompilableModule { - proveBlock: ( + proveBlockBatch: ( publicInput: BlockProverPublicInput, networkState: NetworkState, blockWitness: BlockHashMerkleTreeWitness, stateTransitionProof: StateTransitionProof, - deferSTs: Bool, - afterBlockRootWitness: WitnessedRootWitness, - transactionProof: TransactionProof + deferSTProof: Bool, + transactionProof: TransactionProof, + deferTransactionProof: Bool, + batch: BlockArgumentsBatch ) => Promise; merge: ( diff --git a/packages/protocol/src/prover/block/BlockProver.ts b/packages/protocol/src/prover/block/BlockProver.ts index ede4ea8c9..83485f3be 100644 --- a/packages/protocol/src/prover/block/BlockProver.ts +++ b/packages/protocol/src/prover/block/BlockProver.ts @@ -6,9 +6,9 @@ import { CompileArtifact, CompileRegistry, log, - MAX_FIELD, PlainZkProgram, provableMethod, + reduceSequential, WithZkProgrammable, ZkProgrammable, } from "@proto-kit/common"; @@ -26,11 +26,11 @@ import { AfterBlockHookArguments, BeforeBlockHookArguments, ProvableBlockHook, - toProvableHookBlockState, + toAfterBlockHookArgument, + toBeforeBlockHookArgument, } from "../../protocol/ProvableBlockHook"; import { NetworkState } from "../../model/network/NetworkState"; import { assertEqualsIf } from "../../utils/utils"; -import { WitnessedRootWitness } from "../accumulators/WitnessedRootHashList"; import { StateServiceProvider } from "../../state/StateServiceProvider"; import { executeHooks } from "../utils"; 
import { @@ -39,6 +39,7 @@ import { TransactionProverPublicInput, TransactionProverPublicOutput, } from "../transaction/TransactionProvable"; +import { Bundle } from "../accumulators/BlockHashList"; import { BlockProvable, @@ -46,6 +47,8 @@ import { BlockProverPublicInput, BlockProverPublicOutput, BlockProverState, + BlockArgumentsBatch, + BlockArguments, } from "./BlockProvable"; import { BlockHashMerkleTreeWitness, @@ -57,17 +60,12 @@ const errors = { `${propertyName} not matching: ${step}`, propertyNotMatching: (propertyName: string) => `${propertyName} not matching`, - - stateRootNotMatching: (step: string) => - errors.propertyNotMatchingStep("StateRoots", step), - - transactionsHashNotMatching: (step: string) => - errors.propertyNotMatchingStep("Transactions hash", step), - - networkStateHashNotMatching: (step: string) => - errors.propertyNotMatchingStep("Network state hash", step), }; +type BlockHookArgument = T extends "before" + ? BeforeBlockHookArguments + : AfterBlockHookArguments; + export class BlockProverProgrammable extends ZkProgrammable< BlockProverPublicInput, BlockProverPublicOutput @@ -94,16 +92,16 @@ export class BlockProverProgrammable extends ZkProgrammable< return this.prover.areProofsEnabled; } - public async executeBlockHooks< - T extends BeforeBlockHookArguments | AfterBlockHookArguments, - >( + public async executeBlockHooks( + type: T, hook: ( module: ProvableBlockHook, networkState: NetworkState, - args: T + args: BlockHookArgument ) => Promise, - hookArguments: T, - inputNetworkState: NetworkState + hookArguments: BlockHookArgument, + inputNetworkState: NetworkState, + isDummy: Bool ) { const transaction = RuntimeTransaction.dummyTransaction(); const startingInputs = { @@ -111,26 +109,33 @@ export class BlockProverProgrammable extends ZkProgrammable< networkState: inputNetworkState, }; - return await executeHooks(startingInputs, async () => { - const executionContext = container.resolve(RuntimeMethodExecutionContext); - - return await 
this.blockHooks.reduce>( - async (networkStatePromise, blockHook) => { - const networkState = await networkStatePromise; - - // Setup context for potential calls to runtime methods. - // With the special case that we set the new networkstate for every hook - // We also have to put in a dummy transaction for network.transaction - executionContext.setup({ - transaction: RuntimeTransaction.dummyTransaction(), - networkState, - }); - - return await hook(blockHook, networkState, hookArguments); - }, - Promise.resolve(inputNetworkState) - ); - }); + return await executeHooks( + startingInputs, + `${type}Block`, + async () => { + const executionContext = container.resolve( + RuntimeMethodExecutionContext + ); + + return await this.blockHooks.reduce>( + async (networkStatePromise, blockHook) => { + const networkState = await networkStatePromise; + + // Setup context for potential calls to runtime methods. + // With the special case that we set the new networkstate for every hook + // We also have to put in a dummy transaction for network.transaction + executionContext.setup({ + transaction: RuntimeTransaction.dummyTransaction(), + networkState, + }); + + return await hook(blockHook, networkState, hookArguments); + }, + Promise.resolve(inputNetworkState) + ); + }, + isDummy + ); } public includeSTProof( @@ -216,102 +221,22 @@ export class BlockProverProgrammable extends ZkProgrammable< } @provableMethod() - public async proveBlock( + public async proveBlockBatch( publicInput: BlockProverPublicInput, networkState: NetworkState, blockWitness: BlockHashMerkleTreeWitness, stateTransitionProof: StateTransitionProof, deferSTProof: Bool, - afterBlockRootWitness: WitnessedRootWitness, - transactionProof: TransactionProof + transactionProof: TransactionProof, + deferTransactionProof: Bool, + batch: BlockArgumentsBatch ): Promise { - // 1. 
Make assertions about the inputs - publicInput.transactionsHash.assertEquals( - Field(0), - "Transactionshash has to start at 0" - ); - - // TransactionProof format checks - transactionProof.publicInput.networkStateHash.assertEquals( - transactionProof.publicOutput.networkStateHash, - "TransactionProof cannot alter the network state" - ); - - const state = BlockProverState.fromCommitments(publicInput, networkState); - - // Verify Transaction proof if it has at least 1 tx - i.e. the - // input and output doesn't match fully - // We have to compare the whole input and output because we can make no - // assumptions about the values, since it can be an arbitrary dummy-proof - const isEmptyTransition = TransactionProverPublicInput.equals( - transactionProof.publicOutput, - transactionProof.publicInput - ); - const skipTransactionProofVerification = isEmptyTransition; - const verifyTransactionProof = isEmptyTransition.not(); - log.provable.debug("VerifyIf TxProof", verifyTransactionProof); - transactionProof.verifyIf(verifyTransactionProof); - - // 2. Execute beforeBlock hooks - const beforeBlockArgs = toProvableHookBlockState(state); - const beforeBlockResult = await this.executeBlockHooks( - async (module, networkStateArg, args) => - await module.beforeBlock(networkStateArg, args), - beforeBlockArgs, - networkState - ); - - state.pendingSTBatches.push(beforeBlockResult.batch); - - // 4. 
Apply TX-type BlockProof - transactionProof.publicInput.networkStateHash - .equals(beforeBlockResult.result.hash()) - .or(skipTransactionProofVerification) - .assertTrue( - "TransactionProof networkstate hash not matching beforeBlock hook result" - ); - - // Check that the transaction proof's STs start after the beforeBlock hook - transactionProof.publicInput.pendingSTBatchesHash.assertEquals( - state.pendingSTBatches.commitment, - "Transaction proof doesn't start their STs after the beforeBlockHook" - ); - // Fast-forward the stBatchHashList to after all transactions appended - state.pendingSTBatches.commitment = - transactionProof.publicOutput.pendingSTBatchesHash; - - // Fast-forward block content commitments by the results of the aggregated transaction proof - // Implicitly, the 'from' values here are asserted against the publicInput, since the hashlists - // are created out of the public input - state.transactionList.fastForward({ - from: transactionProof.publicInput.transactionsHash, - to: transactionProof.publicOutput.transactionsHash, - }); - state.eternalTransactionsList.fastForward({ - from: transactionProof.publicInput.eternalTransactionsHash, - to: transactionProof.publicOutput.eternalTransactionsHash, - }); - state.incomingMessages.fastForward({ - from: transactionProof.publicInput.incomingMessagesHash, - to: transactionProof.publicOutput.incomingMessagesHash, - }); - - // Witness root - const isEmpty = state.pendingSTBatches.commitment.equals(0); - isEmpty - .implies(state.stateRoot.equals(afterBlockRootWitness.witnessedRoot)) - .assertTrue(); - - state.witnessedRoots.witnessRoot( - { - appliedBatchListState: state.pendingSTBatches.commitment, - root: afterBlockRootWitness.witnessedRoot, - }, - afterBlockRootWitness.preimage, - isEmpty.not() + publicInput.networkStateHash.assertEquals( + networkState.hash(), + "Network state not valid" ); - // 5. 
Calculate the new block tree hash + // Calculate the new block tree hash const blockIndex = blockWitness.calculateIndex(); blockIndex.assertEquals(publicInput.blockNumber); @@ -323,37 +248,70 @@ export class BlockProverProgrammable extends ZkProgrammable< "Supplied block hash witness not matching state root" ); - state.blockHashRoot = blockWitness.calculateRoot( - new BlockHashTreeEntry({ - block: { - index: blockIndex, - transactionListHash: state.transactionList.commitment, - }, - closed: Bool(true), - }).hash() + let state = BlockProverState.blockProverFromCommitments( + publicInput, + networkState, + blockWitness ); - // 6. Execute afterBlock hooks + // Prove blocks iteratively + state = await reduceSequential( + batch.batch, + async (current, block) => { + const result = await this.proveBlock(current.copy(), block); - // Switch state service to afterBlock one - this.stateServiceProvider.popCurrentStateService(); + this.stateServiceProvider.popCurrentStateService(); - const afterBlockHookArgs = toProvableHookBlockState(state); - const afterBlockResult = await this.executeBlockHooks( - async (module, networkStateArg, args) => - await module.afterBlock(networkStateArg, args), + return BlockProverState.choose(block.isDummy, current, result); + }, + state + ); + + // Verify Transaction proof if it has at least 1 tx and it isn't deferred + const verifyTransactionProof = deferTransactionProof + .not() + .and(state.bundleList.isEmpty().not()); + + transactionProof.verifyIf(verifyTransactionProof); + + // Fast-forward transaction trackers by the results of the aggregated transaction proof + // Implicitly, the 'from' values here are asserted against the publicInput, since the hashlists + // are created out of the public input + state.eternalTransactionsList.fastForwardIf( { - ...afterBlockHookArgs, - stateRoot: afterBlockRootWitness.witnessedRoot, + from: transactionProof.publicInput.eternalTransactionsHash, + to: transactionProof.publicOutput.eternalTransactionsHash, 
}, - beforeBlockResult.result + verifyTransactionProof, + "eternalTransactionsList" ); - state.pendingSTBatches.push(afterBlockResult.batch); + state.incomingMessages.fastForwardIf( + { + from: transactionProof.publicInput.incomingMessagesHash, + to: transactionProof.publicOutput.incomingMessagesHash, + }, + verifyTransactionProof, + "incomingMessages" + ); - state.networkState = afterBlockResult.result; + // Cancel out remainders for transaction proof + assertEqualsIf( + transactionProof.publicInput.bundlesHash, + Field(0), + verifyTransactionProof, + "TransactionProof has to start bundles at 0" + ); - // 7. Close block + // Fast Backwards actually, but logic holds + state.bundleList.fastForwardIf( + { + from: transactionProof.publicOutput.bundlesHash, + to: state.bundleList.empty(), + }, + verifyTransactionProof, + "bundles hash" + ); // Verify ST Proof only if STs have been emitted, // and we don't defer the verification of the STs @@ -375,172 +333,173 @@ export class BlockProverProgrammable extends ZkProgrammable< state.pendingSTBatches.commitment = stateProofResult.pendingSTBatchesHash; state.witnessedRoots.commitment = stateProofResult.witnessedRootsHash; - state.blockNumber = blockIndex.add(1); - - return new BlockProverPublicOutput({ - ...state.toCommitments(), - closed: Bool(true), - }); + return new BlockProverPublicOutput(state.toCommitments()); } - @provableMethod() - public async merge( - publicInput: BlockProverPublicInput, - proof1: BlockProof, - proof2: BlockProof - ): Promise { - proof1.verify(); - proof2.verify(); + private async proveBlock( + state: BlockProverState, + args: BlockArguments + ): Promise { + const { networkState, blockWitness } = state; + const { afterBlockRootWitness, transactionsHash, isDummy } = args; - // Check state - publicInput.stateRoot.assertEquals( - proof1.publicInput.stateRoot, - errors.stateRootNotMatching("publicInput.from -> proof1.from") - ); - proof1.publicOutput.stateRoot.assertEquals( - 
proof2.publicInput.stateRoot, - errors.stateRootNotMatching("proof1.to -> proof2.from") + // 1. Execute beforeBlock hooks + const beforeBlockArgs = toBeforeBlockHookArgument(state); + const beforeBlockResult = await this.executeBlockHooks( + "before", + async (module, networkStateArg, hookArgs) => + await module.beforeBlock(networkStateArg, hookArgs), + beforeBlockArgs, + networkState, + isDummy ); - // Check transaction list hash. - // Only assert them if these are tx proofs, skip for closed proofs - publicInput.transactionsHash - .equals(proof1.publicInput.transactionsHash) - .or(proof1.publicOutput.closed) - .assertTrue( - errors.transactionsHashNotMatching("publicInput.from -> proof1.from") - ); - proof1.publicOutput.transactionsHash - .equals(proof2.publicInput.transactionsHash) - .or(proof1.publicOutput.closed) - .assertTrue( - errors.transactionsHashNotMatching("proof1.to -> proof2.from") - ); + state.pendingSTBatches.push(beforeBlockResult.batch); - // Check networkhash - publicInput.networkStateHash.assertEquals( - proof1.publicInput.networkStateHash, - errors.networkStateHashNotMatching("publicInput.from -> proof1.from") - ); - proof1.publicOutput.networkStateHash.assertEquals( - proof2.publicInput.networkStateHash, - errors.networkStateHashNotMatching("proof1.to -> proof2.from") + // 2. 
"Apply" TX-type BlockProof + args.pendingSTBatchesHash.from.assertEquals( + state.pendingSTBatches.commitment ); + args.witnessedRootsHash.from.assertEquals(state.witnessedRoots.commitment); + const isEmptyBlock = transactionsHash.equals(Field(0)); + const isNotEmptyBlock = isEmptyBlock.not(); - // Check blockHashRoot - publicInput.blockHashRoot.assertEquals( - proof1.publicInput.blockHashRoot, - errors.transactionsHashNotMatching("publicInput.from -> proof1.from") + // Check & fast-forward the stBatchHashList to after all transactions appended + state.pendingSTBatches.fastForward( + args.pendingSTBatchesHash, + "Transaction proof doesn't start their STs after the beforeBlockHook" ); - proof1.publicOutput.blockHashRoot.assertEquals( - proof2.publicInput.blockHashRoot, - errors.transactionsHashNotMatching("proof1.to -> proof2.from") + // Same for witnessedRootsHash + state.witnessedRoots.fastForward( + args.witnessedRootsHash, + "Transaction proof doesn't start with correct witnessed roots hash" ); - // Check eternalTransactionsHash - publicInput.eternalTransactionsHash.assertEquals( - proof1.publicInput.eternalTransactionsHash, - errors.transactionsHashNotMatching("publicInput.from -> proof1.from") - ); - proof1.publicOutput.eternalTransactionsHash.assertEquals( - proof2.publicInput.eternalTransactionsHash, - errors.transactionsHashNotMatching("proof1.to -> proof2.from") - ); + // Add block to bundles list + const bundle = new Bundle({ + transactionsHash: transactionsHash, + networkStateHash: beforeBlockResult.result.hash(), + pendingSTBatchesHash: args.pendingSTBatchesHash, + witnessedRootsHash: args.witnessedRootsHash, + }); + state.bundleList.pushIf(bundle, isNotEmptyBlock); - // Check incomingMessagesHash - publicInput.incomingMessagesHash.assertEquals( - proof1.publicInput.incomingMessagesHash, - errors.propertyNotMatchingStep( - "IncomingMessagesHash", - "publicInput.from -> proof1.from" - ) - ); - proof1.publicOutput.incomingMessagesHash.assertEquals( - 
proof2.publicInput.incomingMessagesHash, - errors.propertyNotMatchingStep( - "IncomingMessagesHash", - "proof1.to -> proof2.from" - ) + // 3. + // Calculate new block tree root and increment witness + // Blocknumber as the index here is already authenticated previously + const [root, newWitness] = blockWitness.calculateRootIncrement( + state.blockNumber, + new BlockHashTreeEntry({ + block: { + index: state.blockNumber, + transactionListHash: transactionsHash, + }, + closed: Bool(true), + }).hash() ); - // Check pendingSTBatchesHash - publicInput.pendingSTBatchesHash.assertEquals( - proof1.publicInput.pendingSTBatchesHash, - errors.transactionsHashNotMatching("publicInput.from -> proof1.from") - ); - proof1.publicOutput.pendingSTBatchesHash.assertEquals( - proof2.publicInput.pendingSTBatchesHash, - errors.transactionsHashNotMatching("proof1.to -> proof2.from") + state.blockHashRoot = root; + state.blockWitness = newWitness; + + state.blockNumber = state.blockNumber.add(1); + + // 4. Execute afterBlock hooks + // Witness root + const isEmpty = state.pendingSTBatches.commitment.equals(0); + isEmpty + .implies(state.stateRoot.equals(afterBlockRootWitness.witnessedRoot)) + .assertTrue(); + + state.witnessedRoots.witnessRoot( + { + appliedBatchListState: state.pendingSTBatches.commitment, + root: afterBlockRootWitness.witnessedRoot, + }, + afterBlockRootWitness.preimage, + isEmpty.not() ); - // Check witnessedRootsHash - publicInput.witnessedRootsHash.assertEquals( - proof1.publicInput.witnessedRootsHash, - errors.transactionsHashNotMatching("publicInput.from -> proof1.from") + // Switch state service to afterBlock one + this.stateServiceProvider.popCurrentStateService(); + + // Execute hooks + const afterBlockHookArgs = toAfterBlockHookArgument( + state, + afterBlockRootWitness.witnessedRoot, + transactionsHash ); - proof1.publicOutput.witnessedRootsHash.assertEquals( - proof2.publicInput.witnessedRootsHash, - errors.transactionsHashNotMatching("proof1.to -> 
proof2.from") + const afterBlockResult = await this.executeBlockHooks( + "after", + async (module, networkStateArg, hookArgs) => + await module.afterBlock(networkStateArg, hookArgs), + { + ...afterBlockHookArgs, + }, + beforeBlockResult.result, + isDummy ); - // Assert closed indicator matches - // (i.e. we can only merge TX-Type and Block-Type with each other) - proof1.publicOutput.closed.assertEquals( - proof2.publicOutput.closed, - "Closed indicators not matching" - ); + // Apply state and network state changes + state.pendingSTBatches.push(afterBlockResult.batch); + state.networkState = afterBlockResult.result; - // Either - // blockNumbers are unset and proofs are unclosed or - // both blocks are closed, then they have to increment or - // one block is closed, then height has to be the same - - // Imperative algo would look like - // if(proof1.height == MAX && proof2.height == MAX){ - // assert !proof1.closed && !proof2.closed; - // }else if(proof1.closed && proof2.closed){ - // assert proof1.height + 1 == proof2.height - // // next one is omitted for now - // }else if(proof1.closed || proof2.closed{ - // assert proof1.height == proof2.height - // } - - const proof1Closed = proof1.publicOutput.closed; - const proof2Closed = proof2.publicOutput.closed; - - const blockNumberProgressionValid = publicInput.blockNumber - .equals(proof1.publicInput.blockNumber) - .and( - proof1.publicOutput.blockNumber.equals(proof2.publicInput.blockNumber) + return state; + } + + @provableMethod() + public async merge( + publicInput: BlockProverPublicInput, + proof1: BlockProof, + proof2: BlockProof + ): Promise { + proof1.verify(); + proof2.verify(); + + function checkProperty< + Key extends + | "stateRoot" + | "networkStateHash" + | "blockHashRoot" + | "eternalTransactionsHash" + | "incomingMessagesHash" + | "blockNumber", + >(key: Key) { + // Check state + publicInput[key].assertEquals( + proof1.publicInput[key], + errors.propertyNotMatchingStep(key, "publicInput.from -> 
proof1.from") + ); + proof1.publicOutput[key].assertEquals( + proof2.publicInput[key], + errors.propertyNotMatchingStep(key, "proof1.to -> proof2.from") + ); + } + + function checkRemainderProperty< + Key extends "pendingSTBatchesHash" | "witnessedRootsHash" | "bundlesHash", + >(key: Key) { + // Check state + publicInput.remainders[key].assertEquals( + proof1.publicInput.remainders[key], + errors.propertyNotMatchingStep(key, "publicInput.from -> proof1.from") ); + proof1.publicOutput.remainders[key].assertEquals( + proof2.publicInput.remainders[key], + errors.propertyNotMatchingStep(key, "proof1.to -> proof2.from") + ); + } - // For tx proofs, we check that the progression starts and end with MAX - // in addition to that both proofs are non-closed - const isValidTransactionMerge = publicInput.blockNumber - .equals(MAX_FIELD) - .and(blockNumberProgressionValid) - .and(proof1Closed.or(proof2Closed).not()); - - const isValidClosedMerge = proof1Closed - .and(proof2Closed) - .and(blockNumberProgressionValid); - - isValidTransactionMerge - .or(isValidClosedMerge) - .assertTrue("Invalid BlockProof merge"); - - return new BlockProverPublicOutput({ - stateRoot: proof2.publicOutput.stateRoot, - transactionsHash: proof2.publicOutput.transactionsHash, - networkStateHash: proof2.publicOutput.networkStateHash, - blockHashRoot: proof2.publicOutput.blockHashRoot, - eternalTransactionsHash: proof2.publicOutput.eternalTransactionsHash, - incomingMessagesHash: proof2.publicOutput.incomingMessagesHash, - closed: isValidClosedMerge, - blockNumber: proof2.publicOutput.blockNumber, - pendingSTBatchesHash: proof2.publicOutput.pendingSTBatchesHash, - witnessedRootsHash: proof2.publicOutput.witnessedRootsHash, - }); + checkProperty("stateRoot"); + checkProperty("networkStateHash"); + checkProperty("blockHashRoot"); + checkProperty("eternalTransactionsHash"); + checkProperty("incomingMessagesHash"); + + checkRemainderProperty("bundlesHash"); + checkRemainderProperty("pendingSTBatchesHash"); 
+ checkRemainderProperty("witnessedRootsHash"); + + return proof2.publicOutput; } /** @@ -555,7 +514,7 @@ export class BlockProverProgrammable extends ZkProgrammable< const { prover, stateTransitionProver, transactionProver } = this; const StateTransitionProofClass = stateTransitionProver.zkProgram[0].Proof; const TransactionProofClass = transactionProver.zkProgram[0].Proof; - const proveBlock = prover.proveBlock.bind(prover); + const proveBlockBatch = prover.proveBlockBatch.bind(prover); const merge = prover.merge.bind(prover); const program = ZkProgram({ @@ -564,33 +523,36 @@ export class BlockProverProgrammable extends ZkProgrammable< publicOutput: BlockProverPublicOutput, methods: { - proveBlock: { + proveBlockBatch: { privateInputs: [ NetworkState, BlockHashMerkleTreeWitness, StateTransitionProofClass, Bool, - WitnessedRootWitness, TransactionProofClass, + Bool, + BlockArgumentsBatch, ], async method( publicInput: BlockProverPublicInput, networkState: NetworkState, blockWitness: BlockHashMerkleTreeWitness, stateTransitionProof: StateTransitionProof, - deferSTs: Bool, - afterBlockRootWitness: WitnessedRootWitness, - transactionProof: BlockProof + deferSTProof: Bool, + transactionProof: TransactionProof, + deferTransactionProof: Bool, + batch: BlockArgumentsBatch ) { return { - publicOutput: await proveBlock( + publicOutput: await proveBlockBatch( publicInput, networkState, blockWitness, stateTransitionProof, - deferSTs, - afterBlockRootWitness, - transactionProof + deferSTProof, + transactionProof, + deferTransactionProof, + batch ), }; }, @@ -614,7 +576,7 @@ export class BlockProverProgrammable extends ZkProgrammable< }); const methods = { - proveBlock: program.proveBlock, + proveBlockBatch: program.proveBlockBatch, merge: program.merge, }; @@ -683,23 +645,25 @@ export class BlockProver }); } - public proveBlock( + public proveBlockBatch( publicInput: BlockProverPublicInput, networkState: NetworkState, blockWitness: BlockHashMerkleTreeWitness, 
stateTransitionProof: StateTransitionProof, - deferSTs: Bool, - afterBlockRootWitness: WitnessedRootWitness, - transactionProof: TransactionProof + deferSTProof: Bool, + transactionProof: TransactionProof, + deferTransactionProof: Bool, + batch: BlockArgumentsBatch ): Promise { - return this.zkProgrammable.proveBlock( + return this.zkProgrammable.proveBlockBatch( publicInput, networkState, blockWitness, stateTransitionProof, - deferSTs, - afterBlockRootWitness, - transactionProof + deferSTProof, + transactionProof, + deferTransactionProof, + batch ); } diff --git a/packages/protocol/src/prover/transaction/TransactionProvable.ts b/packages/protocol/src/prover/transaction/TransactionProvable.ts index 4180fd906..bb99ab0cc 100644 --- a/packages/protocol/src/prover/transaction/TransactionProvable.ts +++ b/packages/protocol/src/prover/transaction/TransactionProvable.ts @@ -10,6 +10,7 @@ import { TransactionHashList } from "../accumulators/TransactionHashList"; import { AppliedBatchHashList } from "../accumulators/AppliedBatchHashList"; import { MinaActionsHashList } from "../../utils/MinaPrefixedProvableHashList"; import { WitnessedRootHashList } from "../accumulators/WitnessedRootHashList"; +import { BundleHashList, BundlePreimage } from "../accumulators/BlockHashList"; export class TransactionProverState { /** @@ -22,7 +23,7 @@ export class TransactionProverState { * The network state which gives access to values such as blockHeight * This value is the same for the whole batch (L2 block) */ - networkState: NetworkState; + bundleList: BundleHashList; /** * A variant of the transactionsHash that is never reset. 
@@ -39,14 +40,14 @@ export class TransactionProverState { constructor(args: { transactionList: TransactionHashList; - networkState: NetworkState; + bundleList: BundleHashList; eternalTransactionsList: TransactionHashList; pendingSTBatches: AppliedBatchHashList; incomingMessages: MinaActionsHashList; witnessedRoots: WitnessedRootHashList; }) { this.transactionList = args.transactionList; - this.networkState = args.networkState; + this.bundleList = args.bundleList; this.eternalTransactionsList = args.eternalTransactionsList; this.pendingSTBatches = args.pendingSTBatches; this.incomingMessages = args.incomingMessages; @@ -55,51 +56,56 @@ export class TransactionProverState { public toCommitments(): TransactionProverPublicInput { return { - networkStateHash: this.networkState.hash(), - pendingSTBatchesHash: this.pendingSTBatches.commitment, - transactionsHash: this.transactionList.commitment, + bundlesHash: this.bundleList.commitment, + // pendingSTBatchesHash: this.pendingSTBatches.commitment, + // transactionsHash: this.transactionList.commitment, eternalTransactionsHash: this.eternalTransactionsList.commitment, incomingMessagesHash: this.incomingMessages.commitment, - witnessedRootsHash: this.witnessedRoots.commitment, + // witnessedRootsHash: this.witnessedRoots.commitment, }; } public static fromCommitments( publicInput: TransactionProverPublicInput, - networkState: NetworkState + args: TransactionProverArguments ): TransactionProverState { - publicInput.networkStateHash.assertEquals( - networkState.hash(), - "ExecutionData Networkstate doesn't equal public input hash" - ); - return new TransactionProverState({ - networkState, - transactionList: new TransactionHashList(publicInput.transactionsHash), + // Stuff that has to be authenticated via public input, since it's not inside the bundle hash + bundleList: new BundleHashList( + publicInput.bundlesHash, + args.bundleListPreimage + ), eternalTransactionsList: new TransactionHashList( 
publicInput.eternalTransactionsHash ), incomingMessages: new MinaActionsHashList( publicInput.incomingMessagesHash ), - pendingSTBatches: new AppliedBatchHashList( - publicInput.pendingSTBatchesHash - ), - witnessedRoots: new WitnessedRootHashList(publicInput.witnessedRootsHash), + // Remainders (i.e. stuff that goes into the bundle) + transactionList: new TransactionHashList(args.transactionHash), + pendingSTBatches: new AppliedBatchHashList(args.pendingSTBatchesHash), + witnessedRoots: new WitnessedRootHashList(args.witnessedRootsHash), }); } } +// These are all linear trackers, i.e. continuously progressing without +// interruptions from the block prover export const TransactionProverStateCommitments = { - transactionsHash: Field, - // Commitment to the list of unprocessed (pending) batches of STs that need to be proven - pendingSTBatchesHash: Field, - witnessedRootsHash: Field, - networkStateHash: Field, + bundlesHash: Field, eternalTransactionsHash: Field, incomingMessagesHash: Field, }; +export class TransactionProverArguments extends Struct({ + // Commitment to the list of unprocessed (pending) batches of STs that need to be proven + pendingSTBatchesHash: Field, + witnessedRootsHash: Field, + transactionHash: Field, + bundleListPreimage: BundlePreimage, + networkState: NetworkState, +}) {} + export class TransactionProverPublicInput extends Struct( TransactionProverStateCommitments ) { @@ -135,15 +141,9 @@ export class DynamicRuntimeProof extends DynamicProof< static maxProofsVerified = 0 as const; } -export class BlockProverSingleTransactionExecutionData extends Struct({ +export class TransactionProverExecutionData extends Struct({ transaction: TransactionProverTransactionArguments, - networkState: NetworkState, -}) {} - -export class BlockProverMultiTransactionExecutionData extends Struct({ - transaction1: TransactionProverTransactionArguments, - transaction2: TransactionProverTransactionArguments, - networkState: NetworkState, + args: 
TransactionProverArguments, }) {} export type TransactionProof = Proof< @@ -160,14 +160,15 @@ export interface TransactionProvable proveTransaction: ( publicInput: TransactionProverPublicInput, runtimeProof: DynamicRuntimeProof, - executionData: BlockProverSingleTransactionExecutionData + executionData: TransactionProverExecutionData ) => Promise; proveTransactions: ( publicInput: TransactionProverPublicInput, runtimeProof1: DynamicRuntimeProof, runtimeProof2: DynamicRuntimeProof, - executionData: BlockProverMultiTransactionExecutionData + executionData1: TransactionProverExecutionData, + executionData2: TransactionProverExecutionData ) => Promise; merge: ( diff --git a/packages/protocol/src/prover/transaction/TransactionProver.ts b/packages/protocol/src/prover/transaction/TransactionProver.ts index 6c686ba17..4ca81a089 100644 --- a/packages/protocol/src/prover/transaction/TransactionProver.ts +++ b/packages/protocol/src/prover/transaction/TransactionProver.ts @@ -32,11 +32,11 @@ import { } from "../block/accummulators/RuntimeVerificationKeyTree"; import { - BlockProverMultiTransactionExecutionData, - BlockProverSingleTransactionExecutionData, + TransactionProverExecutionData, DynamicRuntimeProof, TransactionProof, TransactionProvable, + TransactionProverArguments, TransactionProverPublicInput, TransactionProverPublicOutput, TransactionProverState, @@ -53,8 +53,8 @@ const errors = { transactionsHashNotMatching: (step: string) => errors.propertyNotMatchingStep("Transactions hash", step), - networkStateHashNotMatching: (step: string) => - errors.propertyNotMatchingStep("Network state hash", step), + bundlesHashNotMatching: (step: string) => + errors.propertyNotMatchingStep("Bundles hash", step), }; type ApplyTransactionArguments = Omit< @@ -62,6 +62,10 @@ type ApplyTransactionArguments = Omit< "verificationKeyAttestation" >; +type TransactionHookArgument = T extends "before" + ? 
BeforeTransactionHookArguments + : AfterTransactionHookArguments; + export class TransactionProverZkProgrammable extends ZkProgrammable< TransactionProverPublicInput, TransactionProverPublicOutput @@ -96,6 +100,7 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< * @param runtimeOutput * @param executionData * @param networkState + * @param bundleListPreimage * @returns The new BlockProver-state to be used as public output */ public async applyTransaction( @@ -118,13 +123,12 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< // Apply beforeTransaction hook state transitions const beforeBatch = await this.executeTransactionHooks( + "before", async (module, args) => await module.beforeTransaction(args), beforeTxHookArguments, isMessage ); - state = addTransactionToBundle(state, runtimeOutput.isMessage, transaction); - state.pendingSTBatches.push(beforeBatch); state.pendingSTBatches.push({ @@ -132,6 +136,8 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< applied: runtimeOutput.status, }); + state = addTransactionToBundle(state, runtimeOutput.isMessage, transaction); + // Apply afterTransaction hook state transitions const afterTxHookArguments = toAfterTransactionHookArgument( executionData, @@ -144,6 +150,7 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< this.stateServiceProvider.popCurrentStateService(); const afterBatch = await this.executeTransactionHooks( + "after", async (module, args) => await module.afterTransaction(args), afterTxHookArguments, isMessage @@ -170,14 +177,6 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< // Validate layout of transaction witness transaction.assertTransactionType(isMessage); - // Check network state integrity against appProof - state.networkState - .hash() - .assertEquals( - runtimeOutput.networkStateHash, - "Network state does not match state used in AppProof" - ); - return new TransactionProverState(state); } @@ 
-201,15 +200,18 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< return verificationKey; } - private async executeTransactionHooks< - T extends BeforeTransactionHookArguments | AfterTransactionHookArguments, - >( - hook: (module: ProvableTransactionHook, args: T) => Promise, - hookArguments: T, + private async executeTransactionHooks( + type: T, + hook: ( + module: ProvableTransactionHook, + args: TransactionHookArgument + ) => Promise, + hookArguments: TransactionHookArgument, isMessage: Bool ) { const { batch, rawStatus } = await executeHooks( hookArguments, + `${type}Transaction`, async () => { for (const module of this.transactionHooks) { // eslint-disable-next-line no-await-in-loop @@ -227,10 +229,15 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< } public async proveTransactionInternal( - fromState: TransactionProverState, + publicInput: TransactionProverPublicInput, runtimeProof: DynamicRuntimeProof, - { transaction, networkState }: BlockProverSingleTransactionExecutionData - ): Promise { + transaction: TransactionProverTransactionArguments, + args: TransactionProverArguments + ): Promise { + const state = TransactionProverState.fromCommitments(publicInput, args); + + state.bundleList.checkLastBundleElement(state, args.networkState); + const verificationKey = this.verifyVerificationKeyAttestation( transaction.verificationKeyAttestation, transaction.transaction.methodId @@ -238,32 +245,30 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< runtimeProof.verify(verificationKey); - return await this.applyTransaction( - fromState, + const result = await this.applyTransaction( + state, runtimeProof.publicOutput, transaction, - networkState + args.networkState ); + + result.bundleList.addToBundle(result, args.networkState); + + return result.toCommitments(); } @provableMethod() public async proveTransaction( publicInput: TransactionProverPublicInput, runtimeProof: DynamicRuntimeProof, - 
executionData: BlockProverSingleTransactionExecutionData + executionData: TransactionProverExecutionData ): Promise { - const state = TransactionProverState.fromCommitments( + return await this.proveTransactionInternal( publicInput, - executionData.networkState - ); - - const stateTo = await this.proveTransactionInternal( - state, runtimeProof, - executionData + executionData.transaction, + executionData.args ); - - return new TransactionProverPublicOutput(stateTo.toCommitments()); } @provableMethod() @@ -271,30 +276,25 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< publicInput: TransactionProverPublicInput, runtimeProof1: DynamicRuntimeProof, runtimeProof2: DynamicRuntimeProof, - executionData: BlockProverMultiTransactionExecutionData + executionData1: TransactionProverExecutionData, + executionData2: TransactionProverExecutionData ): Promise { - const state = TransactionProverState.fromCommitments( + const state1 = await this.proveTransactionInternal( publicInput, - executionData.networkState + runtimeProof1, + executionData1.transaction, + executionData1.args ); - // this.staticChecks(publicInput); - - const state1 = await this.proveTransactionInternal(state, runtimeProof1, { - transaction: executionData.transaction1, - networkState: executionData.networkState, - }); - // Switch to next state record for 2nd tx beforeTx hook - // TODO Can be prevented by merging 1st afterTx + 2nd beforeTx this.stateServiceProvider.popCurrentStateService(); - const stateTo = await this.proveTransactionInternal(state1, runtimeProof2, { - transaction: executionData.transaction2, - networkState: executionData.networkState, - }); - - return new TransactionProverPublicOutput(stateTo.toCommitments()); + return await this.proveTransactionInternal( + state1, + runtimeProof2, + executionData2.transaction, + executionData2.args + ); } @provableMethod() @@ -306,27 +306,14 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< proof1.verify(); 
proof2.verify(); - // Check transaction list hash. - // Only assert them if these are tx proofs, skip for closed proofs - publicInput.transactionsHash - .equals(proof1.publicInput.transactionsHash) - .assertTrue( - errors.transactionsHashNotMatching("publicInput.from -> proof1.from") - ); - proof1.publicOutput.transactionsHash - .equals(proof2.publicInput.transactionsHash) - .assertTrue( - errors.transactionsHashNotMatching("proof1.to -> proof2.from") - ); - - // Check networkhash - publicInput.networkStateHash.assertEquals( - proof1.publicInput.networkStateHash, - errors.networkStateHashNotMatching("publicInput.from -> proof1.from") + // Check bundlesHash + publicInput.bundlesHash.assertEquals( + proof1.publicInput.bundlesHash, + errors.bundlesHashNotMatching("publicInput.from -> proof1.from") ); - proof1.publicOutput.networkStateHash.assertEquals( - proof2.publicInput.networkStateHash, - errors.networkStateHashNotMatching("proof1.to -> proof2.from") + proof1.publicOutput.bundlesHash.assertEquals( + proof2.publicInput.bundlesHash, + errors.bundlesHashNotMatching("proof1.to -> proof2.from") ); // Check eternalTransactionsHash @@ -356,32 +343,31 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< ); // Check pendingSTBatchesHash - publicInput.pendingSTBatchesHash.assertEquals( - proof1.publicInput.pendingSTBatchesHash, - errors.transactionsHashNotMatching("publicInput.from -> proof1.from") - ); - proof1.publicOutput.pendingSTBatchesHash.assertEquals( - proof2.publicInput.pendingSTBatchesHash, - errors.transactionsHashNotMatching("proof1.to -> proof2.from") - ); - - // Check witnessedRootsHash - publicInput.witnessedRootsHash.assertEquals( - proof1.publicInput.witnessedRootsHash, - errors.transactionsHashNotMatching("publicInput.from -> proof1.from") - ); - proof1.publicOutput.witnessedRootsHash.assertEquals( - proof2.publicInput.witnessedRootsHash, - errors.transactionsHashNotMatching("proof1.to -> proof2.from") - ); + // 
publicInput.pendingSTBatchesHash.assertEquals( + // proof1.publicInput.pendingSTBatchesHash, + // errors.transactionsHashNotMatching("publicInput.from -> proof1.from") + // ); + // proof1.publicOutput.pendingSTBatchesHash.assertEquals( + // proof2.publicInput.pendingSTBatchesHash, + // errors.transactionsHashNotMatching("proof1.to -> proof2.from") + // ); + // + // // Check witnessedRootsHash + // publicInput.witnessedRootsHash.assertEquals( + // proof1.publicInput.witnessedRootsHash, + // errors.transactionsHashNotMatching("publicInput.from -> proof1.from") + // ); + // proof1.publicOutput.witnessedRootsHash.assertEquals( + // proof2.publicInput.witnessedRootsHash, + // errors.transactionsHashNotMatching("proof1.to -> proof2.from") + // ); return new TransactionProverPublicOutput({ - transactionsHash: proof2.publicOutput.transactionsHash, - networkStateHash: proof2.publicOutput.networkStateHash, + bundlesHash: proof2.publicOutput.bundlesHash, eternalTransactionsHash: proof2.publicOutput.eternalTransactionsHash, incomingMessagesHash: proof2.publicOutput.incomingMessagesHash, - pendingSTBatchesHash: proof2.publicOutput.pendingSTBatchesHash, - witnessedRootsHash: proof2.publicOutput.witnessedRootsHash, + // pendingSTBatchesHash: proof2.publicOutput.pendingSTBatchesHash, + // witnessedRootsHash: proof2.publicOutput.witnessedRootsHash, }); } @@ -400,21 +386,18 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< const merge = prover.merge.bind(prover); const program = ZkProgram({ - name: "BlockProver", + name: "TransactionProver", publicInput: TransactionProverPublicInput, publicOutput: TransactionProverPublicOutput, methods: { proveTransaction: { - privateInputs: [ - DynamicRuntimeProof, - BlockProverSingleTransactionExecutionData, - ], + privateInputs: [DynamicRuntimeProof, TransactionProverExecutionData], async method( publicInput: TransactionProverPublicInput, runtimeProof: DynamicRuntimeProof, - executionData: 
BlockProverSingleTransactionExecutionData + executionData: TransactionProverExecutionData ) { return { publicOutput: await proveTransaction( @@ -430,21 +413,24 @@ export class TransactionProverZkProgrammable extends ZkProgrammable< privateInputs: [ DynamicRuntimeProof, DynamicRuntimeProof, - BlockProverMultiTransactionExecutionData, + TransactionProverExecutionData, + TransactionProverExecutionData, ], async method( publicInput: TransactionProverPublicInput, runtimeProof1: DynamicRuntimeProof, runtimeProof2: DynamicRuntimeProof, - executionData: BlockProverMultiTransactionExecutionData + executionData1: TransactionProverExecutionData, + executionData2: TransactionProverExecutionData ) { return { publicOutput: await proveTransactions( publicInput, runtimeProof1, runtimeProof2, - executionData + executionData1, + executionData2 ), }; }, @@ -537,7 +523,7 @@ export class TransactionProver public proveTransaction( publicInput: TransactionProverPublicInput, runtimeProof: DynamicRuntimeProof, - executionData: BlockProverSingleTransactionExecutionData + executionData: TransactionProverExecutionData ): Promise { return this.zkProgrammable.proveTransaction( publicInput, @@ -550,13 +536,15 @@ export class TransactionProver publicInput: TransactionProverPublicInput, runtimeProof1: DynamicRuntimeProof, runtimeProof2: DynamicRuntimeProof, - executionData: BlockProverMultiTransactionExecutionData + executionData1: TransactionProverExecutionData, + executionData2: TransactionProverExecutionData ): Promise { return this.zkProgrammable.proveTransactions( publicInput, runtimeProof1, runtimeProof2, - executionData + executionData1, + executionData2 ); } diff --git a/packages/protocol/src/prover/utils.ts b/packages/protocol/src/prover/utils.ts index 5b67de0f1..9adc0e68b 100644 --- a/packages/protocol/src/prover/utils.ts +++ b/packages/protocol/src/prover/utils.ts @@ -43,8 +43,10 @@ export function constructBatch( // TODO How does this interact with the RuntimeMethodExecutionContext 
when executing runtimemethods? export async function executeHooks( contextArguments: RuntimeMethodExecutionData, + hookName: string, method: () => Promise, - isMessage: Bool | undefined = undefined + // This can be either that the tx is a message, or we are inside a dummy block hook + skipEnforceStatus: Bool | undefined = undefined ) { const executionContext = container.resolve(RuntimeMethodExecutionContext); executionContext.clear(); @@ -63,16 +65,16 @@ export async function executeHooks( executionContext.current().result; // See https://github.com/proto-kit/framework/issues/321 for why we do this here - if (isMessage !== undefined) { + if (skipEnforceStatus !== undefined) { // isMessage is defined for all tx hooks status - .or(isMessage) + .or(skipEnforceStatus) .assertTrue( - `Transaction hook call failed for non-message tx: ${statusMessage ?? "-"}` + `${hookName} hook call failed for non-message tx: ${statusMessage ?? "-"}` ); } else { // isMessage is undefined for all block hooks - status.assertTrue(`Block hook call failed: ${statusMessage ?? "-"}`); + status.assertTrue(`${hookName} hook call failed: ${statusMessage ?? 
"-"}`); } return { @@ -91,7 +93,7 @@ export function addTransactionToBundle< const transactionHash = transaction.hash(); // Append tx to transaction list - state.transactionList.pushIf(transactionHash, isMessage.not()); + state.transactionList.push(transactionHash); // Append tx to eternal transaction list // TODO Change that to the a sequence-state compatible transaction struct diff --git a/packages/protocol/src/settlement/contracts/settlement/SettlementBase.ts b/packages/protocol/src/settlement/contracts/settlement/SettlementBase.ts index cc6401c03..e37e35a60 100644 --- a/packages/protocol/src/settlement/contracts/settlement/SettlementBase.ts +++ b/packages/protocol/src/settlement/contracts/settlement/SettlementBase.ts @@ -1,5 +1,4 @@ import { - Bool, DeployArgs, DynamicProof, Field, @@ -199,14 +198,19 @@ export abstract class SettlementBase "OutputNetworkState witness not valid" ); - blockProof.publicOutput.closed.assertEquals( - Bool(true), - "Supplied proof is not a closed BlockProof" + // Check remainders are zero + blockProof.publicOutput.remainders.bundlesHash.assertEquals( + Field(0), + "Bundles list has not been fully proven" ); - blockProof.publicOutput.pendingSTBatchesHash.assertEquals( + blockProof.publicOutput.remainders.pendingSTBatchesHash.assertEquals( Field(0), "Supplied proof is has outstanding STs to be proven" ); + blockProof.publicOutput.remainders.witnessedRootsHash.assertEquals( + Field(0), + "Supplied proof is has outstanding witnessed roots hashes to be proven" + ); // Execute onSettlementHooks for additional checks const stateRecord: SettlementStateRecord = { diff --git a/packages/protocol/src/utils/MinaPrefixedProvableHashList.ts b/packages/protocol/src/utils/MinaPrefixedProvableHashList.ts index eaa2ff8f8..bfa6cb98e 100644 --- a/packages/protocol/src/utils/MinaPrefixedProvableHashList.ts +++ b/packages/protocol/src/utils/MinaPrefixedProvableHashList.ts @@ -47,11 +47,15 @@ export class MinaPrefixedProvableHashList< public constructor( 
valueType: ProvablePure, public readonly prefix: string, - internalCommitment: Field = Field(0) + internalCommitment?: Field ) { super(valueType, internalCommitment); } + public empty(): Field { + return Field(0); + } + protected hash(elements: Field[]): Field { const init = salt(this.prefix); const digest = Poseidon.update(init, elements); @@ -60,7 +64,7 @@ export class MinaPrefixedProvableHashList< } export class MinaActionsHashList extends MinaPrefixedProvableHashList { - public constructor(internalCommitment: Field = Field(0)) { + public constructor(internalCommitment?: Field) { super(Field, MINA_PREFIXES.sequenceEvents, internalCommitment); } } diff --git a/packages/protocol/src/utils/PrefixedProvableHashList.ts b/packages/protocol/src/utils/PrefixedProvableHashList.ts index a31b78eea..4e9d4a755 100644 --- a/packages/protocol/src/utils/PrefixedProvableHashList.ts +++ b/packages/protocol/src/utils/PrefixedProvableHashList.ts @@ -15,6 +15,10 @@ export class PrefixedProvableHashList extends ProvableHashList { this.prefix = stringToField(prefix); } + public empty(): Field { + return Field(0); + } + protected hash(elements: Field[]): Field { return Poseidon.hash([this.prefix, ...elements]); } diff --git a/packages/protocol/src/utils/ProvableHashList.ts b/packages/protocol/src/utils/ProvableHashList.ts index 9b824d60a..777f41f65 100644 --- a/packages/protocol/src/utils/ProvableHashList.ts +++ b/packages/protocol/src/utils/ProvableHashList.ts @@ -23,23 +23,31 @@ export type VerifiedTransition = { * Utilities for creating a hash list from a given value type. */ export abstract class ProvableHashList { + public commitment: Field; + public constructor( protected readonly valueType: ProvablePure, - public commitment: Field = Field(0), + commitment?: Field | undefined, private unconstrainedList: Unconstrained< ProvableHashListData[] > = Unconstrained.from([]) - ) {} + ) { + this.commitment = commitment ?? 
this.empty(); + } + + protected abstract empty(): Field; protected abstract hash(elements: Field[]): Field; private pushUnconstrained(preimage: Field, value: Value) { - const valueConstant = this.valueType.fromFields( - this.valueType.toFields(value).map((field) => field.toConstant()) - ); - this.unconstrainedList.get().push({ - preimage: preimage.toConstant(), - value: valueConstant, + this.unconstrainedList.updateAsProver((array) => { + return [ + ...array, + { + preimage: preimage.toConstant(), + value: Provable.toConstant(this.valueType, value), + }, + ]; }); } @@ -61,11 +69,24 @@ export abstract class ProvableHashList { this.commitment = to; } + public fastForwardIf( + transition: VerifiedTransition, + condition: Bool, + message: string = "some hashlist" + ) { + const { from, to } = transition; + + condition + .implies(from.equals(this.commitment)) + .assertTrue(`From-commitment for ${message} not matching`); + + this.commitment = Provable.if(condition, to, this.commitment); + } + public witnessTip(preimage: Field, value: Value): Bool { - return this.hash([ - this.commitment, - ...this.valueType.toFields(value), - ]).equals(this.commitment); + return this.hash([preimage, ...this.valueType.toFields(value)]).equals( + this.commitment + ); } /** @@ -111,6 +132,10 @@ export abstract class ProvableHashList { return this.commitment; } + public isEmpty(): Bool { + return this.commitment.equals(this.empty()); + } + public getUnconstrainedValues(): Unconstrained< ProvableHashListData[] > { @@ -122,4 +147,8 @@ export class DefaultProvableHashList extends ProvableHashList { public hash(elements: Field[]): Field { return Poseidon.hash(elements); } + + public empty(): Field { + return Field(0); + } } diff --git a/packages/sequencer/package.json b/packages/sequencer/package.json index 79370f6cc..2fe6b483e 100644 --- a/packages/sequencer/package.json +++ b/packages/sequencer/package.json @@ -11,7 +11,7 @@ "test:file": "node --experimental-vm-modules 
--experimental-wasm-modules ../../node_modules/jest/bin/jest.js", "test": "npm run test:file -- ./test/**", "test:watch": "npm run test:file -- ./test/** --watch", - "integration": "npm run test:file -- ./test-integration/** --runInBand", + "test:integration": "npm run test:file -- ./test-integration/** --runInBand", "start": "npm run build && node --experimental-vm-modules --experimental-wasm-modules --es-module-specifier-resolution=node ./dist/src/entry.js" }, "main": "dist/index.js", diff --git a/packages/sequencer/src/mempool/private/PrivateMempool.ts b/packages/sequencer/src/mempool/private/PrivateMempool.ts index e750060ea..ca82f42cc 100644 --- a/packages/sequencer/src/mempool/private/PrivateMempool.ts +++ b/packages/sequencer/src/mempool/private/PrivateMempool.ts @@ -7,11 +7,10 @@ import { import { container, inject } from "tsyringe"; import { AccountStateHook, - BlockHashMerkleTree, MandatoryProtocolModulesRecord, NetworkState, Protocol, - ProvableHookBlockState, + ProvableHookTransactionState, RuntimeMethodExecutionContext, RuntimeMethodExecutionData, StateServiceProvider, @@ -170,10 +169,7 @@ export class PrivateMempool // TODO This is not sound currently as the prover state changes all the time // in the actual blockprover. We need to properly simulate that - const proverState: ProvableHookBlockState = { - blockHashRoot: Field( - previousBlock?.result.blockHashRoot ?? BlockHashMerkleTree.EMPTY_ROOT - ), + const proverState: ProvableHookTransactionState = { eternalTransactionsHash: previousBlock?.block.toEternalTransactionsHash ?? Field(0), transactionsHash: previousBlock?.block.transactionsHash ?? 
Field(0), diff --git a/packages/sequencer/src/protocol/production/flow/BatchFlow.ts b/packages/sequencer/src/protocol/production/flow/BatchFlow.ts index 63720c4fc..824970891 100644 --- a/packages/sequencer/src/protocol/production/flow/BatchFlow.ts +++ b/packages/sequencer/src/protocol/production/flow/BatchFlow.ts @@ -5,14 +5,9 @@ import { Protocol, StateTransitionProverPublicInput, StateTransitionProverPublicOutput, + TransactionProverPublicInput, } from "@proto-kit/protocol"; -import { - isFull, - mapSequential, - MAX_FIELD, - Nullable, - range, -} from "@proto-kit/common"; +import { isFull, mapSequential, Nullable } from "@proto-kit/common"; import { FlowCreator } from "../../../worker/flow/Flow"; import { NewBlockProvingParameters, NewBlockTask } from "../tasks/NewBlockTask"; @@ -33,7 +28,7 @@ export class BatchFlow { private readonly blockProvingTask: NewBlockTask, private readonly blockReductionTask: BlockReductionTask, private readonly stateTransitionFlow: StateTransitionFlow, - private readonly blockFlow: BlockFlow, + private readonly transactionFlow: BlockFlow, @inject("Protocol") private readonly protocol: Protocol, @inject("Tracer") @@ -42,7 +37,7 @@ export class BatchFlow { private isBlockProofsMergable(a: BlockProof, b: BlockProof): boolean { // TODO Proper replication of merge logic - const part1 = a.publicOutput.stateRoot + return a.publicOutput.stateRoot .equals(b.publicInput.stateRoot) .and(a.publicOutput.blockHashRoot.equals(b.publicInput.blockHashRoot)) .and( @@ -53,26 +48,7 @@ export class BatchFlow { b.publicInput.eternalTransactionsHash ) ) - .and(a.publicOutput.closed.equals(b.publicOutput.closed)) .toBoolean(); - - const proof1Closed = a.publicOutput.closed; - const proof2Closed = b.publicOutput.closed; - - const blockNumberProgressionValid = a.publicOutput.blockNumber.equals( - b.publicInput.blockNumber - ); - - const isValidTransactionMerge = a.publicInput.blockNumber - .equals(MAX_FIELD) - .and(blockNumberProgressionValid) - 
.and(proof1Closed.or(proof2Closed).not()); - - const isValidClosedMerge = proof1Closed - .and(proof2Closed) - .and(blockNumberProgressionValid); - - return part1 && isValidClosedMerge.or(isValidTransactionMerge).toBoolean(); } private async pushBlockInput( @@ -92,6 +68,14 @@ export class BatchFlow { ); } + private dummyTransactionProof() { + return this.protocol.transactionProver.zkProgrammable.zkProgram[0].Proof.dummy( + TransactionProverPublicInput.empty(), + TransactionProverPublicInput.empty(), + 2 + ); + } + @trace("batch.prove", ([, batchId]) => ({ batchId })) public async executeBatch(batch: BatchTrace, batchId: number) { const batchFlow = new ReductionTaskFlow( @@ -105,24 +89,14 @@ export class BatchFlow { this.flowCreator ); - const map: Record< - number, - Nullable - > = Object.fromEntries( - batch.blocks.map((blockTrace, i) => [ - i, - { - params: blockTrace.blockParams, - input1: undefined, - input2: undefined, - }, - ]) - ); + const lastBlockProofCollector: Nullable = { + params: batch.blocks.at(-1)!.block, + input1: undefined, + input2: undefined, + }; const dummySTProof = await this.dummySTProof(); - range(0, batch.blocks.length - 1).forEach((index) => { - map[index].input1 = dummySTProof; - }); + const dummyTransactionProof = await this.dummyTransactionProof(); // TODO Make sure we use deferErrorsTo to everywhere (preferably with a nice pattern) // Currently, a lot of errors just get eaten and the chain just halts with no @@ -131,18 +105,36 @@ export class BatchFlow { batch.stateTransitionTrace, batchId, async (proof) => { - const index = batch.blocks.length - 1; - map[index].input1 = proof; - await this.pushBlockInput(map[index], batchFlow); + lastBlockProofCollector.input1 = proof; + await this.pushBlockInput(lastBlockProofCollector, batchFlow); } ); - await mapSequential(batch.blocks, async (blockTrace, blockIndex) => { - await this.blockFlow.executeBlock(blockTrace, async (proof) => { - map[blockIndex].input2 = proof; - await 
this.pushBlockInput(map[blockIndex], batchFlow);
-      });
-    });
+    // TODO Proper height
+    await this.transactionFlow.createTransactionProof(
+      batch.blocks[0].heights[0],
+      batch.transactions,
+      async (proof) => {
+        lastBlockProofCollector.input2 = proof;
+        await this.pushBlockInput(lastBlockProofCollector, batchFlow);
+      }
+    );
+
+    // Push all blocks with dummy proofs, except the last one,
+    // which will wait on the two proofs above to complete
+    await mapSequential(
+      batch.blocks.slice(0, batch.blocks.length - 1),
+      async (blockTrace) => {
+        await this.pushBlockInput(
+          {
+            input1: dummySTProof,
+            input2: dummyTransactionProof,
+            params: blockTrace.block,
+          },
+          batchFlow
+        );
+      }
+    );
 
     return await new Promise((res, rej) => {
       batchFlow.onCompletion(async (result) => res(result));
diff --git a/packages/sequencer/src/protocol/production/flow/BlockFlow.ts b/packages/sequencer/src/protocol/production/flow/BlockFlow.ts
index 542bd1ca3..527dd0833 100644
--- a/packages/sequencer/src/protocol/production/flow/BlockFlow.ts
+++ b/packages/sequencer/src/protocol/production/flow/BlockFlow.ts
@@ -7,16 +7,19 @@ import {
   TransactionProverPublicOutput,
 } from "@proto-kit/protocol";
 import { Field } from "o1js";
+import { mapSequential } from "@proto-kit/common";
+// eslint-disable-next-line import/no-extraneous-dependencies
+import chunk from "lodash/chunk";
 
 import { TransactionProvingTask } from "../tasks/TransactionProvingTask";
-import { TransactionProvingTaskParameters } from "../tasks/serializers/types/TransactionProvingTypes";
 import { FlowCreator } from "../../../worker/flow/Flow";
-import { BlockTrace } from "../tracing/BlockTracingService";
 import { TransactionReductionTask } from "../tasks/TransactionReductionTask";
+import { TransactionTrace } from "../tracing/TransactionTracingService";
 
 import { ReductionTaskFlow } from "./ReductionTaskFlow";
 import { TransactionFlow } from "./TransactionFlow";
 
+// TODO Rename to TransactionFlow
 @injectable()
@scoped(Lifecycle.ContainerScoped) export class BlockFlow { @@ -24,16 +27,16 @@ export class BlockFlow { private readonly flowCreator: FlowCreator, @inject("Protocol") private readonly protocol: Protocol, - private readonly transactionProvingTask: TransactionProvingTask, - private readonly transactionReductionTask: TransactionReductionTask, - private readonly transactionFlow: TransactionFlow + private readonly runtimeFlow: TransactionFlow, + private readonly transactionTask: TransactionProvingTask, + private readonly transactionMergeTask: TransactionReductionTask ) {} - private async dummyTransactionProof(trace: BlockTrace) { + private async dummyTransactionProof() { const publicInput = { - ...trace.blockParams.publicInput, - networkStateHash: Field(0), - transactionsHash: Field(0), + bundlesHash: Field(0), + eternalTransactionsHash: Field(0), + incomingMessagesHash: Field(0), } satisfies TransactionProverPublicInput; // TODO Set publicInput.stateRoot to result after block hooks! @@ -48,24 +51,19 @@ export class BlockFlow { ); } - private async executeTransactions( - trace: BlockTrace - ): Promise< - ReductionTaskFlow - > { - const transactionFlow = new ReductionTaskFlow( + private async proveTransactions(height: string, traces: TransactionTrace[]) { + const flow = new ReductionTaskFlow( { - name: `transactions-${trace.height}`, - inputLength: trace.transactions.length, - mappingTask: this.transactionProvingTask, - reductionTask: this.transactionReductionTask, - + name: `transaction-${height}`, + inputLength: Math.ceil(traces.length / 2), + mappingTask: this.transactionTask, + reductionTask: this.transactionMergeTask, mergableFunction: (a, b) => - a.publicOutput.transactionsHash - .equals(b.publicInput.transactionsHash) + a.publicOutput.eternalTransactionsHash + .equals(b.publicInput.eternalTransactionsHash) .and( - a.publicInput.networkStateHash.equals( - b.publicInput.networkStateHash + a.publicOutput.incomingMessagesHash.equals( + 
b.publicInput.incomingMessagesHash ) ) .toBoolean(), @@ -73,32 +71,30 @@ export class BlockFlow { this.flowCreator ); - await transactionFlow.flow.forEach( - trace.transactions, - async (transactionTrace, txIndex) => { - await this.transactionFlow.proveRuntimes( - transactionTrace, - trace.height, - txIndex, - async (parameters) => { - await transactionFlow.pushInput(parameters); - } - ); - } - ); + await mapSequential(chunk(traces, 2), async (traceChunk, index) => { + await this.runtimeFlow.proveRuntimes( + traceChunk, + height, + index, + async (result) => { + await flow.pushInput(result); + } + ); + }); - return transactionFlow; + return flow; } - public async executeBlock( - trace: BlockTrace, + public async createTransactionProof( + height: string, + trace: TransactionTrace[], callback: (proof: TransactionProof) => Promise ) { - if (trace.transactions.length === 0) { - const proof = await this.dummyTransactionProof(trace); + if (trace.length === 0) { + const proof = await this.dummyTransactionProof(); await callback(proof); } else { - const flow = await this.executeTransactions(trace); + const flow = await this.proveTransactions(height, trace); flow.onCompletion(async (result) => { await callback(result); }); diff --git a/packages/sequencer/src/protocol/production/flow/TransactionFlow.ts b/packages/sequencer/src/protocol/production/flow/TransactionFlow.ts index d124a352c..5d968758a 100644 --- a/packages/sequencer/src/protocol/production/flow/TransactionFlow.ts +++ b/packages/sequencer/src/protocol/production/flow/TransactionFlow.ts @@ -1,10 +1,10 @@ import { injectable } from "tsyringe"; +import { assertSizeOneOrTwo } from "@proto-kit/common"; import { Flow, FlowCreator } from "../../../worker/flow/Flow"; import { RuntimeProof, TransactionProvingTaskParameters, - TransactionProvingType, } from "../tasks/serializers/types/TransactionProvingTypes"; import { RuntimeProvingTask } from "../tasks/RuntimeProvingTask"; import { TransactionTrace } from 
"../tracing/TransactionTracingService"; @@ -20,31 +20,31 @@ export class TransactionFlow { flow: Flow<{ runtimeProofs: { proof: RuntimeProof; index: number }[]; }>, - trace: TransactionTrace, + trace: [TransactionTrace] | [TransactionTrace, TransactionTrace], callback: (params: TransactionProvingTaskParameters) => Promise ) { - const requiredLength = trace.type === TransactionProvingType.MULTI ? 2 : 1; + const requiredLength = trace.length; if (flow.state.runtimeProofs.length === requiredLength) { let parameters: TransactionProvingTaskParameters; - if (trace.type === TransactionProvingType.MULTI) { + if (requiredLength === 2) { // Sort ascending const sorted = flow.state.runtimeProofs.sort( ({ index: a }, { index: b }) => a - b ); - parameters = { - type: trace.type, - parameters: trace.transaction, - proof1: sorted[0].proof, - proof2: sorted[1].proof, - }; + + parameters = [ + { parameters: trace[0].transaction, proof: sorted[0].proof }, + { parameters: trace[1].transaction, proof: sorted[1].proof }, + ]; } else { - parameters = { - type: trace.type, - parameters: trace.transaction, - proof1: flow.state.runtimeProofs[0].proof, - }; + parameters = [ + { + parameters: trace[0].transaction, + proof: flow.state.runtimeProofs[0].proof, + }, + ]; } await callback(parameters); @@ -52,13 +52,15 @@ export class TransactionFlow { } public async proveRuntimes( - trace: TransactionTrace, + trace: TransactionTrace[], blockHeight: string, txIndex: number, callback: (params: TransactionProvingTaskParameters) => Promise ) { + assertSizeOneOrTwo(trace); + const name = `transaction-${blockHeight}-${txIndex}${ - trace.type === TransactionProvingType.MULTI ? "-double" : "" + trace.length === 2 ? 
"-double" : "" }`; const flow = this.flowCreator.createFlow<{ runtimeProofs: { proof: RuntimeProof; index: number }[]; @@ -66,24 +68,17 @@ export class TransactionFlow { runtimeProofs: [], }); - await flow.pushTask( - this.runtimeProvingTask, - trace.runtime[0], - async (proof) => { - flow.state.runtimeProofs.push({ proof, index: 0 }); - await this.resolveTransactionFlow(flow, trace, callback); - } + await Promise.all( + trace.map(async (transaction, index) => { + await flow.pushTask( + this.runtimeProvingTask, + transaction.runtime, + async (proof) => { + flow.state.runtimeProofs.push({ proof, index }); + await this.resolveTransactionFlow(flow, trace, callback); + } + ); + }) ); - - if (trace.type === TransactionProvingType.MULTI) { - await flow.pushTask( - this.runtimeProvingTask, - trace.runtime[1], - async (proof) => { - flow.state.runtimeProofs.push({ proof, index: 1 }); - await this.resolveTransactionFlow(flow, trace, callback); - } - ); - } } } diff --git a/packages/sequencer/src/protocol/production/sequencing/BlockProductionService.ts b/packages/sequencer/src/protocol/production/sequencing/BlockProductionService.ts index 18010fa28..f9e57eb16 100644 --- a/packages/sequencer/src/protocol/production/sequencing/BlockProductionService.ts +++ b/packages/sequencer/src/protocol/production/sequencing/BlockProductionService.ts @@ -10,7 +10,7 @@ import { reduceStateTransitions, RuntimeTransaction, StateServiceProvider, - toProvableHookBlockState, + toBeforeBlockHookArgument, TransactionHashList, } from "@proto-kit/protocol"; import { Field } from "o1js"; @@ -128,7 +128,7 @@ export class BlockProductionService { // Get used networkState by executing beforeBlock() hooks const beforeHookResult = await this.executeBeforeBlockHook( - toProvableHookBlockState(blockState), + toBeforeBlockHookArgument(blockState), lastResult.afterNetworkState, stateService ); diff --git a/packages/sequencer/src/protocol/production/sequencing/TransactionExecutionService.ts 
b/packages/sequencer/src/protocol/production/sequencing/TransactionExecutionService.ts index 24dc9852a..e3dc6ecdb 100644 --- a/packages/sequencer/src/protocol/production/sequencing/TransactionExecutionService.ts +++ b/packages/sequencer/src/protocol/production/sequencing/TransactionExecutionService.ts @@ -22,6 +22,7 @@ import { ProvableStateTransition, DefaultProvableHashList, addTransactionToBundle, + TransactionProverState, } from "@proto-kit/protocol"; import { Bool, Field } from "o1js"; import { AreProofsEnabled, log, mapSequential } from "@proto-kit/common"; @@ -58,12 +59,10 @@ export type RuntimeContextReducedExecutionResult = Pick< >; export type BlockTrackers = Pick< - BlockProverState, - | "transactionList" - | "eternalTransactionsList" - | "incomingMessages" - | "blockHashRoot" ->; + TransactionProverState, + "eternalTransactionsList" | "incomingMessages" | "transactionList" +> & + Pick; function getAreProofsEnabledFromModule( module: RuntimeModule diff --git a/packages/sequencer/src/protocol/production/tasks/NewBlockTask.ts b/packages/sequencer/src/protocol/production/tasks/NewBlockTask.ts index 338ce9514..de5c15015 100644 --- a/packages/sequencer/src/protocol/production/tasks/NewBlockTask.ts +++ b/packages/sequencer/src/protocol/production/tasks/NewBlockTask.ts @@ -8,10 +8,11 @@ import { StateTransitionProvable, BlockHashMerkleTreeWitness, MandatoryProtocolModulesRecord, - WitnessedRootWitness, TransactionProof, BlockProof, TransactionProvable, + BlockArguments, + BlockArgumentsBatch, } from "@proto-kit/protocol"; import { Bool } from "o1js"; import { @@ -28,14 +29,19 @@ import type { TaskStateRecord } from "../tracing/BlockTracingService"; import { NewBlockProvingParametersSerializer } from "./serializers/NewBlockProvingParametersSerializer"; import { executeWithPrefilledStateService } from "./TransactionProvingTask"; +export type NewBlockArguments = { + args: BlockArguments; + startingStateBeforeHook: TaskStateRecord; + startingStateAfterHook: 
TaskStateRecord; +}; + export interface NewBlockProverParameters { publicInput: BlockProverPublicInput; networkState: NetworkState; blockWitness: BlockHashMerkleTreeWitness; deferSTProof: Bool; - afterBlockRootWitness: WitnessedRootWitness; - startingStateBeforeHook: TaskStateRecord; - startingStateAfterHook: TaskStateRecord; + deferTransactionProof: Bool; + blocks: NewBlockArguments[]; } export type NewBlockProvingParameters = PairingDerivedInput< @@ -96,32 +102,41 @@ export class NewBlockTask const { networkState, blockWitness, - startingStateBeforeHook, - startingStateAfterHook, publicInput, deferSTProof, - afterBlockRootWitness, + deferTransactionProof, + blocks, } = parameters; - await this.blockProver.proveBlock( - publicInput, - networkState, - blockWitness, - input1, - deferSTProof, - afterBlockRootWitness, - input2 - ); + const blockArgumentBatch = new BlockArgumentsBatch({ + batch: blocks.map((block) => block.args), + }); + + const stateRecords = blocks.flatMap((block) => [ + block.startingStateBeforeHook, + block.startingStateAfterHook, + ]); await executeWithPrefilledStateService( this.protocol.stateServiceProvider, - [startingStateBeforeHook, startingStateAfterHook], - async () => {} + stateRecords, + async () => { + await this.blockProver.proveBlockBatch( + publicInput, + networkState, + blockWitness, + input1, + deferSTProof, + input2, + deferTransactionProof, + blockArgumentBatch + ); + } ); return await executeWithPrefilledStateService( this.protocol.stateServiceProvider, - [startingStateBeforeHook, startingStateAfterHook], + stateRecords, async () => await this.executionContext.current().result.prove() ); diff --git a/packages/sequencer/src/protocol/production/tasks/TransactionProvingTask.ts b/packages/sequencer/src/protocol/production/tasks/TransactionProvingTask.ts index de4b80b63..88c77d4db 100644 --- a/packages/sequencer/src/protocol/production/tasks/TransactionProvingTask.ts +++ 
b/packages/sequencer/src/protocol/production/tasks/TransactionProvingTask.ts @@ -21,10 +21,7 @@ import { TaskWorkerModule } from "../../../worker/worker/TaskWorkerModule"; import type { TaskStateRecord } from "../tracing/BlockTracingService"; import { TransactionProvingTaskParameterSerializer } from "./serializers/TransactionProvingTaskParameterSerializer"; -import { - TransactionProvingTaskParameters, - TransactionProvingType, -} from "./serializers/types/TransactionProvingTypes"; +import { TransactionProvingTaskParameters } from "./serializers/types/TransactionProvingTypes"; export async function executeWithPrefilledStateService( stateServiceProvider: StateServiceProvider, @@ -93,26 +90,31 @@ export class TransactionProvingTask public async compute( input: TransactionProvingTaskParameters ): Promise { + const startingState = input.flatMap((i) => i.parameters.startingState); + await executeWithPrefilledStateService( this.protocol.stateServiceProvider, - input.parameters.startingState, + startingState, async () => { - const { type, parameters } = input; + const { parameters, proof } = input[0]; - const proof1 = DynamicRuntimeProof.fromProof(input.proof1); + const proof1 = DynamicRuntimeProof.fromProof(proof); - if (type === TransactionProvingType.SINGLE) { + if (input.length === 1) { await this.transactionProver.proveTransaction( parameters.publicInput, proof1, parameters.executionData ); } else { + const { parameters: parameters2, proof: proof2 } = input[1]; + await this.transactionProver.proveTransactions( parameters.publicInput, proof1, - DynamicRuntimeProof.fromProof(input.proof2), - parameters.executionData + DynamicRuntimeProof.fromProof(proof2), + parameters.executionData, + parameters2.executionData ); } } @@ -120,7 +122,7 @@ export class TransactionProvingTask return await executeWithPrefilledStateService( this.protocol.stateServiceProvider, - input.parameters.startingState, + startingState, async () => await this.executionContext.current().result.prove() 
); diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/NewBlockProvingParametersSerializer.ts b/packages/sequencer/src/protocol/production/tasks/serializers/NewBlockProvingParametersSerializer.ts index 58771b1df..d0f8bc122 100644 --- a/packages/sequencer/src/protocol/production/tasks/serializers/NewBlockProvingParametersSerializer.ts +++ b/packages/sequencer/src/protocol/production/tasks/serializers/NewBlockProvingParametersSerializer.ts @@ -1,4 +1,5 @@ import { + BlockArguments, BlockHashMerkleTreeWitness, BlockProverPublicInput, NetworkState, @@ -9,7 +10,6 @@ import { TransactionProof, TransactionProverPublicInput, TransactionProverPublicOutput, - WitnessedRootWitness, } from "@proto-kit/protocol"; import { Bool } from "o1js"; @@ -30,10 +30,13 @@ interface JsonType { publicInput: ReturnType; networkState: ReturnType; blockWitness: ReturnType; - startingStateBeforeHook: JSONEncodableState; - startingStateAfterHook: JSONEncodableState; deferSTProof: boolean; - afterBlockRootWitness: ReturnType; + deferTransactionProof: boolean; + blocks: { + startingStateBeforeHook: JSONEncodableState; + startingStateAfterHook: JSONEncodableState; + args: ReturnType; + }[]; }; } @@ -71,19 +74,22 @@ export class NewBlockProvingParametersSerializer input.params.blockWitness ), - startingStateBeforeHook: DecodedStateSerializer.toJSON( - input.params.startingStateBeforeHook - ), + blocks: input.params.blocks.map((block) => { + return { + startingStateBeforeHook: DecodedStateSerializer.toJSON( + block.startingStateBeforeHook + ), - startingStateAfterHook: DecodedStateSerializer.toJSON( - input.params.startingStateAfterHook - ), + startingStateAfterHook: DecodedStateSerializer.toJSON( + block.startingStateAfterHook + ), - deferSTProof: input.params.deferSTProof.toBoolean(), + args: BlockArguments.toJSON(block.args), + }; + }), - afterBlockRootWitness: WitnessedRootWitness.toJSON( - input.params.afterBlockRootWitness - ), + deferSTProof: 
input.params.deferSTProof.toBoolean(), + deferTransactionProof: input.params.deferTransactionProof.toBoolean(), }, } satisfies JsonType); } @@ -108,19 +114,22 @@ BlockHashMerkleTreeWitness.fromJSON(jsonObject.params.blockWitness) ), - startingStateBeforeHook: DecodedStateSerializer.fromJSON( - jsonObject.params.startingStateBeforeHook - ), + blocks: jsonObject.params.blocks.map((block) => { return { startingStateBeforeHook: DecodedStateSerializer.fromJSON( block.startingStateBeforeHook ), - startingStateAfterHook: DecodedStateSerializer.fromJSON( - jsonObject.params.startingStateBeforeHook - ), + startingStateAfterHook: DecodedStateSerializer.fromJSON( block.startingStateAfterHook ), - deferSTProof: Bool(jsonObject.params.deferSTProof), + args: BlockArguments.fromJSON(block.args), }; }), - afterBlockRootWitness: WitnessedRootWitness.fromJSON( - jsonObject.params.afterBlockRootWitness - ), + deferSTProof: Bool(jsonObject.params.deferSTProof), + deferTransactionProof: Bool(jsonObject.params.deferTransactionProof), }, }; } diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/TransactionProvingTaskParameterSerializer.ts b/packages/sequencer/src/protocol/production/tasks/serializers/TransactionProvingTaskParameterSerializer.ts index 3e08b804e..2cb30a520 100644 --- a/packages/sequencer/src/protocol/production/tasks/serializers/TransactionProvingTaskParameterSerializer.ts +++ b/packages/sequencer/src/protocol/production/tasks/serializers/TransactionProvingTaskParameterSerializer.ts @@ -1,19 +1,20 @@ import { - BlockProverPublicInput, MethodPublicOutput, - NetworkState, ReturnType, RuntimeTransaction, + TransactionProverPublicInput, TransactionProverTransactionArguments, + TransactionProverArguments, } from "@proto-kit/protocol"; import { JsonProof, Signature } from "o1js"; +import { assertSizeOneOrTwo, mapSequential } from "@proto-kit/common"; import { TaskSerializer } from 
"../../../../worker/flow/Task"; import { ProofTaskSerializer } from "../../../../helpers/utils"; import { + TransactionProverTaskParameters, TransactionProvingTaskParameters, - TransactionProvingType, } from "./types/TransactionProvingTypes"; import { DecodedStateSerializer, @@ -21,6 +22,20 @@ import { } from "./DecodedStateSerializer"; import { RuntimeVerificationKeyAttestationSerializer } from "./RuntimeVerificationKeyAttestationSerializer"; +export type TransactionProvingTaskParametersJSON = { + parameters: TransactionProverTaskParametersJSON; + proof: JsonProof; +}[]; + +export type TransactionProverTaskParametersJSON = { + startingState: JSONEncodableState[]; + publicInput: ReturnType; + executionData: { + transaction: TransactionProverTransactionArgumentsJSON; + args: ReturnType; + }; +}; + export type TransactionProverTransactionArgumentsJSON = { transaction: ReturnType; signature: ReturnType; @@ -29,38 +44,6 @@ export type TransactionProverTransactionArgumentsJSON = { >; }; -export type SingleExecutionDataJSON = { - transaction: TransactionProverTransactionArgumentsJSON; - networkState: ReturnType; -}; - -export type MultiExecutionDataJSON = { - transaction1: TransactionProverTransactionArgumentsJSON; - transaction2: TransactionProverTransactionArgumentsJSON; - networkState: ReturnType; -}; - -export type TransactionProverTaskParametersJSON< - ExecutionData extends SingleExecutionDataJSON | MultiExecutionDataJSON, -> = { - startingState: JSONEncodableState[]; - publicInput: ReturnType; - executionData: ExecutionData; -}; - -export type TransactionProvingTaskParametersJSON = - | { - type: TransactionProvingType.SINGLE; - proof1: JsonProof; - parameters: TransactionProverTaskParametersJSON; - } - | { - type: TransactionProvingType.MULTI; - proof1: JsonProof; - proof2: JsonProof; - parameters: TransactionProverTaskParametersJSON; - }; - export class TransactionProvingTaskParameterSerializer implements TaskSerializer { @@ -100,61 +83,35 @@ export class 
TransactionProvingTaskParameterSerializer }; } - public toJSON(input: TransactionProvingTaskParameters): string { - let taskParamsJson: TransactionProvingTaskParametersJSON; + public toJSON(inputs: TransactionProvingTaskParameters): string { + const taskParamsJson: TransactionProvingTaskParametersJSON = inputs.map( + (input) => { + const { parameters, proof } = input; + const { executionData } = parameters; - const { type, parameters } = input; + const proofJSON = this.runtimeProofSerializer.toJSONProof(proof); - const partialParameters = { - publicInput: BlockProverPublicInput.toJSON(parameters.publicInput), + const parametersJSON: TransactionProverTaskParametersJSON = { + publicInput: TransactionProverPublicInput.toJSON( + parameters.publicInput + ), - startingState: parameters.startingState.map((stateRecord) => - DecodedStateSerializer.toJSON(stateRecord) - ), - }; + startingState: parameters.startingState.map((stateRecord) => + DecodedStateSerializer.toJSON(stateRecord) + ), - // The reason we can't just use the structs toJSON is that the VerificationKey - // toJSON and fromJSON isn't consistent -> i.e. the serialization doesn't work - // the same both ways. 
We fix that in our custom serializer - if (type === TransactionProvingType.SINGLE) { - const { executionData } = parameters; - const executionDataJson: SingleExecutionDataJSON = { - networkState: NetworkState.toJSON(executionData.networkState), - transaction: this.transactionProverArgumentsToJson( - executionData.transaction - ), - }; + executionData: { + args: TransactionProverArguments.toJSON(executionData.args), - taskParamsJson = { - type, - proof1: this.runtimeProofSerializer.toJSONProof(input.proof1), - parameters: { - ...partialParameters, - executionData: executionDataJson, - }, - }; - } else { - const { executionData } = parameters; - const executionDataJson: MultiExecutionDataJSON = { - networkState: NetworkState.toJSON(executionData.networkState), - transaction1: this.transactionProverArgumentsToJson( - executionData.transaction1 - ), - transaction2: this.transactionProverArgumentsToJson( - executionData.transaction2 - ), - }; + transaction: this.transactionProverArgumentsToJson( + executionData.transaction + ), + }, + }; - taskParamsJson = { - type, - proof1: this.runtimeProofSerializer.toJSONProof(input.proof1), - proof2: this.runtimeProofSerializer.toJSONProof(input.proof2), - parameters: { - ...partialParameters, - executionData: executionDataJson, - }, - }; - } + return { parameters: parametersJSON, proof: proofJSON }; + } + ); return JSON.stringify(taskParamsJson); } @@ -166,58 +123,36 @@ export class TransactionProvingTaskParameterSerializer const jsonReadyObject: TransactionProvingTaskParametersJSON = JSON.parse(json); - const { type, parameters } = jsonReadyObject; + const result = await mapSequential(jsonReadyObject, async (input) => { + const { parameters, proof } = input; - const partialParameters = { - publicInput: BlockProverPublicInput.fromJSON(parameters.publicInput), + const decodedProof = + await this.runtimeProofSerializer.fromJSONProof(proof); - startingState: parameters.startingState.map((stateRecord) => - 
DecodedStateSerializer.fromJSON(stateRecord) - ), - }; - - if (type === TransactionProvingType.SINGLE) { - return { - type, - proof1: await this.runtimeProofSerializer.fromJSONProof( - jsonReadyObject.proof1 + const decodedParameters: TransactionProverTaskParameters = { + publicInput: TransactionProverPublicInput.fromJSON( + parameters.publicInput + ), + startingState: parameters.startingState.map((stateRecord) => + DecodedStateSerializer.fromJSON(stateRecord) ), - parameters: { - ...partialParameters, - executionData: { - transaction: this.transactionProverArgumentsFromJson( - parameters.executionData.transaction - ), - networkState: new NetworkState( - NetworkState.fromJSON(parameters.executionData.networkState) - ), - }, - }, - }; - } - - return { - type, - proof1: await this.runtimeProofSerializer.fromJSONProof( - jsonReadyObject.proof1 - ), - proof2: await this.runtimeProofSerializer.fromJSONProof( - jsonReadyObject.proof2 - ), - parameters: { - ...partialParameters, executionData: { - transaction1: this.transactionProverArgumentsFromJson( - parameters.executionData.transaction1 - ), - transaction2: this.transactionProverArgumentsFromJson( - parameters.executionData.transaction2 + transaction: this.transactionProverArgumentsFromJson( + parameters.executionData.transaction ), - networkState: new NetworkState( - NetworkState.fromJSON(parameters.executionData.networkState) + args: TransactionProverArguments.fromJSON( + parameters.executionData.args ), }, - }, - }; + }; + return { + parameters: decodedParameters, + proof: decodedProof, + }; + }); + + assertSizeOneOrTwo(result); + + return result; } } diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/types/TransactionProvingTypes.ts b/packages/sequencer/src/protocol/production/tasks/serializers/types/TransactionProvingTypes.ts index b091abdc2..0db92f4b7 100644 --- a/packages/sequencer/src/protocol/production/tasks/serializers/types/TransactionProvingTypes.ts +++ 
b/packages/sequencer/src/protocol/production/tasks/serializers/types/TransactionProvingTypes.ts @@ -1,8 +1,7 @@ import { - BlockProverMultiTransactionExecutionData, - BlockProverPublicInput, - BlockProverSingleTransactionExecutionData, MethodPublicOutput, + TransactionProverExecutionData, + TransactionProverPublicInput, } from "@proto-kit/protocol"; import { Proof } from "o1js"; @@ -10,30 +9,15 @@ import type { TaskStateRecord } from "../../../tracing/BlockTracingService"; export type RuntimeProof = Proof; -export enum TransactionProvingType { - SINGLE, - MULTI, -} - -export interface TransactionProverTaskParameters< - ExecutionData extends - | BlockProverSingleTransactionExecutionData - | BlockProverMultiTransactionExecutionData, -> { - publicInput: BlockProverPublicInput; - executionData: ExecutionData; +export interface TransactionProverTaskParameters { + publicInput: TransactionProverPublicInput; + executionData: TransactionProverExecutionData; startingState: TaskStateRecord[]; } -export type TransactionProvingTaskParameters = - | { - type: TransactionProvingType.SINGLE; - parameters: TransactionProverTaskParameters; - proof1: RuntimeProof; - } - | { - type: TransactionProvingType.MULTI; - parameters: TransactionProverTaskParameters; - proof1: RuntimeProof; - proof2: RuntimeProof; - }; +export type OneOrTwo = [Type] | [Type, Type]; + +export type TransactionProvingTaskParameters = OneOrTwo<{ + parameters: TransactionProverTaskParameters; + proof: RuntimeProof; +}>; diff --git a/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts b/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts index f54d52e6f..f09e20710 100644 --- a/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts +++ b/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts @@ -1,29 +1,43 @@ -import { log, yieldSequential } from "@proto-kit/common"; +import { log, range, unzip, yieldSequential } from 
"@proto-kit/common"; import { AppliedBatchHashList, + BLOCK_ARGUMENT_BATCH_SIZE, MinaActionsHashList, TransactionHashList, WitnessedRootHashList, + BundleHashList, + BlockArguments, } from "@proto-kit/protocol"; import { inject, injectable } from "tsyringe"; +// eslint-disable-next-line import/no-extraneous-dependencies +import chunk from "lodash/chunk"; +import { Bool, Field } from "o1js"; import { StateTransitionProofParameters } from "../tasks/StateTransitionTask"; import { BlockWithResult } from "../../../storage/model/Block"; import { trace } from "../../../logging/trace"; import { Tracer } from "../../../logging/Tracer"; import { CachedLinkedLeafStore } from "../../../state/lmt/CachedLinkedLeafStore"; - import { - BlockTrace, - BlockTracingService, - BlockTracingState, -} from "./BlockTracingService"; + NewBlockArguments, + NewBlockProverParameters, +} from "../tasks/NewBlockTask"; + +import { BlockTracingService, BlockTracingState } from "./BlockTracingService"; import { StateTransitionTracingService } from "./StateTransitionTracingService"; +import { TransactionTrace } from "./TransactionTracingService"; + +type BatchTracingState = BlockTracingState; -type BatchTracingState = Omit; +export type BlockTrace = { + block: NewBlockProverParameters; + // Only for debugging and logging + heights: [string, string]; +}; export type BatchTrace = { blocks: BlockTrace[]; + transactions: TransactionTrace[]; stateTransitionTrace: StateTransitionProofParameters[]; }; @@ -40,12 +54,15 @@ export class BatchTracingService { return { pendingSTBatches: new AppliedBatchHashList(), witnessedRoots: new WitnessedRootHashList(), + bundleList: new BundleHashList(), stateRoot: block.block.fromStateRoot, eternalTransactionsList: new TransactionHashList( block.block.fromEternalTransactionsHash ), incomingMessages: new MinaActionsHashList(block.block.fromMessagesHash), networkState: block.block.networkState.before, + blockNumber: block.block.height, + blockHashRoot: 
block.block.fromBlockHashRoot, }; } @@ -57,25 +74,71 @@ export class BatchTracingService { // Trace blocks const numBlocks = blocks.length; + const numBatches = Math.ceil(numBlocks / BLOCK_ARGUMENT_BATCH_SIZE); + const [, blockTraces] = await yieldSequential( - blocks, - async (state, block, index) => { - const blockProverState: BlockTracingState = { - ...state, - transactionList: new TransactionHashList(), + chunk(blocks, BLOCK_ARGUMENT_BATCH_SIZE), + async (state, batch, index) => { + // Trace batch of blocks fitting in single proof + const batchTrace = this.blockTracingService.openBlock(state, batch[0]); + const start = state.blockNumber.toString(); + + const [newState, combinedTraces] = await yieldSequential( + batch, + async (state2, block, jndex) => { + const [newState2, blockTrace, transactions] = + await this.blockTracingService.traceBlock(state2, block); + return [ + newState2, + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions + [blockTrace, transactions] as [ + NewBlockArguments, + TransactionTrace[], + ], + ]; + }, + state + ); + + // Fill up with dummies + const dummyBlockArgs = BlockArguments.noop( + newState, + Field(blocks.at(-1)!.result.stateRoot) + ); + const dummies = range( + batch.length, + BLOCK_ARGUMENT_BATCH_SIZE + ).map(() => ({ + args: dummyBlockArgs, + startingStateAfterHook: {}, + startingStateBeforeHook: {}, + })); + + const [blockArgumentBatch, transactionTraces] = unzip(combinedTraces); + + const blockTrace: BlockTrace = { + block: { + ...batchTrace, + blocks: blockArgumentBatch.concat(dummies), + deferTransactionProof: Bool(index < numBatches - 1), + deferSTProof: Bool(index < numBatches - 1), + }, + heights: [start, newState.blockNumber.toString()], }; - const [newState, blockTrace] = - await this.blockTracingService.traceBlock( - blockProverState, - block, - index === numBlocks - 1 - ); - return [newState, blockTrace]; + + return [ + newState, + // eslint-disable-next-line 
@typescript-eslint/consistent-type-assertions + [blockTrace, transactionTraces] as [BlockTrace, TransactionTrace[][]], + ]; }, batchState ); - return blockTraces; + return { + blockTraces: blockTraces.map(([x]) => x), + transactionTraces: blockTraces.map(([, x]) => x), + }; } @trace("batch.trace.transitions") @@ -102,20 +165,22 @@ export class BatchTracingService { batchId: number ): Promise { if (blocks.length === 0) { - return { blocks: [], stateTransitionTrace: [] }; + return { blocks: [], stateTransitionTrace: [], transactions: [] }; } // Traces the STs and the blocks in parallel, however not in separate processes // Therefore, we only optimize the idle time for async operations like DB reads - const [blockTraces, stateTransitionTrace] = await Promise.all([ - // Trace blocks - this.traceBlocks(blocks), - // Trace STs - this.traceStateTransitions(blocks, merkleTreeStore), - ]); + const [{ blockTraces, transactionTraces }, stateTransitionTrace] = + await Promise.all([ + // Trace blocks + this.traceBlocks(blocks), + // Trace STs + this.traceStateTransitions(blocks, merkleTreeStore), + ]); return { blocks: blockTraces, + transactions: transactionTraces.flat(2), stateTransitionTrace, }; } diff --git a/packages/sequencer/src/protocol/production/tracing/BlockTracingService.ts b/packages/sequencer/src/protocol/production/tracing/BlockTracingService.ts index 8b6a98bbf..e0e8867c8 100644 --- a/packages/sequencer/src/protocol/production/tracing/BlockTracingService.ts +++ b/packages/sequencer/src/protocol/production/tracing/BlockTracingService.ts @@ -1,17 +1,24 @@ import { + BlockArguments, BlockProverPublicInput, BlockProverState, + Bundle, + TransactionHashList, + TransactionProverState, WitnessedRootWitness, + BundleHashList, + BundlePreimage, } from "@proto-kit/protocol"; import { Bool, Field } from "o1js"; import { toStateTransitionsHash } from "@proto-kit/module"; -import { yieldSequential } from "@proto-kit/common"; -// eslint-disable-next-line 
import/no-extraneous-dependencies -import chunk from "lodash/chunk"; +import { NonMethods, yieldSequential } from "@proto-kit/common"; import { inject, injectable } from "tsyringe"; import { BlockWithResult } from "../../../storage/model/Block"; -import type { NewBlockProverParameters } from "../tasks/NewBlockTask"; +import type { + NewBlockArguments, + NewBlockProverParameters, +} from "../tasks/NewBlockTask"; import { Tracer } from "../../../logging/Tracer"; import { trace } from "../../../logging/trace"; @@ -23,24 +30,10 @@ import { export type TaskStateRecord = Record; -export type BlockTracingState = Pick< - BlockProverState, - | "witnessedRoots" - | "stateRoot" - | "pendingSTBatches" - | "networkState" - | "transactionList" - | "eternalTransactionsList" - | "incomingMessages" +export type BlockTracingState = NonMethods< + Omit >; -export type BlockTrace = { - blockParams: NewBlockProverParameters; - transactions: TransactionTrace[]; - // Only for debugging and logging - height: string; -}; - @injectable() export class BlockTracingService { public constructor( @@ -49,38 +42,45 @@ export class BlockTracingService { public readonly tracer: Tracer ) {} - @trace("batch.trace.block", ([, block]) => ({ - height: block.block.height.toString(), - })) - public async traceBlock( + public openBlock( state: BlockTracingState, - block: BlockWithResult, - includeSTProof: boolean - ): Promise<[BlockTracingState, BlockTrace]> { + { block: firstBlock, result: firstResult }: BlockWithResult + ): Pick< + NewBlockProverParameters, + "publicInput" | "networkState" | "blockWitness" + > { const publicInput: BlockProverPublicInput = new BlockProverPublicInput({ stateRoot: state.stateRoot, - blockNumber: block.block.height, - blockHashRoot: block.block.fromBlockHashRoot, - eternalTransactionsHash: block.block.fromEternalTransactionsHash, - incomingMessagesHash: block.block.fromMessagesHash, - transactionsHash: Field(0), - networkStateHash: block.block.networkState.before.hash(), - 
witnessedRootsHash: state.witnessedRoots.commitment, - pendingSTBatchesHash: state.pendingSTBatches.commitment, + blockNumber: firstBlock.height, + blockHashRoot: firstBlock.fromBlockHashRoot, + eternalTransactionsHash: firstBlock.fromEternalTransactionsHash, + incomingMessagesHash: firstBlock.fromMessagesHash, + networkStateHash: firstBlock.networkState.before.hash(), + remainders: { + witnessedRootsHash: state.witnessedRoots.commitment, + pendingSTBatchesHash: state.pendingSTBatches.commitment, + bundlesHash: state.bundleList.commitment, + }, }); + return { + publicInput, + networkState: firstBlock.networkState.before, + blockWitness: firstResult.blockHashWitness, + }; + } + + @trace("batch.trace.block", ([, block]) => ({ + height: block.block.height.toString(), + })) + public async traceBlock( + state: BlockTracingState, + block: BlockWithResult + ): Promise<[BlockTracingState, NewBlockArguments, TransactionTrace[]]> { const startingStateBeforeHook = collectStartingState( block.block.beforeBlockStateTransitions ); - const blockTrace = { - publicInput, - networkState: block.block.networkState.before, - deferSTProof: Bool(!includeSTProof), - blockWitness: block.result.blockHashWitness, - startingStateBeforeHook, - } satisfies Partial; - state.pendingSTBatches.push({ batchHash: toStateTransitionsHash( block.block.beforeBlockStateTransitions @@ -89,26 +89,71 @@ export class BlockTracingService { }); state.networkState = block.block.networkState.during; + const blockArgsPartial = { + fromPendingSTBatchesHash: state.pendingSTBatches.commitment, + fromWitnessedRootsHash: state.witnessedRoots.commitment, + }; + + const transactionProverState = new TransactionProverState({ + transactionList: new TransactionHashList(), + witnessedRoots: state.witnessedRoots, + pendingSTBatches: state.pendingSTBatches, + incomingMessages: state.incomingMessages, + eternalTransactionsList: state.eternalTransactionsList, + bundleList: new BundleHashList( + state.bundleList.commitment, + // 
The preimage here is just the current state (the start of the block) + // Internally, both provers will detect commitment == preimage and start + // a new bundle + new BundlePreimage({ + preimage: state.bundleList.commitment, + fromStateTransitionsHash: state.pendingSTBatches.commitment, + fromWitnessedRootsHash: state.witnessedRoots.commitment, + }) + ), + }); + const [afterState, transactionTraces] = await yieldSequential( - chunk(block.block.transactions, 2), - async (input, [transaction1, transaction2]) => { + block.block.transactions, + async (input, transaction) => { const [output, transactionTrace] = - transaction2 !== undefined - ? await this.transactionTracing.createMultiTransactionTrace( - input, - transaction1, - transaction2 - ) - : await this.transactionTracing.createSingleTransactionTrace( - input, - transaction1 - ); + await this.transactionTracing.createTransactionTrace( + input, + state.networkState, + transaction + ); return [output, transactionTrace]; }, - state + transactionProverState + ); + + // TODO Maybe replace this with replicating the in-circuit version inside createTransactionTrace + // Add to bundleList (before all the afterBlock stuff since bundles only care about + // all the stuff that happens in the TransactionProver) + // Also, this list is a different instance than the one used in transaction tracing + const finishedBundle = new Bundle({ + networkStateHash: state.networkState.hash(), + transactionsHash: block.block.transactionsHash, + pendingSTBatchesHash: { + from: blockArgsPartial.fromPendingSTBatchesHash, + to: afterState.pendingSTBatches.commitment, + }, + witnessedRootsHash: { + from: blockArgsPartial.fromWitnessedRootsHash, + to: afterState.witnessedRoots.commitment, + }, + }); + state.bundleList.pushIf( + finishedBundle, + afterState.transactionList.isEmpty().not() ); + state.pendingSTBatches = afterState.pendingSTBatches; + state.witnessedRoots = afterState.witnessedRoots; + state.incomingMessages = 
afterState.incomingMessages; + state.eternalTransactionsList = afterState.eternalTransactionsList; + const preimage = afterState.witnessedRoots .getUnconstrainedValues() .get() @@ -119,6 +164,23 @@ export class BlockTracingService { preimage: preimage ?? Field(0), }; + // We create the batch here, because we need the afterBlockRootWitness, + // but the afterBlock's witnessed root can't be in the arguments, because + // it is temporally **after** the bundle, not inside it + const args = new BlockArguments({ + transactionsHash: afterState.transactionList.commitment, + afterBlockRootWitness, + witnessedRootsHash: { + from: blockArgsPartial.fromWitnessedRootsHash, + to: state.witnessedRoots.commitment, + }, + pendingSTBatchesHash: { + from: blockArgsPartial.fromPendingSTBatchesHash, + to: state.pendingSTBatches.commitment, + }, + isDummy: Bool(false), + }); + if (afterState.pendingSTBatches.commitment.equals(0).not().toBoolean()) { state.witnessedRoots.witnessRoot( { @@ -133,19 +195,24 @@ export class BlockTracingService { const startingStateAfterHook = collectStartingState( block.result.afterBlockStateTransitions ); + state.pendingSTBatches.push({ + batchHash: toStateTransitionsHash( + block.result.afterBlockStateTransitions + ), + applied: Bool(true), + }); state.networkState = block.result.afterNetworkState; + state.blockNumber = state.blockNumber.add(1); + return [ - afterState, + state, { - blockParams: { - ...blockTrace, - startingStateAfterHook, - afterBlockRootWitness, - }, - transactions: transactionTraces, - height: block.block.height.toString(), + args, + startingStateBeforeHook, + startingStateAfterHook, }, + transactionTraces, ]; } } diff --git a/packages/sequencer/src/protocol/production/tracing/TransactionTracingService.ts b/packages/sequencer/src/protocol/production/tracing/TransactionTracingService.ts index ee591d282..89e391e50 100644 --- a/packages/sequencer/src/protocol/production/tracing/TransactionTracingService.ts +++ 
b/packages/sequencer/src/protocol/production/tracing/TransactionTracingService.ts @@ -1,39 +1,30 @@ import { addTransactionToBundle, - BlockProverMultiTransactionExecutionData, - BlockProverPublicInput, - BlockProverSingleTransactionExecutionData, NetworkState, + TransactionProverArguments, + TransactionProverPublicInput, + TransactionProverState, TransactionProverTransactionArguments, } from "@proto-kit/protocol"; -import { Bool, Field } from "o1js"; -import { MAX_FIELD } from "@proto-kit/common"; +import { Bool } from "o1js"; import { toStateTransitionsHash } from "@proto-kit/module"; import { injectable } from "tsyringe"; import { TransactionExecutionResult } from "../../../storage/model/Block"; import { PendingTransaction } from "../../../mempool/PendingTransaction"; import type { RuntimeProofParameters } from "../tasks/RuntimeProvingTask"; -import { - TransactionProverTaskParameters, - TransactionProvingType, -} from "../tasks/serializers/types/TransactionProvingTypes"; +import { TransactionProverTaskParameters } from "../tasks/serializers/types/TransactionProvingTypes"; import { UntypedStateTransition } from "../helpers/UntypedStateTransition"; import { VerificationKeyService } from "../../runtime/RuntimeVerificationKeyService"; -import type { BlockTracingState, TaskStateRecord } from "./BlockTracingService"; - -export type TransactionTrace = - | { - type: TransactionProvingType.SINGLE; - transaction: TransactionProverTaskParameters; - runtime: [RuntimeProofParameters]; - } - | { - type: TransactionProvingType.MULTI; - transaction: TransactionProverTaskParameters; - runtime: [RuntimeProofParameters, RuntimeProofParameters]; - }; +import type { TaskStateRecord } from "./BlockTracingService"; + +export type TransactionTrace = { + transaction: TransactionProverTaskParameters; + runtime: RuntimeProofParameters; +}; + +export type TransactionTracingState = TransactionProverState; export function collectStartingState( stateTransitions: UntypedStateTransition[] @@ 
-76,23 +67,17 @@ export class TransactionTracingService { } private getTransactionProofPublicInput( - previousState: BlockTracingState - ): BlockProverPublicInput { + previousState: TransactionTracingState + ): TransactionProverPublicInput { return { - stateRoot: previousState.stateRoot, - transactionsHash: previousState.transactionList.commitment, + bundlesHash: previousState.bundleList.commitment, eternalTransactionsHash: previousState.eternalTransactionsList.commitment, incomingMessagesHash: previousState.incomingMessages.commitment, - networkStateHash: previousState.networkState.hash(), - witnessedRootsHash: previousState.witnessedRoots.commitment, - pendingSTBatchesHash: previousState.pendingSTBatches.commitment, - blockHashRoot: Field(0), - blockNumber: MAX_FIELD, }; } private appendTransactionToState( - previousState: BlockTracingState, + previousState: TransactionTracingState, transaction: TransactionExecutionResult ) { // TODO Remove this call and instead reuse results from sequencing @@ -128,7 +113,8 @@ export class TransactionTracingService { } private async traceTransaction( - previousState: BlockTracingState, + previousState: TransactionTracingState, + networkState: NetworkState, transaction: TransactionExecutionResult ) { const beforeHookStartingState = collectStartingState( @@ -137,90 +123,61 @@ export class TransactionTracingService { const runtimeTrace1 = this.createRuntimeProofParams( transaction, - previousState.networkState + networkState ); const afterHookStartingState = collectStartingState( transaction.stateTransitions[2].stateTransitions.flat() ); + const args: TransactionProverArguments = { + networkState: networkState, + transactionHash: previousState.transactionList.commitment, + pendingSTBatchesHash: previousState.pendingSTBatches.commitment, + witnessedRootsHash: previousState.witnessedRoots.commitment, + bundleListPreimage: previousState.bundleList.preimage!, + }; + const newState = this.appendTransactionToState(previousState, 
transaction); + newState.bundleList.addToBundle(newState, networkState); + return { state: newState, runtime: runtimeTrace1, startingState: [beforeHookStartingState, afterHookStartingState], + args, }; } - public async createSingleTransactionTrace( - previousState: BlockTracingState, + public async createTransactionTrace( + previousState: TransactionTracingState, + networkState: NetworkState, transaction: TransactionExecutionResult - ): Promise<[BlockTracingState, TransactionTrace]> { + ): Promise<[TransactionTracingState, TransactionTrace]> { const publicInput = this.getTransactionProofPublicInput(previousState); const { state: newState, startingState, runtime, - } = await this.traceTransaction(previousState, transaction); + args, + } = await this.traceTransaction(previousState, networkState, transaction); - const transactionTrace: TransactionProverTaskParameters = - { - executionData: { - transaction: await this.getTransactionData(transaction.tx), - networkState: previousState.networkState, - }, - startingState, - publicInput, - }; - - return [ - newState, - { - type: TransactionProvingType.SINGLE, - transaction: transactionTrace, - runtime: [runtime], + const transactionTrace: TransactionProverTaskParameters = { + executionData: { + transaction: await this.getTransactionData(transaction.tx), + args, }, - ]; - } - - public async createMultiTransactionTrace( - previousState: BlockTracingState, - transaction1: TransactionExecutionResult, - transaction2: TransactionExecutionResult - ): Promise<[BlockTracingState, TransactionTrace]> { - const publicInput = this.getTransactionProofPublicInput(previousState); - - const { - state: tmpState, - startingState: startingState1, - runtime: runtime1, - } = await this.traceTransaction(previousState, transaction1); - - const { - state: resultState, - startingState: startingState2, - runtime: runtime2, - } = await this.traceTransaction(tmpState, transaction2); - - const transactionTrace: TransactionProverTaskParameters = - { - 
executionData: { - transaction1: await this.getTransactionData(transaction1.tx), - transaction2: await this.getTransactionData(transaction2.tx), - networkState: previousState.networkState, - }, - startingState: [...startingState1, ...startingState2], - publicInput, - }; + startingState, + publicInput, + }; return [ - resultState, + newState, { - type: TransactionProvingType.MULTI, transaction: transactionTrace, - runtime: [runtime1, runtime2], + runtime, }, ]; } diff --git a/packages/sequencer/src/worker/queue/LocalTaskQueue.ts b/packages/sequencer/src/worker/queue/LocalTaskQueue.ts index c6046477e..e325508ca 100644 --- a/packages/sequencer/src/worker/queue/LocalTaskQueue.ts +++ b/packages/sequencer/src/worker/queue/LocalTaskQueue.ts @@ -1,4 +1,4 @@ -import { log, mapSequential, noop } from "@proto-kit/common"; +import { log, mapSequential, noop, sleep } from "@proto-kit/common"; import { sequencerModule } from "../../sequencer/builder/SequencerModule"; import { TaskPayload } from "../flow/Task"; @@ -8,12 +8,6 @@ import { InstantiatedQueue, TaskQueue } from "./TaskQueue"; import { ListenerList } from "./ListenerList"; import { AbstractTaskQueue } from "./AbstractTaskQueue"; -async function sleep(ms: number) { - await new Promise((resolve) => { - setTimeout(resolve, ms); - }); -} - // Had to extract it to here bc eslint would ruin the code interface QueueListener { (payload: TaskPayload): Promise; diff --git a/packages/sequencer/src/worker/worker/FlowTaskWorker.ts b/packages/sequencer/src/worker/worker/FlowTaskWorker.ts index d2cac1f89..d07142898 100644 --- a/packages/sequencer/src/worker/worker/FlowTaskWorker.ts +++ b/packages/sequencer/src/worker/worker/FlowTaskWorker.ts @@ -92,7 +92,7 @@ export class FlowTaskWorker[]> // Call them in order of registration, because the prepare methods // might depend on each other or a result that is saved in a DI singleton for (const task of tasks) { - log.debug(`Preparing task ${task.constructor.name}`); + log.info(`Preparing 
task ${task.constructor.name}`); // eslint-disable-next-line no-await-in-loop await task.prepare(); log.debug(`${task.constructor.name} prepared`); diff --git a/packages/sequencer/test-integration/workers/modules.ts b/packages/sequencer/test-integration/workers/modules.ts index d65204c04..ba1b3477f 100644 --- a/packages/sequencer/test-integration/workers/modules.ts +++ b/packages/sequencer/test-integration/workers/modules.ts @@ -4,7 +4,7 @@ import { Protocol } from "@proto-kit/protocol"; import { VanillaProtocolModules } from "@proto-kit/library"; import { ModulesConfig } from "@proto-kit/common"; // eslint-disable-next-line import/no-extraneous-dependencies -import { BullQueueConfig } from "@proto-kit/deployment"; +import { BullQueueConfig, S3Config } from "@proto-kit/deployment"; import { ProvenBalance } from "../../test/integration/mocks/ProvenBalance"; import { ProtocolStateTestHook } from "../../test/integration/mocks/ProtocolStateTestHook"; @@ -29,6 +29,7 @@ export const runtimeProtocolConfig: ModulesConfig<{ Protocol: { AccountState: {}, BlockProver: {}, + TransactionProver: {}, StateTransitionProver: {}, BlockHeight: {}, LastStateRoot: {}, @@ -44,3 +45,14 @@ export const BullConfig: BullQueueConfig = { db: 1, }, }; + +export const RemoteCacheConfig: S3Config = { + bucketName: "worker-test-proven", + client: { + endPoint: "localhost", + port: 9000, + useSSL: false, + accessKey: "minioadmin", + secretKey: "minioadmin", + }, +}; diff --git a/packages/sequencer/test-integration/workers/workers-proven.test.ts b/packages/sequencer/test-integration/workers/workers-proven.test.ts index f914473e4..251f87f84 100644 --- a/packages/sequencer/test-integration/workers/workers-proven.test.ts +++ b/packages/sequencer/test-integration/workers/workers-proven.test.ts @@ -2,7 +2,7 @@ import "reflect-metadata"; import { container } from "tsyringe"; import { PrivateKey, UInt64 } from "o1js"; import { expectDefined, log } from "@proto-kit/common"; -import { BullQueue } from 
"@proto-kit/deployment"; +import { BullQueue, S3RemoteCache } from "@proto-kit/deployment"; import { BatchProducerModule, @@ -20,6 +20,7 @@ import { BlockTestService } from "../../test/integration/services/BlockTestServi import { BullConfig, protocolClass, + RemoteCacheConfig, runtimeClass, runtimeProtocolConfig, } from "./modules"; @@ -27,6 +28,7 @@ import { ChildProcessWorker } from "./ChildProcessWorker"; const timeout = 300000; +// true const proofsEnabled = false; describe("worker-proven", () => { @@ -63,6 +65,7 @@ describe("worker-proven", () => { TaskQueue: BullQueue, FeeStrategy: ConstantFeeStrategy, SequencerStartupModule, + RemoteCache: S3RemoteCache, }); const app = AppChain.from({ @@ -82,6 +85,7 @@ describe("worker-proven", () => { TaskQueue: BullConfig, FeeStrategy: {}, SequencerStartupModule: {}, + RemoteCache: RemoteCacheConfig, }, ...runtimeProtocolConfig, }); diff --git a/packages/sequencer/test/integration/BlockProduction-test.ts b/packages/sequencer/test/integration/BlockProduction-test.ts index 71c4172f9..62f29565c 100644 --- a/packages/sequencer/test/integration/BlockProduction-test.ts +++ b/packages/sequencer/test/integration/BlockProduction-test.ts @@ -418,7 +418,7 @@ export function testBlockProduction< const numberTxs = 3; - it("should produce block with multiple transaction", async () => { + it("should produce block with multiple transactions", async () => { log.setLevel("TRACE"); expect.assertions(6 + 4 * numberTxs); diff --git a/packages/sequencer/test/integration/Proven.test.ts b/packages/sequencer/test/integration/Proven.test.ts index bca7f53af..77f95dbf2 100644 --- a/packages/sequencer/test/integration/Proven.test.ts +++ b/packages/sequencer/test/integration/Proven.test.ts @@ -163,7 +163,8 @@ describe.skip("Proven", () => { const vkService = new ChildVerificationKeyService(); const proofs = new InMemoryAreProofsEnabled(); proofs.setProofsEnabled(true); - const registry = new CompileRegistry(proofs); + const registry = + 
appChain.sequencer.dependencyContainer.resolve(CompileRegistry); registry.addArtifactsRaw({ BlockProver: { verificationKey: MOCK_VERIFICATION_KEY, diff --git a/packages/stack/package.json b/packages/stack/package.json index 2851234e1..adf6a640c 100644 --- a/packages/stack/package.json +++ b/packages/stack/package.json @@ -11,6 +11,7 @@ "test:file": "node --experimental-vm-modules ../../node_modules/jest/bin/jest.js", "test": "npm run test:file -- ./test/**", "test:watch": "npm run test:file -- ./test/** --watch", + "test:integration": "npm run test:file -- ./test-integration/** --runInBand", "graphql": "npm run build && node --experimental-vm-modules --experimental-wasm-modules --es-module-specifier-resolution=node ./dist/scripts/graphql/run-graphql.js", "start": "node --experimental-vm-modules --es-module-specifier-resolution=node ./dist/start.js" }, @@ -32,7 +33,8 @@ "tsyringe": "^4.10.0" }, "devDependencies": { - "@jest/globals": "^29.5.0" + "@jest/globals": "^29.5.0", + "cachedir": "^2.4.0" }, "dependencies": { "reflect-metadata": "^0.1.13" diff --git a/packages/stack/test-integration/caching/caching.test.ts b/packages/stack/test-integration/caching/caching.test.ts new file mode 100644 index 000000000..896cdf645 --- /dev/null +++ b/packages/stack/test-integration/caching/caching.test.ts @@ -0,0 +1,160 @@ +import "reflect-metadata"; +import { + BlockStorageNetworkStateModule, + ClientAppChain, + InMemoryBlockExplorer, + InMemorySigner, + InMemoryTransactionSender, + StateServiceQueryModule, +} from "@proto-kit/sdk"; +import { PrivateKey } from "o1js"; +import { Runtime } from "@proto-kit/module"; +import { Protocol } from "@proto-kit/protocol"; +import { + VanillaProtocolModules, + VanillaRuntimeModules, +} from "@proto-kit/library"; +import { log } from "@proto-kit/common"; +import { + BatchProducerModule, + BlockProducerModule, + InMemoryDatabase, + LocalTaskQueue, + LocalTaskWorkerModule, + ManualBlockTrigger, + NoopBaseLayer, + PrivateMempool, + Sequencer, + 
SequencerStartupModule, + VanillaTaskWorkerModules, +} from "@proto-kit/sequencer"; +import { container } from "tsyringe"; +import { S3RemoteCache } from "@proto-kit/deployment"; + +import { TestBalances } from "../../src"; +import * as fs from "node:fs"; +import cachedir from "cachedir"; + +export async function startAppChain() { + const appChain = ClientAppChain.from({ + Runtime: Runtime.from( + VanillaRuntimeModules.with({ + Balances: TestBalances, + }) + ), + + Protocol: Protocol.from(VanillaProtocolModules.with({})), + + Sequencer: Sequencer.from({ + Database: InMemoryDatabase, + + Mempool: PrivateMempool, + LocalTaskWorkerModule: LocalTaskWorkerModule.from( + VanillaTaskWorkerModules.withoutSettlement() + ), + + BaseLayer: NoopBaseLayer, + BatchProducerModule, + BlockProducerModule, + BlockTrigger: ManualBlockTrigger, + TaskQueue: LocalTaskQueue, + RemoteCache: S3RemoteCache, + + SequencerStartupModule, + }), + + Signer: InMemorySigner, + TransactionSender: InMemoryTransactionSender, + QueryTransportModule: StateServiceQueryModule, + NetworkStateTransportModule: BlockStorageNetworkStateModule, + BlockExplorerTransportModule: InMemoryBlockExplorer, + }); + + appChain.configure({ + Runtime: { + Balances: {}, + }, + + Protocol: { + ...Protocol.defaultConfig(), + TransactionFee: { + tokenId: 0n, + feeRecipient: PrivateKey.random().toPublicKey().toBase58(), + baseFee: 0n, + methods: {}, + perWeightUnitFee: 0n, + }, + }, + + Sequencer: { + SequencerStartupModule: {}, + Database: {}, + + Mempool: {}, + BatchProducerModule: {}, + LocalTaskWorkerModule: VanillaTaskWorkerModules.defaultConfig(), + BaseLayer: {}, + TaskQueue: {}, + + BlockProducerModule: { + allowEmptyBlock: true, + }, + + BlockTrigger: {}, + + RemoteCache: { + client: { + endPoint: "localhost", + port: 9000, + useSSL: false, + accessKey: "minioadmin", + secretKey: "minioadmin", + }, + bucketName: "caching-stack-test", + }, + }, + + TransactionSender: {}, + QueryTransportModule: {}, + 
NetworkStateTransportModule: {}, + + Signer: { + signer: PrivateKey.random(), + }, + BlockExplorerTransportModule: {}, + }); + + await appChain.start(true, container.createChildContainer()); + + return appChain; +} + +function clearCache() { + fs.rmSync(cachedir("o1js"), { force: true, recursive: true }); + fs.mkdirSync(cachedir("o1js")); +} + +// This test is convered by workers-proven.test.ts +describe.skip("caching stack test", () => { + let appchain: Awaited>; + + beforeAll(async () => { + log.setLevel("DEBUG"); + clearCache(); + appchain = await startAppChain(); + }, 500000); + + afterAll(async () => { + await appchain.close(); + }); + + it("should have compiled and pushed to cache", async () => { + const cache = appchain.sequencer.resolve("RemoteCache"); + + const blockProverObjects = await cache.getObjects("blockprover"); + expect(blockProverObjects.length).toBe(6); + + const srsObjects = await cache.getObjects("srs"); + expect(srsObjects.length).toBeGreaterThan(8); + }, 500000); +}); diff --git a/packages/stack/test-integration/tsconfig.json b/packages/stack/test-integration/tsconfig.json new file mode 100644 index 000000000..a0c8c946b --- /dev/null +++ b/packages/stack/test-integration/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "./../../../tsconfig.json", + "compilerOptions": { + "experimentalDecorators": true + }, + "include": ["./**/*.ts", "./*.ts"] +}