From 9b90918d0992108efa673d56539d1dfcac4465c9 Mon Sep 17 00:00:00 2001 From: Pablo Castellano Date: Tue, 3 Sep 2024 18:43:23 +0200 Subject: [PATCH 1/2] feat: add support for Span batches (dirty but working) --- package-lock.json | 110 +++++++ package.json | 1 + src/batches/RawSpanBatch.ts | 34 +- src/batches/SingularBatch.ts | 4 + src/batches/batch.ts | 29 +- src/frames/frame.ts | 27 +- src/index.ts | 62 ++++ src/index2.ts | 421 +++++++++++++++++++++++++ src/transactions/batcherTransaction.ts | 13 +- 9 files changed, 686 insertions(+), 15 deletions(-) create mode 100644 src/index2.ts diff --git a/package-lock.json b/package-lock.json index 54358d2..b3c183e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,6 +12,7 @@ "@ethersproject/bignumber": "^5.7.0", "@ethersproject/providers": "^5.7.2", "ethereumjs-util": "^7.1.5", + "ethers": "^6.13.2", "rlp": "^3.0.0", "tslib": "^2.6.2", "viem": "^2.1.1" @@ -2219,6 +2220,11 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, + "node_modules/aes-js": { + "version": "4.0.0-beta.5", + "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz", + "integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==" + }, "node_modules/agent-base": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", @@ -3480,6 +3486,68 @@ "rlp": "bin/rlp" } }, + "node_modules/ethers": { + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/ethers/-/ethers-6.13.2.tgz", + "integrity": "sha512-9VkriTTed+/27BGuY1s0hf441kqwHJ1wtN2edksEtiRvXx+soxRX3iSXTfFqq2+YwrOqbDoTHjIhQnjJRlzKmg==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/ethers-io/" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@adraffy/ens-normalize": "1.10.1", + "@noble/curves": "1.2.0", + "@noble/hashes": "1.3.2", + "@types/node": "18.15.13", + "aes-js": "4.0.0-beta.5", + "tslib": "2.4.0", + "ws": "8.17.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/ethers/node_modules/@adraffy/ens-normalize": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz", + "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==" + }, + "node_modules/ethers/node_modules/@types/node": { + "version": "18.15.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz", + "integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q==" + }, + "node_modules/ethers/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/ethers/node_modules/ws": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", @@ -11236,6 
+11304,11 @@ "dev": true, "requires": {} }, + "aes-js": { + "version": "4.0.0-beta.5", + "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz", + "integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==" + }, "agent-base": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", @@ -12189,6 +12262,43 @@ } } }, + "ethers": { + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/ethers/-/ethers-6.13.2.tgz", + "integrity": "sha512-9VkriTTed+/27BGuY1s0hf441kqwHJ1wtN2edksEtiRvXx+soxRX3iSXTfFqq2+YwrOqbDoTHjIhQnjJRlzKmg==", + "requires": { + "@adraffy/ens-normalize": "1.10.1", + "@noble/curves": "1.2.0", + "@noble/hashes": "1.3.2", + "@types/node": "18.15.13", + "aes-js": "4.0.0-beta.5", + "tslib": "2.4.0", + "ws": "8.17.1" + }, + "dependencies": { + "@adraffy/ens-normalize": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz", + "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==" + }, + "@types/node": { + "version": "18.15.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz", + "integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q==" + }, + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "ws": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", + "requires": {} + } + } + }, "evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", diff --git a/package.json b/package.json index 99a1f89..f3c9f8b 100644 --- a/package.json +++ b/package.json @@ -57,6 +57,7 @@ "@ethersproject/bignumber": "^5.7.0", "@ethersproject/providers": "^5.7.2", "ethereumjs-util": "^7.1.5", + "ethers": "^6.13.2", "rlp": "^3.0.0", "tslib": "^2.6.2", "viem": "^2.1.1" diff --git a/src/batches/RawSpanBatch.ts b/src/batches/RawSpanBatch.ts index 1e67dab..8edea15 100644 --- a/src/batches/RawSpanBatch.ts +++ b/src/batches/RawSpanBatch.ts @@ -1,11 +1,41 @@ import { NestedUint8Array } from 'rlp' import { InnerBatch } from './batch' +/* +type spanBatchPrefix struct { + relTimestamp uint64 // Relative timestamp of the first block + l1OriginNum uint64 // L1 origin number + parentCheck [20]byte // First 20 bytes of the first block's parent hash + l1OriginCheck [20]byte // First 20 bytes of the last block's L1 origin hash +} + +type spanBatchPayload struct { + blockCount uint64 // Number of L2 block in the span + originBits *big.Int // Standard span-batch bitlist of blockCount bits. Each bit indicates if the L1 origin is changed at the L2 block. 
+ blockTxCounts []uint64 // List of transaction counts for each L2 block + txs *spanBatchTxs // Transactions encoded in SpanBatch specs +} +*/ + +// https://ethereum.stackexchange.com/questions/163066/how-is-rollup-data-verified-with-blobs +// Span batches (post-Delta hardfork) +// https://specs.optimism.io/protocol/delta/span-batches.html#span-batch-format export class RawSpanBatch { // eslint-disable-next-line @typescript-eslint/no-unused-vars static decode(data: Uint8Array | NestedUint8Array): InnerBatch { - // TODO: implement + console.log('data0') + // console.log(data[0]) + // TODO: implement: prefix ++ payload + // https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/span_batch.go#L49 // const decoded = rlp.decode(data) - return {} as InnerBatch + // return { data } as InnerBatch + + return { + parentHash: data.toString(), + epochNum: 0, + epochHash: '0x', + timestamp: 3, + transactions: [] + } } } diff --git a/src/batches/SingularBatch.ts b/src/batches/SingularBatch.ts index 96903b4..0b24b69 100644 --- a/src/batches/SingularBatch.ts +++ b/src/batches/SingularBatch.ts @@ -5,6 +5,10 @@ import { OpStackTransactionSerialized } from 'viem/chains' import { parseTransaction } from 'viem/op-stack' import type { InnerBatch } from './batch' +// https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/singular_batch.go#L22 +// https://specs.optimism.io/protocol/derivation.html#batch-submission-wire-format +// Singular batches (pre-Delta hardfork) +// https://specs.optimism.io/protocol/derivation.html#batch-format export class SingularBatch { static decode(data: Uint8Array | NestedUint8Array): InnerBatch { const decoded = rlp.decode(data) diff --git a/src/batches/batch.ts b/src/batches/batch.ts index 32df990..ec9e7cd 100644 --- a/src/batches/batch.ts +++ b/src/batches/batch.ts @@ -1,8 +1,9 @@ +import fs from 'fs' import rlp, { NestedUint8Array } from 'rlp' -import zlib from 'zlib' import stream from 'stream' -import { SingularBatch } from './SingularBatch' +import zlib from 'zlib' import { RawSpanBatch } from './RawSpanBatch' +import { SingularBatch } from './SingularBatch' type Transaction = { type?: string @@ -42,7 +43,8 @@ enum BatchType { const MAX_BYTES_PER_CHANNEL = 10_000_000 export const parseBatchesData = async (compressedBatches: string): Promise => { - const decompressed = await decompressBatches(compressedBatches) + console.log('parsing') + const decompressed = await decompressBatches_v0(compressedBatches) const decodedBatches: Batches = [] let dataToDecode: Uint8Array = decompressed while (dataToDecode?.length) { @@ -53,11 +55,24 @@ export const parseBatchesData = async (compressedBatches: string): Promise => { +export const decompressBatches_v0 = async (compressedBatches: string): Promise => { const inputBuffer = Buffer.from(compressedBatches, 'hex') + console.log('decompressing', inputBuffer.length, 'bytes') + + fs.writeFileSync('blob1_ts.test', inputBuffer) + console.log('written blob1_ts.test') + + //console.log(inputBuffer) + console.log(compressedBatches.slice(0, 100)) + console.log(inputBuffer.toString('hex').slice(0, 100)) + try { // Decompress the input buffer - const decompress = zlib.createInflate({ maxOutputLength: MAX_BYTES_PER_CHANNEL }) + const decompress = zlib.createInflate({ + maxOutputLength: MAX_BYTES_PER_CHANNEL, + finishFlush: zlib.constants.Z_SYNC_FLUSH + }) + //const decompress = zlib.createInflate() const decompressStream = 
stream.Readable.from(inputBuffer) const chunks: Buffer[] = [] @@ -71,13 +86,15 @@ const decompressBatches = async (compressedBatches: string): Promise => } } -const decodeBatch = (decodedBatch: Uint8Array | NestedUint8Array): Batch => { +export const decodeBatch = (decodedBatch: Uint8Array | NestedUint8Array): Batch => { if (decodedBatch.length < 1) throw new Error('Batch too short') // first byte is the batch type switch (decodedBatch[0]) { case BatchType.SingularBatch: return { inner: SingularBatch.decode(decodedBatch.slice(1)) } case BatchType.SpanBatch: + console.error('SpanBatch is not implemented') + //return { inner: decodedBatch } return { inner: RawSpanBatch.decode(decodedBatch.slice(1)) } default: throw new Error(`Unrecognized batch type: ${decodedBatch[0]}`) diff --git a/src/frames/frame.ts b/src/frames/frame.ts index 64c0ce2..f97e75f 100644 --- a/src/frames/frame.ts +++ b/src/frames/frame.ts @@ -24,7 +24,7 @@ const BYTES_4_LENGTH = 4 * BYTE_CHARS const BYTES_13_LENGTH = 13 * BYTE_CHARS const BYTES_16_LENGTH = 16 * BYTE_CHARS -export const extractFrames = (data: string): FramesWithCompressedData => { +export const extractFrames_v0 = (data: string): FramesWithCompressedData => { const frames: FramesWithCompressedData = [] let offset = 0 while (offset < data.length) { @@ -34,23 +34,33 @@ export const extractFrames = (data: string): FramesWithCompressedData => { } const channelId = data.slice(offset, offset + BYTES_16_LENGTH) + console.log('channel:', channelId) + offset += BYTES_16_LENGTH const frameNumber = Number(`0x${data.slice(offset, offset + BYTES_2_LENGTH)}`) + console.log('frame num:', frameNumber) + offset += BYTES_2_LENGTH const frameDataLengthInBytes = Number(`0x${data.slice(offset, offset + BYTES_4_LENGTH)}`) + console.log('frame data length:', frameDataLengthInBytes) + offset += BYTES_4_LENGTH const frameDataLength = frameDataLengthInBytes * BYTE_CHARS if (frameDataLengthInBytes > MAX_FRAME_LENGTH || offset + frameDataLength > data.length) { - throw new Error('Frame data length is too large or exceeds buffer length') + throw new Error( + `Frame data length is too large or exceeds buffer length: ${frameDataLengthInBytes}, ${data.length}, ${offset + frameDataLength}` + ) } const frameData = `${data.slice(offset, offset + frameDataLength)}` offset += frameDataLength const isLast = Number(`0x${data.slice(offset, offset + BYTES_1_LENGTH)}`) !== 0 + console.log('is_last:', Number(`0x${data.slice(offset, offset + BYTES_1_LENGTH)}`)) + offset += BYTES_1_LENGTH frames.push({ channelId, frameNumber, data: frameData, isLast }) @@ -63,7 +73,7 @@ export const extractFrames = (data: string): FramesWithCompressedData => { return frames } -export const addBatchesToFrame = async (frame: FrameWithCompressedData): Promise => { +export const addBatchesToFrame_v0 = async (frame: FrameWithCompressedData): Promise => { const batches = await parseBatchesData(frame.data) return { channelId: frame.channelId, @@ -72,3 +82,14 @@ export const addBatchesToFrame = async (frame: FrameWithCompressedData): Promise batches } } + +export const addBatchesToFrame_v1 = async (channel: string): Promise => { + const batches = await parseBatchesData(channel) + return { + // FIXME + channelId: 'asdfg', + frameNumber: 0, + isLast: true, + batches + } +} diff --git a/src/index.ts b/src/index.ts index febfce3..b39fae8 100644 --- a/src/index.ts +++ b/src/index.ts @@ -3,9 +3,30 @@ import fs from 'fs' import path from 'path' import { BatcherTransaction, extractBatcherTransaction } from 
'./transactions/batcherTransaction' +/** + * Convert a binary file to a text file where text is the hexadecimal representation. + * @param inputFilePath Path to the binary input file. + * @param outputFilePath Path to the output text file. + */ +function convertBinaryToHex(inputFilePath: string, outputFilePath: string): void { + // Read the binary file into a Buffer + const binaryData = fs.readFileSync(inputFilePath) + + // Convert the binary data to a hexadecimal string + const hexString = binaryData.toString('hex') + + // TODO: add leading 0x + + // Write the hexadecimal string to the output file + fs.writeFileSync(outputFilePath, hexString) + + console.log(`Successfully converted ${inputFilePath} to hexadecimal format and saved as ${outputFilePath}`) +} + export const testWithExampleData = async ( filePath: string = 'example-data/calldata.txt' ): Promise => { + console.log('testing with', filePath) const examplePath = path.join(path.dirname(__dirname), filePath) const exampleCallData = fs.readFileSync(examplePath).toString() return await extractBatcherTransaction(exampleCallData) @@ -21,3 +42,44 @@ export const decodeBatcherTransaction = async (txHash: string, providerUrl: stri export const decodeBatcherTransactionCalldata = async (calldata: string): Promise => { return await extractBatcherTransaction(calldata) } + +//convertBinaryToHex('opstack_blobs_19538908.bin', 'opstack_blobs_19538908.txt') +// +// testWithExampleData() +// .then((result) => { +// console.log('Batch:') +// console.log(result) +// // console.log('Frames:') +// // console.log(result['frames']) +// // console.log('Frame batches:') +// // console.log(result['frames'][0]['batches']) +// // console.log('Transactions:') +// // console.log(result['frames'][0]['batches'][0]['inner']['transactions']) +// }) +// .catch((error) => { +// console.error('An error occurred:', error) +// }) + +/* +testWithExampleData( + 'example-data/calldata_tx_0xa47e5c4c1b03e60c878612737ff777484d21da0f0740c42d0343aa73d92764c6-pre-delta' +) + .then((result) => { + console.log(result) // Output the result + //decodeOptimismBlob('opstack_blobs_19538908.txt') + //decodeOptimismBlob() + }) + .catch((error) => { + console.error('An error occurred:', error) + }) +*/ + +testWithExampleData('opstack_blobs_19538908.txt') + .then((result) => { + console.log(result) // Output the result + //decodeOptimismBlob('opstack_blobs_19538908.txt') + //decodeOptimismBlob() + }) + .catch((error) => { + console.error('An error occurred:', error) + }) diff --git a/src/index2.ts b/src/index2.ts new file mode 100644 index 0000000..3e1b16d --- /dev/null +++ b/src/index2.ts @@ -0,0 +1,421 @@ +import fs from 'fs' +import rlp from 'rlp' +import { Readable } from 'stream' +import zlib from 'zlib' +import { decompressBatches_v0 } from './batches/batch' +import type { Frames, FramesWithCompressedData } from './frames/frame' +//import { extractFrames_v0 } from './frames/frame' +/** + * Read the binary file and split it into chunks of the specified size. + * @param buffer - The binary data from the file. + * @param chunkSize - The size of each chunk. + * @returns An array of chunks. + */ +function chunks(buffer: Uint8Array, chunkSize: number): Uint8Array[] { + const result = [] + for (let i = 0; i < buffer.length; i += chunkSize) { + result.push(buffer.slice(i, i + chunkSize)) + } + return result +} + +/** + * Convert the byte array to a number. + * @param bytes - The array of bytes to convert. + * @returns The number representation of the bytes. 
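+ *
+ * Interprets the bytes as a big-endian unsigned integer, e.g.
+ * bytesToNumber(Uint8Array.from([0x01, 0x02, 0x03])) === 0x010203 === 66051.
+ * (JavaScript `<<` works on 32-bit signed integers, so this is only safe for
+ * values below 2^31; the 3-byte declared-length field decoded here fits.)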
+ */ +function bytesToNumber(bytes: Uint8Array): number { + return bytes.reduce((acc, byte, index) => acc + (byte << (8 * (bytes.length - index - 1))), 0) +} + +/** + * Function to process data and extract frames, decoding according to the provided logic. + * @param datas - Array of Uint8Array data chunks to process. + * @returns An array of frames with compressed data. + */ +function processChannelData(datas: Uint8Array[]): FramesWithCompressedData { + const frames: FramesWithCompressedData = [] + + for (let data of datas) { + if (data[0] !== 0) throw new Error('Assertion failed: data[0] must be 0 (derivation version)') + + data = data.slice(1) // Strip prefix byte + + while (data.length > 0) { + console.log(`remaining data bytes: ${data.length}`) + + const channelIdBytes = data.slice(0, 16) + const channelId = Array.from(channelIdBytes) + .map((byte) => byte.toString(16).padStart(2, '0')) + .join('') + + const frameNum = (data[16] << 8) | data[17] // Convert 2 bytes to an integer + console.log(`frame num: ${frameNum}`) + + const frameLength = (data[18] << 24) | (data[19] << 16) | (data[20] << 8) | data[21] // Convert 4 bytes to an integer + console.log('frame data length:', frameLength) + + const end = 16 + 2 + 4 + frameLength + 1 + console.log('end:', end) + + const isLast = data[end - 1] === 1 // Determine if it's the last frame + console.log('is_last:', isLast) + + const frameDataBytes = data.slice(16 + 2 + 4, end - 1) + const frameData = Array.from(frameDataBytes) + .map((byte) => byte.toString(16).padStart(2, '0')) + .join('') + + frames.push({ + channelId, + frameNumber: frameNum, + data: frameData, + isLast + }) + + data = data.slice(end) // Move to the next chunk of data + } + } + + return frames +} + +/** + * Function to incrementally decompress a zlib-compressed data stream. + * @param inputBuffer - The input buffer containing zlib-compressed data. + * @returns A promise that resolves with the decompressed data. + */ +function decompressIncrementally(inputBuffer: Buffer): Promise { + return new Promise((resolve, reject) => { + const inflate = zlib.createInflate({ finishFlush: zlib.constants.Z_SYNC_FLUSH }) + // zlib.createInflate complains like "Error: unexpected end of file" + // zlib.createInflateRaw() complains like "Error: invalid stored block lengths" + const chunks: Buffer[] = [] + + // Create a readable stream from the input buffer + const inputStream = new Readable({ + read() { + this.push(inputBuffer) + this.push(null) // Signal end of input + } + }) + + // Pipe the input stream into the inflate stream + inputStream.pipe(inflate) + + // Collect the decompressed chunks + inflate.on('data', (chunk) => { + chunks.push(chunk) + }) + + // Resolve the promise once decompression is complete + inflate.on('end', () => { + resolve(Buffer.concat(chunks)) + }) + + // Handle errors during decompression + inflate.on('error', (err) => { + reject(err) + }) + }) +} + +/** + * Reads a bit list from a Uint8Array. + * @param length - The number of bits to read. + * @param buffer - The Uint8Array containing the data. + * @param offset - The starting offset. + * @returns An object containing the list of bits and the new offset. 
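+ *
+ * Each byte contributes the low min(8, remaining) bits, highest bit first, e.g.
+ * readBitlist(4, Uint8Array.from([0b1010]), 0) yields
+ * { bits: [true, false, true, false], newOffset: 1 }.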
+ */ +function readBitlist(length: number, buffer: Uint8Array, offset: number): { bits: boolean[]; newOffset: number } { + const bits: boolean[] = [] + let currentOffset = offset + + while (length > 0 && currentOffset < buffer.length) { + const byte = buffer[currentOffset++] + const tempBits: boolean[] = [] + + for (let i = 0; i < Math.min(8, length); i++) { + tempBits.push(((byte >> i) & 1) === 1) + } + + bits.push(...tempBits.reverse()) + length -= 8 + } + + return { bits, newOffset: currentOffset } +} + +/** + * Function to read a variable-length integer (varint) from a Uint8Array. + * @param buffer - The input Uint8Array containing the varint. + * @param offset - The offset at which to start reading. + * @returns An object containing the decoded varint and the new offset. + */ +function readVarint(buffer: Uint8Array, offset: number): { value: number; newOffset: number } { + let result = 0 + let shift = 0 + let currentOffset = offset + + while (currentOffset < buffer.length) { + const byte = buffer[currentOffset++] + result |= (byte & 0b01111111) << shift + if ((byte & 0b10000000) === 0) { + break // Stop if the most significant bit is 0 + } + shift += 7 + } + + return { value: result, newOffset: currentOffset } +} + +/** + * Function to read a specific number of bytes from a Uint8Array. + * @param buffer - The input Uint8Array. + * @param offset - The offset at which to start reading. + * @param length - The number of bytes to read. + * @returns An object containing the read bytes as a hex string and the new offset. + */ +function readBytesAsHex(buffer: Uint8Array, offset: number, length: number): { hex: string; newOffset: number } { + const bytes = buffer.slice(offset, offset + length) + const hex = Array.from(bytes) + .map((byte) => byte.toString(16).padStart(2, '0')) + .join('') + return { hex, newOffset: offset + length } +} + +/** + * Main function to read and process the binary file. + * @param filename - The name of the file to read. 
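+ *
+ * Blobs are 131072 bytes: 4096 32-byte field elements (EIP-4844). The top two
+ * bits of each field element's first byte must be clear, so each element
+ * carries 31 full data bytes plus 6 spare bits; every 128-byte chunk of 4
+ * field elements is reassembled below into 4*31+3 = 127 data bytes. blob[1]
+ * is the encoding version byte (0 here) and bytes 2-4 hold the declared
+ * payload length.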
+ */ +async function processFile(filename: string): Promise { + const blobs = fs.readFileSync(filename) // Read the binary file + const datas: Uint8Array[] = [] + + for (const blob of chunks(blobs, 131072)) { + if (blob[1] !== 0) throw new Error('Assertion failed: blob[1] must be 0') + const declaredLength = bytesToNumber(blob.slice(2, 5)) + console.log('found version 0 blob, declared length:', declaredLength) + let blobData = new Uint8Array() + + for (const chunk of chunks(blob, 128)) { + // split into chunks of 128 bytes + const byteA = chunk[32 * 0] + const byteB = chunk[32 * 1] + const byteC = chunk[32 * 2] + const byteD = chunk[32 * 3] + + if ((byteA | byteB | byteC | byteD) & 0b1100_0000) { + throw new Error('Assertion failed: bytes must meet specific criteria') + } + + const tailA = chunk.slice(32 * 0 + 1, 32 * 1) + const tailB = chunk.slice(32 * 1 + 1, 32 * 2) + const tailC = chunk.slice(32 * 2 + 1, 32 * 3) + const tailD = chunk.slice(32 * 3 + 1, 32 * 4) + + const x = (byteA & 0b0011_1111) | ((byteB & 0b0011_0000) << 2) + const y = (byteB & 0b0000_1111) | ((byteD & 0b0000_1111) << 4) + const z = (byteC & 0b0011_1111) | ((byteD & 0b0011_0000) << 2) + + const result = new Uint8Array(4 * 31 + 3) + result.set(tailA, 0) + result[tailA.length] = x + result.set(tailB, tailA.length + 1) + result[tailA.length + 1 + tailB.length] = y + result.set(tailC, tailA.length + 1 + tailB.length + 1) + result[tailA.length + 1 + tailB.length + 1 + tailC.length] = z + result.set(tailD, tailA.length + 1 + tailB.length + 1 + tailC.length + 1) + + if (result.length !== 4 * 31 + 3) throw new Error('Assertion failed: length of result is incorrect') + + const newBlobData = new Uint8Array(blobData.length + result.length) + newBlobData.set(blobData, 0) + newBlobData.set(result, blobData.length) + blobData = newBlobData + } + + datas.push(blobData.slice(4, declaredLength + 4)) + } + + //const rawFrames = extractFrames_v0(calldata.slice(4)) + //const rawFrames2 = extractFrames_v0(datas.toString()) + const frames: Frames = [] + const channel_parts: string[] = [] + //const rawFrames = processChannelData(datas.slice(4)) + const rawFrames = processChannelData(datas) + // console.log(rawFrames) + + for (const rawFrame of rawFrames) { + console.log('adding frame') + console.log(rawFrame.data.slice(0, 100)) + const buffer = Buffer.from(rawFrame.data, 'hex') + console.log(buffer.slice(0, 100)) + + channel_parts.push(rawFrame.data) + } + const channel = Buffer.from(channel_parts.join(''), 'hex') + + console.log('full channel', channel.length, 'bytes') + //console.log(channel.slice(0, 100).toString()) + console.log(channel.toString('hex').slice(0, 100)) + + /* + decompressIncrementally(channel) + .then((decompressedData) => { + console.log('Decompressed data:', decompressedData.toString()) + }) + .catch((err) => { + console.error('Error decompressing data:', err) + }) + + decompressBatches_v0(channel_parts.join('')) + .then((result) => { + console.log(result) // Output the result decompressed + console.log('result of', result.length, 'bytes:', result.slice(0, 100)) + }) + .catch((error) => { + console.error('An error occurred:', error) + }) + */ + + const fullChannel = channel_parts.join('') + + const decompressed = await decompressBatches_v0(fullChannel) + const dataToDecode: Uint8Array = decompressed + const { data: decoded, remainder } = rlp.decode(dataToDecode, true) + console.log('DECODED:', typeof decoded) + console.log(decoded) + + if (decoded[0] !== 1) { + throw new Error('decoded value is not a span batch') + } + + 
if (!(decoded instanceof Uint8Array)) { + return + } + + //console.log('timestamp since L2 genesis:', readVarint(decoded.slice(1))) // Decode the varint + + //console.log('result of', result.length, 'bytes:', result.slice(0, 100)) + + let currentOffset = 1 + + const timestampResult = readVarint(decoded, currentOffset) + console.log('timestamp since L2 genesis:', timestampResult.value) + currentOffset = timestampResult.newOffset + + const l1OriginNumberResult = readVarint(decoded, currentOffset) + console.log('last L1 origin number:', l1OriginNumberResult.value) + currentOffset = l1OriginNumberResult.newOffset + + const parentL2BlockHashResult = readBytesAsHex(decoded, currentOffset, 20) + console.log('parent L2 block hash:', parentL2BlockHashResult.hex) + currentOffset = parentL2BlockHashResult.newOffset + + const l1OriginBlockHashResult = readBytesAsHex(decoded, currentOffset, 20) + console.log('L1 origin block hash:', l1OriginBlockHashResult.hex) + currentOffset = l1OriginBlockHashResult.newOffset + + // Read L2 blocks number + const l2BlocksNumberResult = readVarint(decoded, currentOffset) + const l2BlocksNumber = l2BlocksNumberResult.value + currentOffset = l2BlocksNumberResult.newOffset + + console.log('number of L2 blocks:', l2BlocksNumber) + + // Read L1 origin changes bitlist + const originChangesResult = readBitlist(l2BlocksNumber, decoded, currentOffset) + const originChangesCount = originChangesResult.bits.filter((bit) => bit).length + currentOffset = originChangesResult.newOffset + + console.log('how many were changed by L1 origin:', originChangesCount) + + // Read total transactions + let totalTxs = 0 + for (let i = 0; i < l2BlocksNumber; i++) { + const txCountResult = readVarint(decoded, currentOffset) + totalTxs += txCountResult.value + currentOffset = txCountResult.newOffset + } + + console.log('total txs:', totalTxs) + + // Read contract creation transactions number + const contractCreationResult = readBitlist(totalTxs, decoded, currentOffset) + const contractCreationTxsNumber = contractCreationResult.bits.filter((bit) => bit).length + currentOffset = contractCreationResult.newOffset + + console.log('contract creation txs number:', contractCreationTxsNumber) + + /* + // Read y parity bits + const yParityBitsResult = readBitlist(totalTxs, decoded, currentOffset) + currentOffset = yParityBitsResult.newOffset + + // Read transaction signatures, to addresses, and other fields + const txSigs = [] + const txTos = [] + for (let i = 0; i < totalTxs; i++) { + const sigResult = readBytesAsHex(decoded, currentOffset, 64) + txSigs.push(sigResult.hex) + currentOffset = sigResult.newOffset + + const toResult = readBytesAsHex(decoded, currentOffset, 20) + txTos.push(toResult.hex) + currentOffset = toResult.newOffset + } + + // Verify contract creation addresses + const contractCreationCount = txTos.filter((to) => parseInt(to, 16) === 0).length + console.assert(contractCreationCount === contractCreationTxsNumber, 'Contract creation transaction number mismatch') + + // Remaining data processing + const remainingData = decoded.slice(currentOffset) + let p = 0 + let legacyTxsNumber = 0 + const txDatas = [] + + for (let i = 0; i < totalTxs; i++) { + if (remainingData[p] === 1 || remainingData[p] === 2) { + p++ + } else { + legacyTxsNumber++ + } + const txData = rlp.decode(remainingData.slice(p)) as any + txDatas.push(txData) + + const consumedLength = rlp.codec.consumeLengthPrefix(remainingData.slice(p), 0)[2] as number + p += consumedLength + } + + console.log('legacy txs number:', 
legacyTxsNumber) + + // Calculate nonce values + const txNonces = [] + for (let i = 0; i < totalTxs; i++) { + const nonceResult = readVarint(decoded, currentOffset) + txNonces.push(nonceResult.value) + currentOffset = nonceResult.newOffset + } + + // Calculate total gas + let totalGasLimit = 0 + for (let i = 0; i < totalTxs; i++) { + const gasLimitResult = readVarint(decoded, currentOffset) + totalGasLimit += gasLimitResult.value + currentOffset = gasLimitResult.newOffset + } + + console.log('total gas limit in txs:', totalGasLimit) + + // Calculate protected legacy transactions + const protectedLegacyTxsResult = readBitlist(legacyTxsNumber, decoded, currentOffset) + const protectedLegacyTxsCount = protectedLegacyTxsResult.bits.filter((bit) => bit).length + console.log('number of EIP-155 protected legacy txs:', protectedLegacyTxsCount) + */ +} + +// Example usage +const filename = 'opstack_blobs_19538908.bin' // Replace with your binary file +processFile(filename) diff --git a/src/transactions/batcherTransaction.ts b/src/transactions/batcherTransaction.ts index a8d0aa0..8e66675 100644 --- a/src/transactions/batcherTransaction.ts +++ b/src/transactions/batcherTransaction.ts @@ -1,4 +1,4 @@ -import { Frames, extractFrames, addBatchesToFrame } from '../frames/frame' +import { Frames, addBatchesToFrame_v0, extractFrames_v0 } from '../frames/frame' export type BatcherTransaction = { version: number @@ -13,15 +13,20 @@ export const extractBatcherTransaction = async (calldata: string): Promise Date: Tue, 3 Sep 2024 18:51:35 +0200 Subject: [PATCH 2/2] cleanup --- package-lock.json | 110 ----------------------- package.json | 1 - src/batches/RawSpanBatch.ts | 14 +-- src/batches/batch.ts | 21 +---- src/frames/frame.ts | 19 +--- src/index.ts | 64 ++------------ src/index2.ts | 118 +++---------------------- src/transactions/batcherTransaction.ts | 10 +-- 8 files changed, 36 insertions(+), 321 deletions(-) diff --git a/package-lock.json b/package-lock.json index b3c183e..54358d2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,7 +12,6 @@ "@ethersproject/bignumber": "^5.7.0", "@ethersproject/providers": "^5.7.2", "ethereumjs-util": "^7.1.5", - "ethers": "^6.13.2", "rlp": "^3.0.0", "tslib": "^2.6.2", "viem": "^2.1.1" @@ -2220,11 +2219,6 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/aes-js": { - "version": "4.0.0-beta.5", - "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz", - "integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==" - }, "node_modules/agent-base": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", @@ -3486,68 +3480,6 @@ "rlp": "bin/rlp" } }, - "node_modules/ethers": { - "version": "6.13.2", - "resolved": "https://registry.npmjs.org/ethers/-/ethers-6.13.2.tgz", - "integrity": "sha512-9VkriTTed+/27BGuY1s0hf441kqwHJ1wtN2edksEtiRvXx+soxRX3iSXTfFqq2+YwrOqbDoTHjIhQnjJRlzKmg==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/ethers-io/" - }, - { - "type": "individual", - "url": "https://www.buymeacoffee.com/ricmoo" - } - ], - "dependencies": { - "@adraffy/ens-normalize": "1.10.1", - "@noble/curves": "1.2.0", - "@noble/hashes": "1.3.2", - "@types/node": "18.15.13", - "aes-js": "4.0.0-beta.5", - "tslib": "2.4.0", - "ws": "8.17.1" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/ethers/node_modules/@adraffy/ens-normalize": { - "version": "1.10.1", - "resolved": 
"https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz", - "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==" - }, - "node_modules/ethers/node_modules/@types/node": { - "version": "18.15.13", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz", - "integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q==" - }, - "node_modules/ethers/node_modules/tslib": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", - "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" - }, - "node_modules/ethers/node_modules/ws": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", - "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", @@ -11304,11 +11236,6 @@ "dev": true, "requires": {} }, - "aes-js": { - "version": "4.0.0-beta.5", - "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz", - "integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==" - }, "agent-base": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", @@ -12262,43 +12189,6 @@ } } }, - "ethers": { - "version": "6.13.2", - "resolved": "https://registry.npmjs.org/ethers/-/ethers-6.13.2.tgz", - "integrity": "sha512-9VkriTTed+/27BGuY1s0hf441kqwHJ1wtN2edksEtiRvXx+soxRX3iSXTfFqq2+YwrOqbDoTHjIhQnjJRlzKmg==", - "requires": { - "@adraffy/ens-normalize": "1.10.1", - "@noble/curves": "1.2.0", - "@noble/hashes": "1.3.2", - "@types/node": "18.15.13", - "aes-js": "4.0.0-beta.5", - "tslib": "2.4.0", - "ws": "8.17.1" - }, - "dependencies": { - "@adraffy/ens-normalize": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz", - "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==" - }, - "@types/node": { - "version": "18.15.13", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz", - "integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q==" - }, - "tslib": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", - "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" - }, - "ws": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", - "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", - "requires": {} - } - } - }, "evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", diff --git a/package.json b/package.json index f3c9f8b..99a1f89 100644 --- a/package.json +++ b/package.json @@ -57,7 +57,6 @@ "@ethersproject/bignumber": "^5.7.0", "@ethersproject/providers": "^5.7.2", "ethereumjs-util": 
"^7.1.5", - "ethers": "^6.13.2", "rlp": "^3.0.0", "tslib": "^2.6.2", "viem": "^2.1.1" diff --git a/src/batches/RawSpanBatch.ts b/src/batches/RawSpanBatch.ts index 8edea15..388d696 100644 --- a/src/batches/RawSpanBatch.ts +++ b/src/batches/RawSpanBatch.ts @@ -23,19 +23,9 @@ type spanBatchPayload struct { export class RawSpanBatch { // eslint-disable-next-line @typescript-eslint/no-unused-vars static decode(data: Uint8Array | NestedUint8Array): InnerBatch { - console.log('data0') - // console.log(data[0]) // TODO: implement: prefix ++ payload - // https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/span_batch.go#L49 // const decoded = rlp.decode(data) - // return { data } as InnerBatch - - return { - parentHash: data.toString(), - epochNum: 0, - epochHash: '0x', - timestamp: 3, - transactions: [] - } + // https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/span_batch.go#L49 + return {} as InnerBatch } } diff --git a/src/batches/batch.ts b/src/batches/batch.ts index ec9e7cd..b88dcf9 100644 --- a/src/batches/batch.ts +++ b/src/batches/batch.ts @@ -1,4 +1,3 @@ -import fs from 'fs' import rlp, { NestedUint8Array } from 'rlp' import stream from 'stream' import zlib from 'zlib' @@ -43,8 +42,7 @@ enum BatchType { const MAX_BYTES_PER_CHANNEL = 10_000_000 export const parseBatchesData = async (compressedBatches: string): Promise => { - console.log('parsing') - const decompressed = await decompressBatches_v0(compressedBatches) + const decompressed = await decompressBatches(compressedBatches) const decodedBatches: Batches = [] let dataToDecode: Uint8Array = decompressed while (dataToDecode?.length) { @@ -55,24 +53,15 @@ export const parseBatchesData = async (compressedBatches: string): Promise => { +export const decompressBatches = async (compressedBatches: string): Promise => { const inputBuffer = Buffer.from(compressedBatches, 'hex') - console.log('decompressing', inputBuffer.length, 'bytes') - - fs.writeFileSync('blob1_ts.test', inputBuffer) - console.log('written blob1_ts.test') - - //console.log(inputBuffer) - console.log(compressedBatches.slice(0, 100)) - console.log(inputBuffer.toString('hex').slice(0, 100)) try { // Decompress the input buffer const decompress = zlib.createInflate({ maxOutputLength: MAX_BYTES_PER_CHANNEL, - finishFlush: zlib.constants.Z_SYNC_FLUSH + finishFlush: zlib.constants.Z_SYNC_FLUSH // required when decompressing span batches, otherwise "Error: unexpected end of file" }) - //const decompress = zlib.createInflate() const decompressStream = stream.Readable.from(inputBuffer) const chunks: Buffer[] = [] @@ -86,15 +75,13 @@ export const decompressBatches_v0 = async (compressedBatches: string): Promise { +const decodeBatch = (decodedBatch: Uint8Array | NestedUint8Array): Batch => { if (decodedBatch.length < 1) throw new Error('Batch too short') // first byte is the batch type switch (decodedBatch[0]) { case BatchType.SingularBatch: return { inner: SingularBatch.decode(decodedBatch.slice(1)) } case BatchType.SpanBatch: - console.error('SpanBatch is not implemented') - //return { inner: decodedBatch } return { inner: RawSpanBatch.decode(decodedBatch.slice(1)) } default: throw new Error(`Unrecognized batch type: ${decodedBatch[0]}`) diff --git a/src/frames/frame.ts b/src/frames/frame.ts index f97e75f..853c0be 100644 --- a/src/frames/frame.ts +++ b/src/frames/frame.ts @@ -24,7 +24,7 @@ const BYTES_4_LENGTH = 4 * BYTE_CHARS const BYTES_13_LENGTH = 13 * 
BYTE_CHARS const BYTES_16_LENGTH = 16 * BYTE_CHARS -export const extractFrames_v0 = (data: string): FramesWithCompressedData => { +export const extractFrames = (data: string): FramesWithCompressedData => { const frames: FramesWithCompressedData = [] let offset = 0 while (offset < data.length) { @@ -50,9 +50,7 @@ export const extractFrames_v0 = (data: string): FramesWithCompressedData => { const frameDataLength = frameDataLengthInBytes * BYTE_CHARS if (frameDataLengthInBytes > MAX_FRAME_LENGTH || offset + frameDataLength > data.length) { - throw new Error( - `Frame data length is too large or exceeds buffer length: ${frameDataLengthInBytes}, ${data.length}, ${offset + frameDataLength}` - ) + throw new Error('Frame data length is too large or exceeds buffer length') } const frameData = `${data.slice(offset, offset + frameDataLength)}` @@ -73,7 +71,7 @@ export const extractFrames_v0 = (data: string): FramesWithCompressedData => { return frames } -export const addBatchesToFrame_v0 = async (frame: FrameWithCompressedData): Promise => { +export const addBatchesToFrame = async (frame: FrameWithCompressedData): Promise => { const batches = await parseBatchesData(frame.data) return { channelId: frame.channelId, @@ -82,14 +80,3 @@ export const addBatchesToFrame_v0 = async (frame: FrameWithCompressedData): Prom batches } } - -export const addBatchesToFrame_v1 = async (channel: string): Promise => { - const batches = await parseBatchesData(channel) - return { - // FIXME - channelId: 'asdfg', - frameNumber: 0, - isLast: true, - batches - } -} diff --git a/src/index.ts b/src/index.ts index b39fae8..c9f38f4 100644 --- a/src/index.ts +++ b/src/index.ts @@ -3,26 +3,6 @@ import fs from 'fs' import path from 'path' import { BatcherTransaction, extractBatcherTransaction } from './transactions/batcherTransaction' -/** - * Convert a binary file to a text file where text is the hexadecimal representation. - * @param inputFilePath Path to the binary input file. - * @param outputFilePath Path to the output text file. 
- */ -function convertBinaryToHex(inputFilePath: string, outputFilePath: string): void { - // Read the binary file into a Buffer - const binaryData = fs.readFileSync(inputFilePath) - - // Convert the binary data to a hexadecimal string - const hexString = binaryData.toString('hex') - - // TODO: add leading 0x - - // Write the hexadecimal string to the output file - fs.writeFileSync(outputFilePath, hexString) - - console.log(`Successfully converted ${inputFilePath} to hexadecimal format and saved as ${outputFilePath}`) -} - export const testWithExampleData = async ( filePath: string = 'example-data/calldata.txt' ): Promise => { @@ -43,42 +23,16 @@ export const decodeBatcherTransactionCalldata = async (calldata: string): Promis return await extractBatcherTransaction(calldata) } -//convertBinaryToHex('opstack_blobs_19538908.bin', 'opstack_blobs_19538908.txt') -// -// testWithExampleData() -// .then((result) => { -// console.log('Batch:') -// console.log(result) -// // console.log('Frames:') -// // console.log(result['frames']) -// // console.log('Frame batches:') -// // console.log(result['frames'][0]['batches']) -// // console.log('Transactions:') -// // console.log(result['frames'][0]['batches'][0]['inner']['transactions']) -// }) -// .catch((error) => { -// console.error('An error occurred:', error) -// }) - -/* -testWithExampleData( - 'example-data/calldata_tx_0xa47e5c4c1b03e60c878612737ff777484d21da0f0740c42d0343aa73d92764c6-pre-delta' -) - .then((result) => { - console.log(result) // Output the result - //decodeOptimismBlob('opstack_blobs_19538908.txt') - //decodeOptimismBlob() - }) - .catch((error) => { - console.error('An error occurred:', error) - }) -*/ - -testWithExampleData('opstack_blobs_19538908.txt') +testWithExampleData() .then((result) => { - console.log(result) // Output the result - //decodeOptimismBlob('opstack_blobs_19538908.txt') - //decodeOptimismBlob() + console.log('Batch:') + console.log(result) + // console.log('Frames:') + // console.log(result['frames']) + // console.log('Frame batches:') + // console.log(result['frames'][0]['batches']) + // console.log('Transactions:') + // console.log(result['frames'][0]['batches'][0]['inner']['transactions']) }) .catch((error) => { console.error('An error occurred:', error) diff --git a/src/index2.ts b/src/index2.ts index 3e1b16d..07d0569 100644 --- a/src/index2.ts +++ b/src/index2.ts @@ -1,10 +1,7 @@ import fs from 'fs' import rlp from 'rlp' -import { Readable } from 'stream' -import zlib from 'zlib' -import { decompressBatches_v0 } from './batches/batch' -import type { Frames, FramesWithCompressedData } from './frames/frame' -//import { extractFrames_v0 } from './frames/frame' +import { decompressBatches } from './batches/batch' + /** * Read the binary file and split it into chunks of the specified size. * @param buffer - The binary data from the file. @@ -33,8 +30,8 @@ function bytesToNumber(bytes: Uint8Array): number { * @param datas - Array of Uint8Array data chunks to process. * @returns An array of frames with compressed data. 
*/ -function processChannelData(datas: Uint8Array[]): FramesWithCompressedData { - const frames: FramesWithCompressedData = [] +function processChannelData(datas: Uint8Array[]): string { + const frames: string[] = [] for (let data of datas) { if (data[0] !== 0) throw new Error('Assertion failed: data[0] must be 0 (derivation version)') @@ -66,58 +63,18 @@ function processChannelData(datas: Uint8Array[]): FramesWithCompressedData { .map((byte) => byte.toString(16).padStart(2, '0')) .join('') - frames.push({ - channelId, - frameNumber: frameNum, - data: frameData, - isLast - }) + frames.push(frameData) data = data.slice(end) // Move to the next chunk of data } } - return frames -} + const channel = Buffer.from(frames.join(''), 'hex') + console.log('full channel', channel.length, 'bytes') + //console.log(channel.slice(0, 100).toString()) + console.log(channel.toString('hex').slice(0, 100)) -/** - * Function to incrementally decompress a zlib-compressed data stream. - * @param inputBuffer - The input buffer containing zlib-compressed data. - * @returns A promise that resolves with the decompressed data. - */ -function decompressIncrementally(inputBuffer: Buffer): Promise { - return new Promise((resolve, reject) => { - const inflate = zlib.createInflate({ finishFlush: zlib.constants.Z_SYNC_FLUSH }) - // zlib.createInflate complains like "Error: unexpected end of file" - // zlib.createInflateRaw() complains like "Error: invalid stored block lengths" - const chunks: Buffer[] = [] - - // Create a readable stream from the input buffer - const inputStream = new Readable({ - read() { - this.push(inputBuffer) - this.push(null) // Signal end of input - } - }) - - // Pipe the input stream into the inflate stream - inputStream.pipe(inflate) - - // Collect the decompressed chunks - inflate.on('data', (chunk) => { - chunks.push(chunk) - }) - - // Resolve the promise once decompression is complete - inflate.on('end', () => { - resolve(Buffer.concat(chunks)) - }) - - // Handle errors during decompression - inflate.on('error', (err) => { - reject(err) - }) - }) + return frames.join('') } /** @@ -238,55 +195,13 @@ async function processFile(filename: string): Promise { datas.push(blobData.slice(4, declaredLength + 4)) } - //const rawFrames = extractFrames_v0(calldata.slice(4)) - //const rawFrames2 = extractFrames_v0(datas.toString()) - const frames: Frames = [] - const channel_parts: string[] = [] - //const rawFrames = processChannelData(datas.slice(4)) - const rawFrames = processChannelData(datas) - // console.log(rawFrames) - - for (const rawFrame of rawFrames) { - console.log('adding frame') - console.log(rawFrame.data.slice(0, 100)) - const buffer = Buffer.from(rawFrame.data, 'hex') - console.log(buffer.slice(0, 100)) - - channel_parts.push(rawFrame.data) - } - const channel = Buffer.from(channel_parts.join(''), 'hex') + const fullChannel = processChannelData(datas) - console.log('full channel', channel.length, 'bytes') - //console.log(channel.slice(0, 100).toString()) - console.log(channel.toString('hex').slice(0, 100)) - - /* - decompressIncrementally(channel) - .then((decompressedData) => { - console.log('Decompressed data:', decompressedData.toString()) - }) - .catch((err) => { - console.error('Error decompressing data:', err) - }) - - decompressBatches_v0(channel_parts.join('')) - .then((result) => { - console.log(result) // Output the result decompressed - console.log('result of', result.length, 'bytes:', result.slice(0, 100)) - }) - .catch((error) => { - console.error('An error occurred:', error) - 
}) - */ - - const fullChannel = channel_parts.join('') - - const decompressed = await decompressBatches_v0(fullChannel) + const decompressed = await decompressBatches(fullChannel) const dataToDecode: Uint8Array = decompressed const { data: decoded, remainder } = rlp.decode(dataToDecode, true) - console.log('DECODED:', typeof decoded) - console.log(decoded) + console.log('result of', decoded.length, 'bytes:', decoded.slice(0, 100), '\n') if (decoded[0] !== 1) { throw new Error('decoded value is not a span batch') } @@ -295,10 +210,6 @@ async function processFile(filename: string): Promise { return } - //console.log('timestamp since L2 genesis:', readVarint(decoded.slice(1))) // Decode the varint - - //console.log('result of', result.length, 'bytes:', result.slice(0, 100)) - let currentOffset = 1 const timestampResult = readVarint(decoded, currentOffset) @@ -416,6 +327,5 @@ async function processFile(filename: string): Promise { */ } -// Example usage -const filename = 'opstack_blobs_19538908.bin' // Replace with your binary file +const filename = 'opstack_blobs_19538908.bin' processFile(filename) diff --git a/src/transactions/batcherTransaction.ts b/src/transactions/batcherTransaction.ts index 8e66675..6def1ea 100644 --- a/src/transactions/batcherTransaction.ts +++ b/src/transactions/batcherTransaction.ts @@ -1,4 +1,4 @@ -import { Frames, addBatchesToFrame_v0, extractFrames_v0 } from '../frames/frame' +import { Frames, addBatchesToFrame, extractFrames } from '../frames/frame' export type BatcherTransaction = { version: number @@ -13,8 +13,6 @@ export const extractBatcherTransaction = async (calldata: string): Promise