Commit ee5da91

feat: derivation pipeline (#40)
* test: move calldata to files
* feat: batch header decoding
* feat: improve codec interface
* chore: manifests fixes
* feat: revert some codec changes
* feat: wip derivation pipeline
* feat: batch header v7
* feat: add batch abstraction
* feat: basic derivation pipeline
* feat: implement batch data hash
* feat: move PayloadData
* feat: improve batch data hash computation
* test: derivation
* chore: cleaning
* fix: lints
* fix: lints
* fix: skip wasm for derivation pipeline
* fix: data hash computation for batch
* fix: lint
* fix: lint
* fix: lints
* fix: answer comments
* fix: lints
1 parent 8389d1b commit ee5da91

51 files changed: +1740 −231 lines

.github/workflows/lint.yaml

Lines changed: 4 additions & 2 deletions
```diff
@@ -127,10 +127,12 @@ jobs:
         include:
           - type: wasm
             target: wasm32-unknown-unknown
-            exclude: scroll-engine,scroll-wire,scroll-network,rollup-node-manager,rollup-node-watcher,scroll-db,scroll-migration,rollup-node-indexer,rollup-node
+            exclude: |
+              scroll-engine,scroll-wire,rollup-node,scroll-network,rollup-node-manager,rollup-node-watcher,scroll-db,scroll-migration,rollup-node-indexer,scroll-derivation-pipeline
           - type: riscv
             target: riscv32imac-unknown-none-elf
-            exclude: scroll-engine,scroll-wire,scroll-network,rollup-node-manager,rollup-node-watcher,scroll-db,scroll-migration,rollup-node-indexer,rollup-node,scroll-codec
+            exclude: |
+              scroll-engine,scroll-wire,rollup-node,scroll-network,rollup-node-manager,rollup-node-watcher,scroll-db,scroll-migration,rollup-node-indexer,scroll-codec,scroll-derivation-pipeline
     steps:
       - uses: actions/checkout@v4
       - uses: rui314/setup-mold@v1
```

Cargo.lock

Lines changed: 15 additions & 0 deletions
Generated file; diff not rendered.

Cargo.toml

Lines changed: 6 additions & 3 deletions
```diff
@@ -8,9 +8,10 @@ exclude = [".github/"]
 [workspace]
 members = [
     "bin/rollup",
+    "crates/codec",
     "crates/database/db",
     "crates/database/migration",
-    "crates/codec",
+    "crates/derivation-pipeline",
     "crates/engine",
     "crates/indexer",
     "crates/l1",
@@ -127,8 +128,9 @@ alloy-transport = { version = "0.12.2", default-features = false }

 # scroll-alloy
 scroll-alloy-consensus = { git = "https://github.com/scroll-tech/reth.git", default-features = false }
-scroll-alloy-provider = { git = "https://github.com/scroll-tech/reth.git", default-features = false }
 scroll-alloy-network = { git = "https://github.com/scroll-tech/reth.git", default-features = false }
+scroll-alloy-provider = { git = "https://github.com/scroll-tech/reth.git", default-features = false }
+scroll-alloy-rpc-types-engine = { git = "https://github.com/scroll-tech/reth.git" }

 # reth
 reth-eth-wire-types = { git = "https://github.com/scroll-tech/reth.git", default-features = false }
@@ -151,8 +153,9 @@ rollup-node-indexer = { path = "crates/indexer" }
 rollup-node-manager = { path = "crates/node" }
 rollup-node-primitives = { path = "crates/primitives" }
 rollup-node-watcher = { path = "crates/watcher" }
-scroll-db = { path = "crates/database/db" }
 scroll-codec = { path = "crates/codec" }
+scroll-db = { path = "crates/database/db" }
+scroll-derivation-pipeline = { path = "crates/derivation-pipeline" }
 scroll-engine = { path = "crates/engine" }
 scroll-l1 = { path = "crates/l1" }
 scroll-network = { path = "crates/network" }
```

bin/rollup/Cargo.toml

Lines changed: 1 addition & 1 deletion
```diff
@@ -14,7 +14,7 @@ alloy-rpc-types-engine.workspace = true

 # scroll-alloy
 scroll-alloy-consensus.workspace = true
-scroll-alloy-rpc-types-engine = { git = "https://github.com/scroll-tech/reth.git" }
+scroll-alloy-rpc-types-engine.workspace = true
 scroll-alloy-provider.workspace = true

 # reth
```

crates/codec/Cargo.toml

Lines changed: 2 additions & 0 deletions
```diff
@@ -5,6 +5,7 @@ edition = "2021"

 [dependencies]
 # alloy
+scroll-alloy-consensus.workspace = true
 alloy-eips.workspace = true
 alloy-primitives.workspace = true
 alloy-rlp = { version = "0.3", default-features = false }
@@ -21,6 +22,7 @@ zstd = "0.13"

 [dev-dependencies]
 eyre.workspace = true
+serde_json = "1.0"

 [features]
 test-utils = ["dep:eyre", "scroll-l1/test-utils"]
```

crates/codec/src/block.rs

Lines changed: 21 additions & 0 deletions
```diff
@@ -31,6 +31,27 @@ pub struct BlockContext {
     pub base_fee: U256,
     /// The block gas limit.
     pub gas_limit: u64,
+    /// The block's total transaction count.
+    pub num_transactions: u16,
     /// The block's l1 message count.
     pub num_l1_messages: u16,
 }
+
+impl BlockContext {
+    pub const BYTES_LENGTH: usize = 60;
+
+    /// Returns an owned array which contains all fields of the [`BlockContext`].
+    pub fn to_be_bytes(&self) -> [u8; Self::BYTES_LENGTH] {
+        let mut buf = [0u8; Self::BYTES_LENGTH];
+
+        buf[..8].copy_from_slice(&self.number.to_be_bytes());
+        buf[8..16].copy_from_slice(&self.timestamp.to_be_bytes());
+        if self.base_fee != U256::ZERO {
+            buf[16..48].copy_from_slice(&self.base_fee.to_be_bytes::<32>());
+        }
+        buf[48..56].copy_from_slice(&self.gas_limit.to_be_bytes());
+        buf[56..58].copy_from_slice(&self.num_transactions.to_be_bytes());
+        buf[58..].copy_from_slice(&self.num_l1_messages.to_be_bytes());
+        buf
+    }
+}
```
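The `to_be_bytes` method above fixes a 60-byte big-endian layout: 8 bytes block number, 8 bytes timestamp, 32 bytes base fee (left zeroed when unset), 8 bytes gas limit, 2 bytes transaction count, and 2 bytes L1 message count. A minimal decoding sketch that makes the layout explicit — the helper is hypothetical and not part of this commit; only `U256` from `alloy_primitives` is assumed:

```rust
use alloy_primitives::U256;

/// Hypothetical inverse of `BlockContext::to_be_bytes`, shown only to
/// illustrate the fixed 60-byte field layout.
fn decode_block_context(buf: &[u8; 60]) -> (u64, u64, U256, u64, u16, u16) {
    let number = u64::from_be_bytes(buf[..8].try_into().unwrap());
    let timestamp = u64::from_be_bytes(buf[8..16].try_into().unwrap());
    let base_fee = U256::from_be_slice(&buf[16..48]);
    let gas_limit = u64::from_be_bytes(buf[48..56].try_into().unwrap());
    let num_transactions = u16::from_be_bytes(buf[56..58].try_into().unwrap());
    let num_l1_messages = u16::from_be_bytes(buf[58..60].try_into().unwrap());
    (number, timestamp, base_fee, gas_limit, num_transactions, num_l1_messages)
}
```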

crates/codec/src/decoding/batch.rs

Lines changed: 202 additions & 0 deletions
```rust
use crate::{
    decoding::{constants::KECCAK_256_DIGEST_BYTES_SIZE, payload::PayloadData},
    BlockContext, L2Block,
};

use alloy_primitives::{bytes::BufMut, keccak256, B256};
use scroll_alloy_consensus::TxL1Message;

/// The deserialized batch data.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Batch {
    /// The batch version.
    pub version: u8,
    /// The number of blocks in each chunk of the batch. Only relevant for codec versions v0 ->
    /// v6.
    pub chunks_block_count: Option<Vec<usize>>,
    /// The data for the batch.
    pub data: PayloadData,
}

impl Batch {
    /// Returns a new instance of a batch.
    pub fn new(version: u8, chunks_block_count: Option<Vec<usize>>, data: PayloadData) -> Self {
        Self { version, chunks_block_count, data }
    }

    /// Computes the data hash for the batch, using the provided L1 messages associated with each
    /// block.
    pub fn try_compute_data_hash(&self, l1_messages: &[TxL1Message]) -> Option<B256> {
        // From version 7 and above, the batch doesn't have a data hash.
        if self.version >= 7 {
            return None;
        }

        let total_l1_messages: usize =
            self.data.l2_blocks().iter().map(|b| b.context.num_l1_messages as usize).sum();
        debug_assert_eq!(total_l1_messages, l1_messages.len(), "invalid l1 messages count");

        let chunks_count = self.chunks_block_count.as_ref()?;
        let blocks_buf = &mut (&**self.data.l2_blocks());
        let l1_messages_buf = &mut (&*l1_messages);

        let mut chunk_hashes =
            Vec::with_capacity(chunks_count.len() * KECCAK_256_DIGEST_BYTES_SIZE);

        for chunk_count in chunks_count {
            // slice the blocks at chunk_count.
            let blocks = blocks_buf.get(..*chunk_count)?;

            // take the correct amount of l1 messages for each block and advance the buffer.
            let l1_messages_per_block = blocks
                .iter()
                .map(|b| {
                    let num_l1_messages = b.context.num_l1_messages as usize;
                    let block_messages = l1_messages_buf.get(..num_l1_messages).unwrap_or(&[]);
                    *l1_messages_buf = l1_messages_buf.get(num_l1_messages..).unwrap_or(&[]);
                    block_messages
                })
                .collect::<Vec<_>>();

            // compute the chunk data hash.
            chunk_hashes
                .append(&mut hash_chunk(self.version, blocks, l1_messages_per_block).to_vec());

            // advance the buffer.
            *blocks_buf = blocks_buf.get(*chunk_count..).unwrap_or(&[]);
        }

        Some(keccak256(chunk_hashes))
    }
}

/// Compute the hash for the chunk.
fn hash_chunk(
    version: u8,
    l2_blocks: &[L2Block],
    l1_messages_per_block: Vec<&[TxL1Message]>,
) -> B256 {
    // reserve the correct capacity.
    let l1_messages_count: usize =
        l1_messages_per_block.iter().map(|messages| messages.len()).sum();
    let mut capacity = l2_blocks.len() * (BlockContext::BYTES_LENGTH - 2) +
        l1_messages_count * KECCAK_256_DIGEST_BYTES_SIZE;
    if version == 0 {
        capacity += l2_blocks.iter().map(|b| b.transactions.len()).sum::<usize>();
    }
    let mut buf = Vec::with_capacity(capacity);

    for block in l2_blocks {
        let context = block.context.to_be_bytes();
        // we don't use the last 2 bytes.
        // <https://github.com/scroll-tech/da-codec/blob/main/encoding/codecv0_types.go#L175>
        buf.put_slice(&context[..BlockContext::BYTES_LENGTH - 2]);
    }

    for (block, l1_messages) in l2_blocks.iter().zip(l1_messages_per_block) {
        for l1_message in l1_messages {
            buf.put_slice(l1_message.tx_hash().as_slice())
        }

        // for v0, we add the l2 transaction hashes.
        if version == 0 {
            for tx in &block.transactions {
                buf.put_slice(keccak256(&tx.0).as_slice());
            }
        }
    }

    keccak256(buf)
}

#[cfg(test)]
mod tests {
    use crate::decoding::{test_utils::read_to_bytes, v0::decode_v0, v1::decode_v1};

    use alloy_primitives::{address, b256, bytes, U256};
    use scroll_alloy_consensus::TxL1Message;

    #[test]
    fn test_should_compute_data_hash_v0() -> eyre::Result<()> {
        // <https://etherscan.io/tx/0x2c7bb77d6086befd9bdcf936479fd246d1065cbd2c6aff55b1d39a67aff965c1>
        let raw_calldata = read_to_bytes("./testdata/calldata_v0.bin")?;
        let batch = decode_v0(&raw_calldata)?;

        let hash = batch.try_compute_data_hash(&[]).unwrap();

        assert_eq!(hash, b256!("33e608dbf683c1ee03a34d01de52f67d60a0563b7e713b65a7395bb3b646f71f"));

        Ok(())
    }

    #[test]
    fn test_should_compute_data_hash_v0_with_l1_messages() -> eyre::Result<()> {
        // <https://etherscan.io/tx/0xdc0a315b25b46f4c1085e3884c63f8ede61e984e47655f7667e5f14e3df55f82>
        let raw_calldata = read_to_bytes("./testdata/calldata_v0_with_l1_messages.bin")?;
        let batch = decode_v0(&raw_calldata)?;

        let hash = batch
            .try_compute_data_hash(&[
                TxL1Message {
                    queue_index: 39,
                    gas_limit: 180000,
                    to: address!("781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC"),
                    value: U256::ZERO,
                    sender: address!("7885BcBd5CeCEf1336b5300fb5186A12DDD8c478"),
                    input: bytes!("8ef1332e000000000000000000000000f1af3b23de0a5ca3cab7261cb0061c0d779a5c7b00000000000000000000000033b60d5dd260d453cac3782b0bdc01ce846721420000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002700000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e48431f5c1000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000006efdbff2a14a7c8e15944d1f4a48f9f95f663a4000000000000000000000000c451b0191351ce308fdfd779d73814c910fc5ecb000000000000000000000000c451b0191351ce308fdfd779d73814c910fc5ecb00000000000000000000000000000000000000000000000000000005d21dba0000000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
                },
                TxL1Message {
                    queue_index: 40,
                    gas_limit: 168000,
                    to: address!("781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC"),
                    value: U256::ZERO,
                    sender: address!("7885BcBd5CeCEf1336b5300fb5186A12DDD8c478"),
                    input: bytes!("8ef1332e0000000000000000000000007f2b8c31f88b6006c382775eea88297ec1e3e9050000000000000000000000006ea73e05adc79974b931123675ea8f78ffdacdf00000000000000000000000000000000000000000000000000011c37937e08000000000000000000000000000000000000000000000000000000000000000002800000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e8748000000000000000000000000b89db2813541287a4dd1fc6801eec30595ecdc6c000000000000000000000000b89db2813541287a4dd1fc6801eec30595ecdc6c0000000000000000000000000000000000000000000000000011c37937e080000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
                },
                TxL1Message {
                    queue_index: 41,
                    gas_limit: 168000,
                    to: address!("781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC"),
                    value: U256::ZERO,
                    sender: address!("7885BcBd5CeCEf1336b5300fb5186A12DDD8c478"),
                    input: bytes!("8ef1332e0000000000000000000000007f2b8c31f88b6006c382775eea88297ec1e3e9050000000000000000000000006ea73e05adc79974b931123675ea8f78ffdacdf0000000000000000000000000000000000000000000000000002386f26fc10000000000000000000000000000000000000000000000000000000000000000002900000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e87480000000000000000000000003219c394111d45757ccb68a4fd353b4f7f9660960000000000000000000000003219c394111d45757ccb68a4fd353b4f7f966096000000000000000000000000000000000000000000000000002386f26fc100000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
                },
            ])
            .unwrap();

        assert_eq!(hash, b256!("55fd647c58461d910b5bfb4539f2177ba575c9c8d578a344558976a4375cc287"));

        Ok(())
    }

    #[test]
    fn test_should_compute_data_hash_v1() -> eyre::Result<()> {
        // <https://etherscan.io/tx/0x27d73eef6f0de411f8db966f0def9f28c312a0ae5cfb1ac09ec23f8fa18b005b>
        let raw_calldata = read_to_bytes("./testdata/calldata_v1.bin")?;
        let blob = read_to_bytes("./testdata/blob_v1.bin")?;
        let batch = decode_v1(&raw_calldata, &blob)?;

        let hash = batch.try_compute_data_hash(&[]).unwrap();

        assert_eq!(hash, b256!("c20f5914a772663080f8a77955b33814a04f7a19c880536e562a1bcfd5343a37"));

        Ok(())
    }

    #[test]
    fn test_should_compute_data_hash_v1_with_l1_messages() -> eyre::Result<()> {
        // <https://etherscan.io/tx/0x30451fc1a7ad4a87f9a2616e972d2489326bafa2a41aba8cfb664aec5f727d94>
        let raw_calldata = read_to_bytes("./testdata/calldata_v1_with_l1_messages.bin")?;
        let raw_blob = read_to_bytes("./testdata/blob_v1_with_l1_messages.bin")?;
        let batch = decode_v1(&raw_calldata, &raw_blob)?;

        let l1_messages: Vec<TxL1Message> =
            serde_json::from_str(&std::fs::read_to_string("./testdata/l1_messages_v1.json")?)?;

        let hash = batch.try_compute_data_hash(&l1_messages).unwrap();

        assert_eq!(hash, b256!("e20ac534891e7f96c3a945e2aafe0a05c7079959eccd94ad217ee0f3b29ac030"));

        Ok(())
    }
}
```
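The hashing structure above is: each chunk hash is a keccak256 over the concatenated block contexts (trimmed to 58 bytes each), followed by the L1 message hashes and, for v0 only, the L2 transaction hashes; the batch data hash is then a keccak256 over the concatenated chunk hashes. A minimal standalone sketch of that outer step, assuming only `alloy_primitives` is available:

```rust
use alloy_primitives::{keccak256, B256};

/// Sketch of the outer step performed by `try_compute_data_hash`:
/// data_hash = keccak256(chunk_hash_0 || chunk_hash_1 || ... || chunk_hash_n).
fn batch_data_hash(chunk_hashes: &[B256]) -> B256 {
    let mut buf = Vec::with_capacity(chunk_hashes.len() * 32);
    for hash in chunk_hashes {
        buf.extend_from_slice(hash.as_slice());
    }
    keccak256(buf)
}
```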

crates/codec/src/decoding/blob.rs

Lines changed: 1 addition & 1 deletion
```diff
@@ -37,7 +37,7 @@ mod tests {

     #[test]
     fn test_should_skip_unused_blob_bytes() -> eyre::Result<()> {
-        let blob = read_to_bytes("./src/testdata/blob_v1.bin")?;
+        let blob = read_to_bytes("./testdata/blob_v1.bin")?;
         let iterator = BlobSliceIter::from_blob_slice(&blob);

         let val = iterator.take(256).copied().collect::<Vec<_>>();
```
crates/codec/src/decoding/constants.rs

Lines changed: 5 additions & 0 deletions

```rust
/// The length in bytes of the Keccak 256 hash digest.
pub const KECCAK_256_DIGEST_BYTES_SIZE: usize = 32;

/// The length in bytes of each item in the skipped L1 messages bitmap.
pub const SKIPPED_L1_MESSAGE_BITMAP_ITEM_BYTES_SIZE: usize = 32;
```

crates/codec/src/decoding/macros.rs

Lines changed: 12 additions & 1 deletion
```diff
@@ -8,12 +8,23 @@ macro_rules! from_be_bytes_slice_and_advance_buf {
         let mut arr = [0u8; ::std::mem::size_of::<$ty>()];
         let size = $size;
         let size_of = ::std::mem::size_of::<$ty>();
-        arr[size_of - size..].copy_from_slice(&$slice[0..size]);
+        arr[size_of - size..].copy_from_slice(&$slice[..size]);
         ::alloy_primitives::bytes::Buf::advance($slice, size);
         <$ty>::from_be_bytes(arr)
     }};
 }

+/// Calls `from_slice` on the provided type using the passed in buffer and advances it.
+#[macro_export]
+macro_rules! from_slice_and_advance_buf {
+    ($ty:ty, $slice: expr) => {{
+        let size_of = ::std::mem::size_of::<$ty>();
+        let t = <$ty>::from_slice(&$slice[..size_of]);
+        ::alloy_primitives::bytes::Buf::advance($slice, size_of);
+        t
+    }};
+}
+
 /// Check the buffer input to have the required length. Returns an Eof error otherwise.
 #[macro_export]
 macro_rules! check_buf_len {
```
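The new `from_slice_and_advance_buf!` macro reads `size_of::<$ty>()` bytes from the front of a `&mut &[u8]`, builds the type with its `from_slice` constructor, and leaves the buffer advanced past the consumed bytes. A usage sketch — it assumes the macro is exported from the `scroll_codec` crate and pairs it with `alloy_primitives::B256`, whose `from_slice` constructor fits the pattern:

```rust
use alloy_primitives::B256;
use scroll_codec::from_slice_and_advance_buf;

fn main() {
    // 32 bytes for the hash followed by 8 trailing bytes.
    let data = [0xaau8; 40];
    let buf = &mut &data[..];

    // Consumes size_of::<B256>() == 32 bytes and advances the buffer.
    let hash: B256 = from_slice_and_advance_buf!(B256, buf);

    assert_eq!(hash, B256::from([0xaa; 32]));
    assert_eq!(buf.len(), 8);
}
```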
