Skip to content
Merged
Show file tree
Hide file tree
Changes from 17 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/lint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -128,11 +128,11 @@ jobs:
- type: wasm
target: wasm32-unknown-unknown
exclude: |
scroll-engine,scroll-wire,scroll-bridge,scroll-network,rollup-node-manager,rollup-node-watcher,scroll-db,scroll-migration,rollup-node-indexer
scroll-engine,scroll-wire,scroll-bridge,scroll-network,rollup-node-manager,rollup-node-watcher,scroll-db,scroll-migration,rollup-node-indexer,scroll-derivation-pipeline
- type: riscv
target: riscv32imac-unknown-none-elf
exclude: |
scroll-engine,scroll-wire,scroll-bridge,scroll-network,rollup-node-manager,rollup-node-watcher,scroll-db,scroll-migration,rollup-node-indexer,scroll-codec
scroll-engine,scroll-wire,scroll-bridge,scroll-network,rollup-node-manager,rollup-node-watcher,scroll-db,scroll-migration,rollup-node-indexer,scroll-codec,scroll-derivation-pipeline
steps:
- uses: actions/checkout@v4
- uses: rui314/setup-mold@v1
Expand Down
14 changes: 14 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 6 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,10 @@ exclude = [".github/"]
[workspace]
members = [
"bin/bridge",
"crates/codec",
"crates/database/db",
"crates/database/migration",
"crates/codec",
"crates/derivation-pipeline",
"crates/engine",
"crates/indexer",
"crates/l1",
Expand Down Expand Up @@ -127,8 +128,9 @@ alloy-transport = { version = "0.12.2", default-features = false }

# scroll-alloy
scroll-alloy-consensus = { git = "https://github.com/scroll-tech/reth.git", default-features = false }
scroll-alloy-provider = { git = "https://github.com/scroll-tech/reth.git", default-features = false }
scroll-alloy-network = { git = "https://github.com/scroll-tech/reth.git", default-features = false }
scroll-alloy-provider = { git = "https://github.com/scroll-tech/reth.git", default-features = false }
scroll-alloy-rpc-types-engine = { git = "https://github.com/scroll-tech/reth.git" }

# reth
reth-eth-wire-types = { git = "https://github.com/scroll-tech/reth.git" }
Expand All @@ -150,8 +152,9 @@ reth-scroll-primitives = { git = "https://github.com/scroll-tech/reth.git", defa
rollup-node-manager = { path = "crates/node" }
rollup-node-primitives = { path = "crates/primitives" }
rollup-node-watcher = { path = "crates/watcher" }
scroll-db = { path = "crates/database/db" }
scroll-codec = { path = "crates/codec" }
scroll-db = { path = "crates/database/db" }
scroll-derivation-pipeline = { path = "crates/derivation-pipeline" }
scroll-engine = { path = "crates/engine" }
scroll-l1 = { path = "crates/l1" }
scroll-network = { path = "crates/network" }
Expand Down
2 changes: 1 addition & 1 deletion bin/bridge/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ alloy-rpc-types-engine.workspace = true

# scroll-alloy
scroll-alloy-consensus.workspace = true
scroll-alloy-rpc-types-engine = { git = "https://github.com/scroll-tech/reth.git" }
scroll-alloy-rpc-types-engine.workspace = true
scroll-alloy-provider.workspace = true

# reth
Expand Down
1 change: 1 addition & 0 deletions crates/codec/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ edition = "2024"

[dependencies]
# alloy
scroll-alloy-consensus.workspace = true
alloy-eips.workspace = true
alloy-primitives.workspace = true
alloy-rlp = { version = "0.3", default-features = false }
Expand Down
21 changes: 21 additions & 0 deletions crates/codec/src/block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,27 @@ pub struct BlockContext {
pub base_fee: U256,
/// The block gas limit.
pub gas_limit: u64,
/// The block's total transaction count.
pub num_transactions: u16,
/// The block's l1 message count.
pub num_l1_messages: u16,
}

impl BlockContext {
pub const BYTES_LENGTH: usize = 60;

/// Pushes all fields of the [`BlockContext`] into the provided buf.
pub fn to_be_bytes(&self) -> [u8; Self::BYTES_LENGTH] {
let mut buf = [0u8; Self::BYTES_LENGTH];

buf[..8].copy_from_slice(&self.number.to_be_bytes());
buf[8..16].copy_from_slice(&self.timestamp.to_be_bytes());
if self.base_fee != U256::ZERO {
buf[16..48].copy_from_slice(&self.base_fee.to_be_bytes::<32>());
}
buf[48..56].copy_from_slice(&self.gas_limit.to_be_bytes());
buf[56..58].copy_from_slice(&self.num_transactions.to_be_bytes());
buf[58..].copy_from_slice(&self.num_l1_messages.to_be_bytes());
buf
}
}
118 changes: 118 additions & 0 deletions crates/codec/src/decoding/batch.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
use crate::{BlockContext, L2Block, decoding::payload::PayloadData};
use alloy_primitives::{B256, bytes::BufMut, keccak256};
use scroll_alloy_consensus::TxL1Message;

/// The deserialized batch data.
///
/// Holds the decoded contents of a batch commitment posted to L1.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Batch {
    /// The batch version.
    pub version: u8,
    /// The amount of blocks for each chunk of the batch. Only relevant for codec versions v0 ->
    /// v6; `None` otherwise.
    pub chunks_block_count: Option<Vec<usize>>,
    /// The data for the batch.
    pub data: PayloadData,
}

impl Batch {
    /// Returns a new instance of a batch.
    pub fn new(version: u8, chunks_block_count: Option<Vec<usize>>, data: PayloadData) -> Self {
        Self { version, chunks_block_count, data }
    }

    /// Computes the hash for the batch, using the provided L1 messages associated with each block.
    ///
    /// Returns `None` for codec versions >= 7 (no data hash), when the chunk layout is missing,
    /// or when the block/message slices are shorter than the chunk layout requires.
    pub fn try_hash(&self, l1_messages: &[TxL1Message]) -> Option<B256> {
        // From version 7 and above, the batch doesn't have a data hash.
        if self.version >= 7 {
            return None;
        }

        let chunks_count = self.chunks_block_count.as_ref()?;
        // cursors over the blocks and messages, advanced chunk by chunk.
        let blocks_buf = &mut (&**self.data.l2_blocks());
        let l1_messages_buf = &mut (&*l1_messages);

        // each chunk contributes a 32-byte hash to the pre-image.
        let mut chunk_hashes = Vec::with_capacity(chunks_count.len() * 32);

        for chunk_count in chunks_count {
            // slice the blocks at chunk_count and filter l1 message.
            let blocks = blocks_buf.get(..*chunk_count)?;
            let l1_messages_count = blocks.iter().map(|b| b.context.num_l1_messages as usize).sum();
            let messages = l1_messages_buf.get(..l1_messages_count)?;

            // compute the chunk data hash.
            chunk_hashes.extend_from_slice(hash_chunk(self.version, blocks, messages).as_slice());

            // advance BOTH cursors: the original code only advanced the blocks buffer, so every
            // chunk reused the messages at the front of the slice.
            *blocks_buf = blocks_buf.get(*chunk_count..).unwrap_or(&[]);
            *l1_messages_buf = l1_messages_buf.get(l1_messages_count..).unwrap_or(&[]);
        }

        Some(keccak256(chunk_hashes))
    }
}

/// Compute the hash for the chunk.
///
/// The hash pre-image is, in order: the first 58 bytes of every block context, the hash of each
/// L1 message, and — for codec v0 only — the keccak hash of every L2 transaction.
fn hash_chunk(version: u8, l2_blocks: &[L2Block], l1_messages: &[TxL1Message]) -> B256 {
    // reserve the correct capacity: 58 context bytes per block plus 32 bytes per message hash.
    let mut capacity = l2_blocks.len() * (BlockContext::BYTES_LENGTH - 2) + l1_messages.len() * 32;
    if version == 0 {
        // each v0 transaction contributes a 32-byte keccak hash (the original reserved only one
        // byte per transaction, forcing reallocations).
        capacity += l2_blocks.iter().map(|b| b.transactions.len()).sum::<usize>() * 32;
    }
    let mut buf = Vec::with_capacity(capacity);

    for block in l2_blocks {
        let context = block.context.to_be_bytes();
        // we don't use the last 2 bytes.
        // <https://github.com/scroll-tech/da-codec/blob/main/encoding/codecv0_types.go#L175>
        buf.put_slice(&context[..BlockContext::BYTES_LENGTH - 2]);
    }

    for l1_message in l1_messages {
        buf.put_slice(l1_message.tx_hash().as_slice())
    }

    // for v0, we add the l2 transaction hashes.
    if version == 0 {
        for block in l2_blocks {
            for tx in &block.transactions {
                buf.put_slice(keccak256(&tx.0).as_slice());
            }
        }
    }

    keccak256(buf)
}

#[cfg(test)]
mod tests {
    use crate::decoding::{test_utils::read_to_bytes, v0::decode_v0};

    use crate::decoding::v1::decode_v1;
    use alloy_primitives::b256;

    #[test]
    fn test_should_compute_data_hash_v0() -> eyre::Result<()> {
        // calldata captured from a mainnet v0 batch commitment:
        // <https://etherscan.io/tx/0x2c7bb77d6086befd9bdcf936479fd246d1065cbd2c6aff55b1d39a67aff965c1>
        let raw_calldata = read_to_bytes("../codec/testdata/calldata_v0.bin")?;
        let batch = decode_v0(&raw_calldata)?;

        // hash with an empty L1 message list.
        let hash = batch.try_hash(&[]).unwrap();

        assert_eq!(hash, b256!("33e608dbf683c1ee03a34d01de52f67d60a0563b7e713b65a7395bb3b646f71f"));

        Ok(())
    }

    #[test]
    fn test_should_compute_data_hash_v1() -> eyre::Result<()> {
        // calldata + blob captured from a mainnet v1 batch commitment:
        // <https://etherscan.io/tx/0x27d73eef6f0de411f8db966f0def9f28c312a0ae5cfb1ac09ec23f8fa18b005b>
        let raw_calldata = read_to_bytes("../codec/testdata/calldata_v1.bin")?;
        let blob = read_to_bytes("../codec/testdata/blob_v1.bin")?;
        let batch = decode_v1(&raw_calldata, &blob)?;

        // hash with an empty L1 message list.
        let hash = batch.try_hash(&[]).unwrap();

        assert_eq!(hash, b256!("c20f5914a772663080f8a77955b33814a04f7a19c880536e562a1bcfd5343a37"));

        Ok(())
    }
}
2 changes: 1 addition & 1 deletion crates/codec/src/decoding/blob.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ mod tests {

#[test]
fn test_should_skip_unused_blob_bytes() -> eyre::Result<()> {
let blob = read_to_bytes("./src/testdata/blob_v1.bin")?;
let blob = read_to_bytes("./testdata/blob_v1.bin")?;
let iterator = BlobSliceIter::from_blob_slice(&blob);

let val = iterator.take(256).copied().collect::<Vec<_>>();
Expand Down
13 changes: 12 additions & 1 deletion crates/codec/src/decoding/macros.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,23 @@ macro_rules! from_be_bytes_slice_and_advance_buf {
let mut arr = [0u8; ::std::mem::size_of::<$ty>()];
let size = $size;
let size_of = ::std::mem::size_of::<$ty>();
arr[size_of - size..].copy_from_slice(&$slice[0..size]);
arr[size_of - size..].copy_from_slice(&$slice[..size]);
::alloy_primitives::bytes::Buf::advance($slice, size);
<$ty>::from_be_bytes(arr)
}};
}

/// Calls `from_slice` on the provided type using the passed in buffer and advances it.
///
/// Reads `size_of::<$ty>()` bytes from the front of `$slice`, builds the value via the type's
/// `from_slice` constructor, then advances the buffer past the consumed bytes.
/// NOTE(review): the indexing panics if `$slice` is shorter than `size_of::<$ty>()` — callers
/// should validate length first (e.g. via `check_buf_len!`).
#[macro_export]
macro_rules! from_slice_and_advance_buf {
    ($ty:ty, $slice: expr) => {{
        let size_of = ::std::mem::size_of::<$ty>();
        let t = <$ty>::from_slice(&$slice[..size_of]);
        // consume the bytes we just read so the caller's cursor moves forward.
        ::alloy_primitives::bytes::Buf::advance($slice, size_of);
        t
    }};
}

/// Check the buffer input to have the required length. Returns an Eof error otherwise.
#[macro_export]
macro_rules! check_buf_len {
Expand Down
9 changes: 9 additions & 0 deletions crates/codec/src/decoding/mod.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
//! Decoding implementations for the commit data.

/// Batch related structures.
pub mod batch;

/// Blob related helpers.
pub mod blob;

Expand All @@ -14,12 +17,18 @@ pub mod v1;
/// V2 implementation of the decoding.
pub mod v2;

/// V3 implementation of the decoding.
pub mod v3;

/// V4 implementation of the decoding.
pub mod v4;

/// V7 implementation of the decoding.
pub mod v7;

/// Decoded payload.
pub(crate) mod payload;

/// Tests utils.
#[cfg(any(test, feature = "test-utils"))]
pub mod test_utils;
Expand Down
68 changes: 68 additions & 0 deletions crates/codec/src/decoding/payload.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
//! Commit payload.
use crate::L2Block;
use alloy_primitives::B256;
use std::vec::Vec;

/// The payload data on the L1.
///
/// Groups the decoded L2 blocks with the L1 message queue state they were committed against.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PayloadData {
    /// The L2 blocks from the commit payload.
    pub blocks: Vec<L2Block>,
    /// Contains information about the current state of the L1 message queue.
    pub l1_message_queue_info: L1MessageQueueInfo,
}

/// Information about the state of the L1 message queue.
///
/// Either an index into the queue or a pair of rolling queue hashes, depending on the codec
/// version of the commit payload.
#[derive(Debug, Clone, PartialEq, Eq, derive_more::From)]
pub enum L1MessageQueueInfo {
    /// The queue index of the l1 message.
    Indexed(u64),
    /// The hashed state of the l1 message queue.
    Hashed {
        /// The previous l1 message queue hash.
        prev_l1_message_queue_hash: B256,
        /// The post l1 message queue hash.
        post_l1_message_queue_hash: B256,
    },
}

impl PayloadData {
    /// Returns a reference to the committed [`L2Block`]s.
    pub fn l2_blocks(&self) -> &Vec<L2Block> {
        &self.blocks
    }

    /// Consumes the payload, returning the committed [`L2Block`]s.
    pub fn into_l2_blocks(self) -> Vec<L2Block> {
        self.blocks
    }

    /// Returns the l1 message queue index of the first message in the batch.
    pub fn queue_index_start(&self) -> Option<u64> {
        if let L1MessageQueueInfo::Indexed(index) = self.l1_message_queue_info {
            Some(index)
        } else {
            None
        }
    }

    /// Returns the l1 message queue hash before the commitment of the batch.
    pub fn prev_l1_message_queue_hash(&self) -> Option<&B256> {
        match &self.l1_message_queue_info {
            L1MessageQueueInfo::Hashed { prev_l1_message_queue_hash, .. } => {
                Some(prev_l1_message_queue_hash)
            }
            L1MessageQueueInfo::Indexed(_) => None,
        }
    }

    /// Returns the l1 message queue hash after the commitment of the batch.
    pub fn post_l1_message_queue_hash(&self) -> Option<&B256> {
        match &self.l1_message_queue_info {
            L1MessageQueueInfo::Hashed { post_l1_message_queue_hash, .. } => {
                Some(post_l1_message_queue_hash)
            }
            L1MessageQueueInfo::Indexed(_) => None,
        }
    }
}
Loading