
Commit d53aedb

Revert "feat: integrate batch changes (#41)"
This reverts commit dbb9cb0.
1 parent dbb9cb0 commit d53aedb

23 files changed (+553 / -417 lines)

Cargo.lock

Lines changed: 1 addition & 4 deletions

crates/codec/src/decoding/batch.rs

Lines changed: 29 additions & 5 deletions
@@ -54,7 +54,6 @@ impl Batch {
                 let num_l1_messages = b.context.num_l1_messages as usize;
                 let block_messages = l1_messages_buf.get(..num_l1_messages).unwrap_or(&[]);
                 *l1_messages_buf = l1_messages_buf.get(num_l1_messages..).unwrap_or(&[]);
-
                 block_messages
             })
             .collect::<Vec<_>>();
@@ -114,7 +113,7 @@ fn hash_chunk(
 mod tests {
     use crate::decoding::{test_utils::read_to_bytes, v0::decode_v0, v1::decode_v1};

-    use alloy_primitives::b256;
+    use alloy_primitives::{address, b256, bytes, U256};
     use scroll_alloy_consensus::TxL1Message;

     #[test]
@@ -135,10 +134,35 @@ mod tests {
         // <https://etherscan.io/tx/0xdc0a315b25b46f4c1085e3884c63f8ede61e984e47655f7667e5f14e3df55f82>
         let raw_calldata = read_to_bytes("./testdata/calldata_v0_with_l1_messages.bin")?;
         let batch = decode_v0(&raw_calldata)?;
-        let l1_messages: Vec<TxL1Message> =
-            serde_json::from_str(&std::fs::read_to_string("./testdata/l1_messages_v0.json")?)?;

-        let hash = batch.try_compute_data_hash(&l1_messages).unwrap();
+        let hash = batch
+            .try_compute_data_hash(&[
+                TxL1Message {
+                    queue_index: 39,
+                    gas_limit: 180000,
+                    to: address!("781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC"),
+                    value: U256::ZERO,
+                    sender: address!("7885BcBd5CeCEf1336b5300fb5186A12DDD8c478"),
+                    input: bytes!("8ef1332e000000000000000000000000f1af3b23de0a5ca3cab7261cb0061c0d779a5c7b00000000000000000000000033b60d5dd260d453cac3782b0bdc01ce846721420000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002700000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e48431f5c1000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000006efdbff2a14a7c8e15944d1f4a48f9f95f663a4000000000000000000000000c451b0191351ce308fdfd779d73814c910fc5ecb000000000000000000000000c451b0191351ce308fdfd779d73814c910fc5ecb00000000000000000000000000000000000000000000000000000005d21dba0000000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
+                },
+                TxL1Message {
+                    queue_index: 40,
+                    gas_limit: 168000,
+                    to: address!("781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC"),
+                    value: U256::ZERO,
+                    sender: address!("7885BcBd5CeCEf1336b5300fb5186A12DDD8c478"),
+                    input: bytes!("8ef1332e0000000000000000000000007f2b8c31f88b6006c382775eea88297ec1e3e9050000000000000000000000006ea73e05adc79974b931123675ea8f78ffdacdf00000000000000000000000000000000000000000000000000011c37937e08000000000000000000000000000000000000000000000000000000000000000002800000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e8748000000000000000000000000b89db2813541287a4dd1fc6801eec30595ecdc6c000000000000000000000000b89db2813541287a4dd1fc6801eec30595ecdc6c0000000000000000000000000000000000000000000000000011c37937e080000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
+                },
+                TxL1Message {
+                    queue_index: 41,
+                    gas_limit: 168000,
+                    to: address!("781e90f1c8Fc4611c9b7497C3B47F99Ef6969CbC"),
+                    value: U256::ZERO,
+                    sender: address!("7885BcBd5CeCEf1336b5300fb5186A12DDD8c478"),
+                    input: bytes!("8ef1332e0000000000000000000000007f2b8c31f88b6006c382775eea88297ec1e3e9050000000000000000000000006ea73e05adc79974b931123675ea8f78ffdacdf0000000000000000000000000000000000000000000000000002386f26fc10000000000000000000000000000000000000000000000000000000000000000002900000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000a4232e87480000000000000000000000003219c394111d45757ccb68a4fd353b4f7f9660960000000000000000000000003219c394111d45757ccb68a4fd353b4f7f966096000000000000000000000000000000000000000000000000002386f26fc100000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"),
+                },
+            ])
+            .unwrap();

         assert_eq!(hash, b256!("55fd647c58461d910b5bfb4539f2177ba575c9c8d578a344558976a4375cc287"));

crates/codec/src/error.rs

Lines changed: 0 additions & 4 deletions
@@ -1,5 +1,3 @@
-use alloy_primitives::U256;
-
 /// An error occurring during the codec process.
 #[derive(Debug, thiserror::Error)]
 pub enum CodecError {
@@ -15,8 +13,6 @@ pub enum DecodingError {
     MissingCodecVersion,
     #[error("unsupported codec version {0}")]
     UnsupportedCodecVersion(u8),
-    #[error("malformed codec version: {0}")]
-    MalformedCodecVersion(U256),
     #[error("missing blob from data source")]
     MissingBlob,
     #[error("missing chunk data")]

crates/codec/src/lib.rs

Lines changed: 3 additions & 23 deletions
@@ -13,7 +13,7 @@ use crate::decoding::{
 };

 use alloy_eips::eip4844::Blob;
-use alloy_primitives::{ruint::UintTryTo, Bytes, U256};
+use alloy_primitives::Bytes;

 /// The Codec.
 #[derive(Debug)]
@@ -43,7 +43,7 @@ impl Codec {
     /// Decodes the input data and returns the decoded [`Batch`].
     pub fn decode<T: CommitDataSource>(input: &T) -> Result<Batch, CodecError> {
         let calldata = input.calldata();
-        let version = get_codec_version(calldata)?;
+        let version = calldata.first().ok_or(DecodingError::MissingCodecVersion)?;

         let payload = match version {
             0 => decode_v0(calldata)?,
@@ -63,7 +63,7 @@ impl Codec {
                 let blob = input.blob().ok_or(DecodingError::MissingBlob)?;
                 decode_v7(blob.as_ref())?
             }
-            v => return Err(DecodingError::UnsupportedCodecVersion(v).into()),
+            v => return Err(DecodingError::UnsupportedCodecVersion(*v).into()),
         };

         Ok(payload)
@@ -77,23 +77,3 @@ pub trait CommitDataSource {
     /// Returns the blob for decoding.
     fn blob(&self) -> Option<&Blob>;
 }
-
-/// Returns the codec version from the calldata.
-fn get_codec_version(calldata: &[u8]) -> Result<u8, DecodingError> {
-    const CODEC_VERSION_OFFSET_START: usize = 4;
-    const CODEC_VERSION_LEN: usize = 32;
-    const CODEC_VERSION_OFFSET_END: usize = CODEC_VERSION_OFFSET_START + CODEC_VERSION_LEN;
-    const HIGH_BYTES_MASK: U256 =
-        U256::from_limbs([u64::MAX, u64::MAX, u64::MAX, 0xffffffffffffff00]);
-
-    let version = calldata
-        .get(CODEC_VERSION_OFFSET_START..CODEC_VERSION_OFFSET_END)
-        .ok_or(DecodingError::Eof)?;
-    let version = U256::from_be_slice(version);
-
-    if (version & HIGH_BYTES_MASK) != U256::ZERO {
-        return Err(DecodingError::MalformedCodecVersion(version))
-    }
-
-    Ok(version.uint_try_to().expect("fits in single byte"))
-}
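For context, a hypothetical sketch (not part of this commit) of the two version-extraction strategies touched above: the restored path reads the codec version from the first calldata byte, while the removed get_codec_version helper read the 32-byte big-endian word just past the 4-byte selector and rejected values whose leading bytes were set. The HIGH_BYTES_MASK handling is simplified here to a plain leading-bytes-zero check.

fn version_from_first_byte(calldata: &[u8]) -> Option<u8> {
    // Restored behaviour: the version is simply the first byte of the calldata.
    calldata.first().copied()
}

fn version_from_abi_word(calldata: &[u8]) -> Option<u8> {
    // Rough approximation of the removed helper: take the low byte of the 32-byte
    // word at offset 4, provided all leading bytes are zero (the original checked a
    // HIGH_BYTES_MASK over a U256 and returned a MalformedCodecVersion error).
    let word = calldata.get(4..36)?;
    word[..31].iter().all(|&b| b == 0).then(|| word[31])
}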

crates/codec/testdata/l1_messages_v0.json

Lines changed: 0 additions & 26 deletions
This file was deleted.

crates/database/db/src/db.rs

Lines changed: 20 additions & 8 deletions
@@ -46,14 +46,15 @@ impl From<DatabaseConnection> for Database {
 #[cfg(test)]
 mod test {
     use crate::{operations::DatabaseOperations, test_utils::setup_test_db};
-
     use arbitrary::{Arbitrary, Unstructured};
     use futures::StreamExt;
     use rand::Rng;
-    use rollup_node_primitives::{BatchCommitData, L1MessageWithBlockNumber};
+    use rollup_node_primitives::{
+        BatchInput, BatchInputV1, BatchInputV2, L1MessageWithBlockNumber,
+    };

     #[tokio::test]
-    async fn test_database_round_trip_batch_commit() {
+    async fn test_database_round_trip_batch_input() {
         // Set up the test database.
         let db = setup_test_db().await;

@@ -63,13 +64,24 @@ mod test {
         let mut u = Unstructured::new(&bytes);

         // Generate a random BatchInputV1.
-        let batch_commit = BatchCommitData::arbitrary(&mut u).unwrap();
+        let batch_input_v1 = BatchInputV1::arbitrary(&mut u).unwrap();
+        let batch_input = BatchInput::BatchInputDataV1(batch_input_v1);
+
+        // Round trip the BatchInput through the database.
+        db.insert_batch_input(batch_input.clone()).await.unwrap();
+        let batch_input_from_db =
+            db.get_batch_input_by_batch_index(batch_input.batch_index()).await.unwrap().unwrap();
+        assert_eq!(batch_input, batch_input_from_db);
+
+        // Generate a random BatchInputV2.
+        let batch_input_v2 = BatchInputV2::arbitrary(&mut u).unwrap();
+        let batch_input = BatchInput::BatchInputDataV2(batch_input_v2);

         // Round trip the BatchInput through the database.
-        db.insert_batch(batch_commit.clone()).await.unwrap();
-        let batch_commit_from_db =
-            db.get_batch_by_index(batch_commit.index).await.unwrap().unwrap();
-        assert_eq!(batch_commit, batch_commit_from_db);
+        db.insert_batch_input(batch_input.clone()).await.unwrap();
+        let batch_input_from_db =
+            db.get_batch_input_by_batch_index(batch_input.batch_index()).await.unwrap().unwrap();
+        assert_eq!(batch_input, batch_input_from_db);
     }

     #[tokio::test]

crates/database/db/src/models/batch_commit.rs

Lines changed: 0 additions & 55 deletions
This file was deleted.
crates/database/db/src/models/batch_input.rs

Lines changed: 105 additions & 0 deletions
@@ -0,0 +1,105 @@
+use std::ops::Deref;
+
+use rollup_node_primitives::{BatchInput as BatchInputPrimitive, BatchInputV1, BatchInputV2};
+use sea_orm::{entity::prelude::*, ActiveValue, FromJsonQueryResult};
+
+/// A database model that represents a batch input.
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "batch_input")]
+pub struct Model {
+    #[sea_orm(primary_key)]
+    index: i64,
+    version: u8,
+    codec_version: u8,
+    hash: Vec<u8>,
+    block_number: i64,
+    parent_batch_header: Vec<u8>,
+    #[sea_orm(column_type = "JsonBinary")]
+    chunks: Chunks,
+    skipped_l1_message_bitmap: Vec<u8>,
+    blob_hash: Vec<u8>,
+    finalized_block_number: Option<i64>,
+}
+
+/// The relation for the batch input model.
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+/// The active model behavior for the batch input model.
+impl ActiveModelBehavior for ActiveModel {}
+
+/// A wrapper for a list of chunks.
+#[derive(
+    Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize, FromJsonQueryResult,
+)]
+pub struct Chunks(pub Vec<Vec<u8>>);
+
+impl Deref for Chunks {
+    type Target = Vec<Vec<u8>>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl From<BatchInputPrimitive> for ActiveModel {
+    fn from(batch_input: BatchInputPrimitive) -> Self {
+        let (version, batch_input_v1, blob_hash) = match batch_input {
+            BatchInputPrimitive::BatchInputDataV1(batch_input) => (1, batch_input, vec![]),
+            BatchInputPrimitive::BatchInputDataV2(batch_input) => {
+                (2, batch_input.batch_input_base, batch_input.blob_hash.to_vec())
+            }
+        };
+        Self {
+            index: ActiveValue::Set(
+                batch_input_v1.batch_index.try_into().expect("index should fit in i64"),
+            ),
+            version: ActiveValue::Set(version),
+            codec_version: ActiveValue::Set(batch_input_v1.version),
+            hash: ActiveValue::Set(batch_input_v1.batch_hash.to_vec()),
+            block_number: ActiveValue::Set(
+                batch_input_v1.block_number.try_into().expect("block number should fit in i64"),
+            ),
+            parent_batch_header: ActiveValue::Set(batch_input_v1.parent_batch_header),
+            chunks: ActiveValue::Set(Chunks(batch_input_v1.chunks)),
+            skipped_l1_message_bitmap: ActiveValue::Set(batch_input_v1.skipped_l1_message_bitmap),
+            blob_hash: ActiveValue::Set(blob_hash),
+            finalized_block_number: ActiveValue::Unchanged(None),
+        }
+    }
+}
+
+impl From<Model> for BatchInputPrimitive {
+    fn from(value: Model) -> Self {
+        let chunks = value.chunks.0;
+        let batch_input_v1 = BatchInputV1 {
+            version: value.codec_version,
+            batch_index: value.index.try_into().expect("data persisted in database is valid"),
+            batch_hash: value
+                .hash
+                .as_slice()
+                .try_into()
+                .expect("data persisted in database is valid"),
+            block_number: value
+                .block_number
+                .try_into()
+                .expect("data persisted in database is valid"),
+            parent_batch_header: value.parent_batch_header,
+            chunks,
+            skipped_l1_message_bitmap: value.skipped_l1_message_bitmap,
+        };
+
+        if value.version == 1 {
+            Self::BatchInputDataV1(batch_input_v1)
+        } else {
+            Self::BatchInputDataV2(BatchInputV2 {
+                batch_input_base: batch_input_v1,
+                blob_hash: value
+                    .blob_hash
+                    .as_slice()
+                    .try_into()
+                    .expect("data persisted in database is valid"),
+            })
+        }
+    }
+}
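As a usage illustration only (a hypothetical sketch, not part of this commit), the two From implementations above are meant to compose into a round trip between the primitive BatchInput and its database model. The field values below are made up, the field types are inferred from the conversion code above, and the sketch assumes it sits in the same module so ActiveModel is in scope.

fn example_round_trip() {
    use rollup_node_primitives::{BatchInput, BatchInputV1};

    // Hypothetical BatchInputV1 with illustrative values.
    let batch_input = BatchInput::BatchInputDataV1(BatchInputV1 {
        version: 0,
        batch_index: 42,
        batch_hash: Default::default(),
        block_number: 100,
        parent_batch_header: vec![],
        chunks: vec![vec![0u8; 4]],
        skipped_l1_message_bitmap: vec![],
    });

    // Into the sea-orm ActiveModel used for insertion...
    let _active_model: ActiveModel = batch_input.clone().into();

    // ...and, once a row is read back as a Model, the reverse conversion applies:
    // let batch_input_again: BatchInput = model.into();
}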
crates/database/db/src/models/mod.rs

Lines changed: 2 additions & 2 deletions
@@ -1,5 +1,5 @@
-/// This module contains the batch commit database model.
-pub mod batch_commit;
+/// This module contains the batch input database model.
+pub mod batch_input;

 /// This module contains the L1 message database model.
 pub mod l1_message;
