Skip to content

Commit 066609d

Browse files
committed
Merge branch 'main' into tests/improve-framework
2 parents fe784c1 + 78999f4 commit 066609d

File tree

12 files changed

+243
-25
lines changed

12 files changed

+243
-25
lines changed

.cargo/config.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,4 @@
11
[alias]
22
docs = "doc --workspace --all-features --no-deps"
3+
[build]
4+
rustflags = ["--cfg", "tokio_unstable"]

.dockerignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33

44
# include source files
55
!/bin
6+
!/.cargo
67
!/crates
78
!/testing
89
!/tests

.github/workflows/book.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ jobs:
99
permissions:
1010
contents: write
1111
steps:
12-
- uses: actions/checkout@v4
12+
- uses: actions/checkout@v5
1313
- uses: peaceiris/actions-mdbook@v2
1414
with:
1515
mdbook-version: 'latest'

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -230,7 +230,7 @@ serde_json = { version = "1.0" }
230230
sea-orm = { version = "1.1.0" }
231231
strum = "0.27.1"
232232
thiserror = "2.0"
233-
tokio = { version = "1.39", default-features = false }
233+
tokio = { version = "1.39", default-features = false, features = ["tracing"] }
234234
tokio-stream = { version = "0.1", default-features = false }
235235
tracing = "0.1.0"
236236
getrandom = { version = "0.2", features = ["js"] }

Dockerfile

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ RUN --mount=target=. \
1414
FROM chef AS builder
1515
WORKDIR /app
1616
COPY --from=planner /recipe.json recipe.json
17+
COPY .cargo /app/.cargo
1718
RUN cargo chef cook --release --recipe-path recipe.json
1819
RUN --mount=target=. \
1920
cargo build ${CARGO_FEATURES:+--features $CARGO_FEATURES} --release --target-dir=/app-target
@@ -29,6 +30,6 @@ WORKDIR /app
2930

3031
COPY --from=builder /app-target/release/rollup-node /bin/
3132

32-
EXPOSE 30303 30303/udp 9001 8545 8546
33+
EXPOSE 30303 30303/udp 9001 8545 8546 6669
3334

3435
ENTRYPOINT ["rollup-node"]

Dockerfile.test

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@ RUN cargo install cargo-chef --locked
88

99
FROM chef AS planner
1010
WORKDIR /app
11-
1211
COPY . .
1312
# Hacky: Replace tests with dummy stub to avoid workspace member issues of top-level Cargo.toml.
1413
# This is needed because within the Docker integration tests we don't need the tests crate
@@ -28,6 +27,7 @@ RUN cargo chef prepare --recipe-path /recipe.json
2827
FROM chef AS builder
2928
WORKDIR /app
3029
COPY --from=planner /recipe.json recipe.json
30+
COPY .cargo /app/.cargo
3131
RUN --mount=type=cache,target=/usr/local/cargo/registry \
3232
--mount=type=cache,target=/usr/local/cargo/git \
3333
cargo chef cook --release --recipe-path recipe.json
@@ -47,6 +47,6 @@ WORKDIR /app
4747

4848
COPY --from=builder /app-target/release/rollup-node /bin/
4949

50-
EXPOSE 30303 30303/udp 9001 8545 8546
50+
EXPOSE 30303 30303/udp 9001 8545 8546 6669
5151

5252
ENTRYPOINT ["rollup-node"]

Dockerfile.test.dockerignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33

44
# include source files
55
!/bin
6+
!/.cargo
67
!/crates
78
!/testing
89
!book.toml

crates/chain-orchestrator/src/lib.rs

Lines changed: 78 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ use rollup_node_providers::L1MessageProvider;
2121
use rollup_node_sequencer::{Sequencer, SequencerEvent};
2222
use rollup_node_signer::{SignatureAsBytes, SignerEvent, SignerHandle};
2323
use rollup_node_watcher::L1Notification;
24-
use scroll_alloy_consensus::TxL1Message;
24+
use scroll_alloy_consensus::{ScrollTxEnvelope, TxL1Message};
2525
use scroll_alloy_hardforks::ScrollHardforks;
2626
use scroll_alloy_network::Scroll;
2727
use scroll_alloy_provider::ScrollEngineApi;
@@ -356,13 +356,23 @@ impl<
356356
let _ = tx.send(self.network.handle().clone());
357357
}
358358
ChainOrchestratorCommand::UpdateFcsHead((head, sender)) => {
359+
// Collect transactions of reverted blocks from l2 client.
360+
let reverted_transactions = self
361+
.collect_reverted_txs_in_range(
362+
head.number.saturating_add(1),
363+
self.engine.fcs().head_block_info().number,
364+
)
365+
.await?;
359366
self.engine.update_fcs(Some(head), None, None).await?;
360367
self.database
361368
.tx_mut(move |tx| async move {
362369
tx.purge_l1_message_to_l2_block_mappings(Some(head.number + 1)).await?;
363370
tx.set_l2_head_block_number(head.number).await
364371
})
365372
.await?;
373+
374+
// Add all reverted transactions to the transaction pool.
375+
self.reinsert_txs_into_pool(reverted_transactions).await;
366376
self.notify(ChainOrchestratorEvent::FcsHeadUpdated(head));
367377
let _ = sender.send(());
368378
}
@@ -560,6 +570,43 @@ impl<
560570
Ok(Some(ChainOrchestratorEvent::NewL1Block(block_number)))
561571
}
562572

573+
/// Collects reverted L2 transactions in [from, to], excluding L1 messages.
574+
async fn collect_reverted_txs_in_range(
575+
&self,
576+
from: u64,
577+
to: u64,
578+
) -> Result<Vec<ScrollTxEnvelope>, ChainOrchestratorError> {
579+
let mut reverted_transactions: Vec<ScrollTxEnvelope> = Vec::new();
580+
for number in from..=to {
581+
let block = self
582+
.l2_client
583+
.get_block_by_number(number.into())
584+
.full()
585+
.await?
586+
.ok_or_else(|| ChainOrchestratorError::L2BlockNotFoundInL2Client(number))?;
587+
588+
let block = block.into_consensus().map_transactions(|tx| tx.inner.into_inner());
589+
reverted_transactions.extend(
590+
block.into_body().transactions.into_iter().filter(|tx| !tx.is_l1_message()),
591+
);
592+
}
593+
Ok(reverted_transactions)
594+
}
595+
596+
/// Reinserts given L2 transactions into the transaction pool.
597+
async fn reinsert_txs_into_pool(&self, txs: Vec<ScrollTxEnvelope>) {
598+
for tx in txs {
599+
let encoded_tx = tx.encoded_2718();
600+
if let Err(err) = self.l2_client.send_raw_transaction(&encoded_tx).await {
601+
tracing::warn!(
602+
target: "scroll::chain_orchestrator",
603+
?err,
604+
"failed to reinsert reverted transaction into pool"
605+
);
606+
}
607+
}
608+
}
609+
563610
/// Handles a reorganization event by deleting all indexed data which is greater than the
564611
/// provided block number.
565612
async fn handle_l1_reorg(
@@ -570,27 +617,36 @@ impl<
570617
let UnwindResult { l1_block_number, queue_index, l2_head_block_number, l2_safe_block_info } =
571618
self.database.unwind(genesis_hash, block_number).await?;
572619

573-
let l2_head_block_info = if let Some(block_number) = l2_head_block_number {
574-
// Fetch the block hash of the new L2 head block.
575-
let block_hash = self
576-
.l2_client
577-
.get_block_by_number(block_number.into())
578-
.full()
579-
.await?
580-
.expect("L2 head block must exist")
581-
.header
582-
.hash_slow();
620+
let (l2_head_block_info, reverted_transactions) =
621+
if let Some(block_number) = l2_head_block_number {
622+
// Fetch the block hash of the new L2 head block.
623+
let block_hash = self
624+
.l2_client
625+
.get_block_by_number(block_number.into())
626+
.full()
627+
.await?
628+
.expect("L2 head block must exist")
629+
.header
630+
.hash_slow();
631+
632+
// Cancel the inflight payload building job if the head has changed.
633+
if let Some(s) = self.sequencer.as_mut() {
634+
s.cancel_payload_building_job();
635+
};
636+
637+
// Collect transactions of reverted blocks from l2 client.
638+
let reverted_transactions = self
639+
.collect_reverted_txs_in_range(
640+
block_number.saturating_add(1),
641+
self.engine.fcs().head_block_info().number,
642+
)
643+
.await?;
583644

584-
// Cancel the inflight payload building job if the head has changed.
585-
if let Some(s) = self.sequencer.as_mut() {
586-
s.cancel_payload_building_job();
645+
(Some(BlockInfo { number: block_number, hash: block_hash }), reverted_transactions)
646+
} else {
647+
(None, Vec::new())
587648
};
588649

589-
Some(BlockInfo { number: block_number, hash: block_hash })
590-
} else {
591-
None
592-
};
593-
594650
// If the L1 reorg is before the origin of the inflight payload building job, cancel it.
595651
if Some(l1_block_number) <
596652
self.sequencer
@@ -608,6 +664,9 @@ impl<
608664
self.engine.update_fcs(l2_head_block_info, l2_safe_block_info, None).await?;
609665
}
610666

667+
// Add all reverted transactions to the transaction pool.
668+
self.reinsert_txs_into_pool(reverted_transactions).await;
669+
611670
let event = ChainOrchestratorEvent::L1Reorg {
612671
l1_block_number,
613672
queue_index,

crates/database/migration/src/m20250408_150338_load_header_metadata.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -199,7 +199,7 @@ async fn get_file_size(client: &ClientWithMiddleware, url: &str) -> eyre::Result
199199

200200
/// Check the hash of the data.
201201
fn verify_data_hash(expected_data_hash: B256, data: &[u8]) -> eyre::Result<()> {
202-
let hash = B256::try_from(Sha256::digest(data).as_slice())?;
202+
let hash = B256::from_slice(Sha256::digest(data).as_ref());
203203
if hash != expected_data_hash {
204204
bail!("corrupted data, expected data to hash to {expected_data_hash}, got {hash}.")
205205
}

crates/node/Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,7 @@ jsonrpsee = { version = "0.26.0", features = ["server", "client", "macros"] }
103103
reqwest.workspace = true
104104
tokio.workspace = true
105105
tracing.workspace = true
106+
console-subscriber = "0.5.0"
106107

107108
[dev-dependencies]
108109
alloy-chains.workspace = true

0 commit comments

Comments (0)