Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
179 changes: 94 additions & 85 deletions Cargo.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -613,6 +613,7 @@ mini-moka = "0.10"
tar-no-std = { version = "0.3.2", default-features = false }
miniz_oxide = { version = "0.8.4", default-features = false }
chrono = "0.4.41"
encoder-standard = { git = "https://github.com/scroll-tech/da-codec", default-features = false }

# metrics
metrics = "0.24.0"
Expand Down
4 changes: 2 additions & 2 deletions crates/engine/tree/src/tree/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2582,8 +2582,8 @@ pub enum BlockStatus {

/// How a payload was inserted if it was valid.
///
/// If the payload was valid, but has already been seen, [`InsertPayloadOk::AlreadySeen(_)`] is
/// returned, otherwise [`InsertPayloadOk::Inserted(_)`] is returned.
/// If the payload was valid, but has already been seen, [`InsertPayloadOk::AlreadySeen`] is
/// returned, otherwise [`InsertPayloadOk::Inserted`] is returned.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum InsertPayloadOk {
/// The payload was valid, but we have already seen it.
Expand Down
2 changes: 1 addition & 1 deletion crates/net/downloaders/src/bodies/bodies.rs
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ where
/// Max requests to handle at the same time
///
/// This depends on the number of active peers but will always be
/// [`min_concurrent_requests`..`max_concurrent_requests`]
/// `min_concurrent_requests..max_concurrent_requests`
#[inline]
fn concurrent_request_limit(&self) -> usize {
let num_peers = self.client.num_connected_peers();
Expand Down
2 changes: 1 addition & 1 deletion crates/net/downloaders/src/headers/reverse_headers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ where
/// Max requests to handle at the same time
///
/// This depends on the number of active peers but will always be
/// [`min_concurrent_requests`..`max_concurrent_requests`]
/// `min_concurrent_requests..max_concurrent_requests`
#[inline]
fn concurrent_request_limit(&self) -> usize {
let num_peers = self.client.num_connected_peers();
Expand Down
6 changes: 3 additions & 3 deletions crates/net/network/src/test_utils/testnet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -788,9 +788,9 @@ impl NetworkEventStream {
peers
}

/// Ensures that the first two events are a [`NetworkEvent::Peer(PeerEvent::PeerAdded`] and
/// [`NetworkEvent::ActivePeerSession`], returning the [`PeerId`] of the established
/// session.
    /// Ensures that the first two events are a [`NetworkEvent::Peer`] carrying
    /// [`PeerEvent::PeerAdded`] followed by a [`NetworkEvent::ActivePeerSession`], returning the
    /// [`PeerId`] of the established session.
pub async fn peer_added_and_established(&mut self) -> Option<PeerId> {
let peer_id = match self.inner.next().await {
Some(NetworkEvent::Peer(PeerEvent::PeerAdded(peer_id))) => peer_id,
Expand Down
4 changes: 2 additions & 2 deletions crates/node/core/src/args/pruning.rs
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,8 @@ pub struct PruningArgs {
pub receipts_before: Option<BlockNumber>,
// Receipts Log Filter
/// Configure receipts log filter. Format:
/// <`address`>:<`prune_mode`>[,<`address`>:<`prune_mode`>...] Where <`prune_mode`> can be
/// 'full', 'distance:<`blocks`>', or 'before:<`block_number`>'
/// <`address`>:<`prune_mode`>... where <`prune_mode`> can be 'full', 'distance:<`blocks`>', or
/// 'before:<`block_number`>'
#[arg(long = "prune.receiptslogfilter", value_name = "FILTER_CONFIG", conflicts_with_all = &["receipts_full", "receipts_pre_merge", "receipts_distance", "receipts_before"], value_parser = parse_receipts_log_filter)]
pub receipts_log_filter: Option<ReceiptsLogPruneConfig>,

Expand Down
6 changes: 3 additions & 3 deletions crates/prune/types/src/target.rs
Original file line number Diff line number Diff line change
Expand Up @@ -156,9 +156,9 @@ impl PruneModes {
/// left in database after the pruning.
///
/// 1. For [`PruneMode::Full`], it fails if `MIN_BLOCKS > 0`.
/// 2. For [`PruneMode::Distance(distance`)], it fails if `distance < MIN_BLOCKS + 1`. `+ 1` is
/// needed because `PruneMode::Distance(0)` means that we leave zero blocks from the latest,
/// meaning we have one block in the database.
/// 2. For [`PruneMode::Distance`], it fails if `distance < MIN_BLOCKS + 1`. `+ 1` is needed because
/// `PruneMode::Distance(0)` means that we leave zero blocks from the latest, meaning we have one
/// block in the database.
#[cfg(any(test, feature = "serde"))]
fn deserialize_opt_prune_mode_with_min_blocks<
'de,
Expand Down
2 changes: 1 addition & 1 deletion crates/rpc/rpc-e2e-tests/src/rpc_compat.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ pub struct RpcTestCase {
/// Action that runs RPC compatibility tests from execution-apis test data
#[derive(Debug)]
pub struct RunRpcCompatTests {
/// RPC methods to test (e.g., ["`eth_getLogs`"])
/// RPC methods to test (e.g. `eth_getLogs`)
pub methods: Vec<String>,
/// Path to the execution-apis tests directory
pub test_data_path: String,
Expand Down
4 changes: 2 additions & 2 deletions crates/scroll/alloy/evm/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ scroll-alloy-hardforks = { workspace = true, default-features = false }
# misc
auto_impl = { workspace = true, default-features = false }
serde = { workspace = true, default-features = false, features = ["derive"], optional = true }
zstd = { version = "=0.13.3", features = ["experimental"], default-features = false, optional = true }
encoder-standard = { workspace = true, default-features = false, optional = true }

[dev-dependencies]
alloy-hardforks.workspace = true
Expand Down Expand Up @@ -69,4 +69,4 @@ serde = [
"scroll-alloy-hardforks/serde",
"alloy-hardforks/serde",
]
zstd_compression = ["zstd"]
zstd_compression = ["encoder-standard"]
31 changes: 2 additions & 29 deletions crates/scroll/alloy/evm/src/tx/compression.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,35 +13,8 @@ mod zstd_compression {
use super::*;
use std::io::Write;

use encoder_standard::{init_zstd_encoder, N_BLOCK_SIZE_TARGET};
use revm_scroll::l1block::TX_L1_FEE_PRECISION_U256;
use zstd::{
stream::Encoder,
zstd_safe::{CParameter, ParamSwitch},
};

/// The maximum size of the compression window in bytes (`2^CL_WINDOW_LIMIT`).
const CL_WINDOW_LIMIT: u32 = 22;

/// The zstd block size target.
const N_BLOCK_SIZE_TARGET: u32 = 124 * 1024;

fn compressor(target_block_size: u32) -> Encoder<'static, Vec<u8>> {
let mut encoder = Encoder::new(Vec::new(), 0).expect("Failed to create zstd encoder");
encoder
.set_parameter(CParameter::LiteralCompressionMode(ParamSwitch::Disable))
.expect("Failed to set literal compression mode");
encoder
.set_parameter(CParameter::WindowLog(CL_WINDOW_LIMIT))
.expect("Failed to set window log");
encoder
.set_parameter(CParameter::TargetCBlockSize(target_block_size))
.expect("Failed to set target block size");
encoder.include_checksum(false).expect("Failed to disable checksum");
encoder.include_magicbytes(false).expect("Failed to disable magic bytes");
encoder.include_dictid(false).expect("Failed to disable dictid");
encoder.include_contentsize(true).expect("Failed to include content size");
encoder
}

/// Computes the compression ratio for the provided bytes.
///
Expand All @@ -54,7 +27,7 @@ mod zstd_compression {
}

// Instantiate the compressor
let mut compressor = compressor(N_BLOCK_SIZE_TARGET);
let mut compressor = init_zstd_encoder(N_BLOCK_SIZE_TARGET);

// Set the pledged source size to the length of the bytes
// and write the bytes to the compressor.
Expand Down
2 changes: 1 addition & 1 deletion crates/storage/db/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
//! MDBX implementation for reth's database abstraction layer.
//!
//! This crate is an implementation of [`reth-db-api`] for MDBX, as well as a few other common
//! This crate is an implementation of `reth-db-api` for MDBX, as well as a few other common
//! database types.
//!
//! # Overview
Expand Down
2 changes: 1 addition & 1 deletion crates/trie/db/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
//! An integration of [`reth-trie`] with [`reth-db`].
//! An integration of `reth-trie` with `reth-db`.

#![cfg_attr(not(test), warn(unused_crate_dependencies))]

Expand Down
2 changes: 2 additions & 0 deletions deny.toml
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ ignore-sources = [
"https://github.com/scroll-tech/bn254",
"https://github.com/scroll-tech/zktrie.git",
"https://github.com/scroll-tech/scroll-revm.git",
"https://github.com/scroll-tech/da-codec.git",
]

[[licenses.clarify]]
Expand Down Expand Up @@ -109,4 +110,5 @@ allow-git = [
"https://github.com/scroll-tech/poseidon-bn254",
"https://github.com/scroll-tech/scroll-revm.git",
"https://github.com/scroll-tech/revm.git",
"https://github.com/scroll-tech/da-codec.git",
]
2 changes: 1 addition & 1 deletion docs/vocs/docs/pages/cli/reth/node.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -728,7 +728,7 @@ Pruning:
Prune receipts before the specified block number. The specified block number is not pruned

--prune.receiptslogfilter <FILTER_CONFIG>
Configure receipts log filter. Format: <`address`>:<`prune_mode`>[,<`address`>:<`prune_mode`>...] Where <`prune_mode`> can be 'full', 'distance:<`blocks`>', or 'before:<`block_number`>'
Configure receipts log filter. Format: <`address`>:<`prune_mode`>... where <`prune_mode`> can be 'full', 'distance:<`blocks`>', or 'before:<`block_number`>'

--prune.accounthistory.full
Prunes all account history
Expand Down
Loading