diff --git a/Cargo.lock b/Cargo.lock index b9f22de9..7553edd0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11627,6 +11627,7 @@ dependencies = [ "sea-orm", "serde", "serde_json", + "strum 0.27.2", "tempfile", "thiserror 2.0.16", "tokio", diff --git a/Cargo.toml b/Cargo.toml index afec22e7..1c6683d3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -228,6 +228,7 @@ reqwest = "0.12" serde = { version = "1.0" } serde_json = { version = "1.0" } sea-orm = { version = "1.1.0" } +strum = "0.27.1" thiserror = "2.0" tokio = { version = "1.39", default-features = false } tokio-stream = { version = "0.1", default-features = false } diff --git a/crates/chain-orchestrator/Cargo.toml b/crates/chain-orchestrator/Cargo.toml index 1596a1f4..a2c51183 100644 --- a/crates/chain-orchestrator/Cargo.toml +++ b/crates/chain-orchestrator/Cargo.toml @@ -54,7 +54,7 @@ futures.workspace = true metrics.workspace = true metrics-derive.workspace = true serde = { workspace = true, optional = true, features = ["derive"] } -strum = "0.27.1" +strum.workspace = true thiserror.workspace = true tokio = { workspace = true, features = ["full"] } tokio-stream.workspace = true diff --git a/crates/database/db/Cargo.toml b/crates/database/db/Cargo.toml index 708061f4..1f331462 100644 --- a/crates/database/db/Cargo.toml +++ b/crates/database/db/Cargo.toml @@ -30,6 +30,7 @@ metrics-derive.workspace = true sea-orm = { workspace = true, features = ["sqlx-sqlite", "runtime-tokio-native-tls", "macros"] } serde.workspace = true serde_json.workspace = true +strum.workspace = true tempfile = { version = "3.20.0", optional = true } thiserror.workspace = true tokio = { workspace = true, features = ["macros", "sync"] } diff --git a/crates/database/db/src/db.rs b/crates/database/db/src/db.rs index f28ae5bc..2705111e 100644 --- a/crates/database/db/src/db.rs +++ b/crates/database/db/src/db.rs @@ -1,7 +1,7 @@ use super::transaction::{DatabaseTransactionProvider, TXMut, TX}; use crate::{ error::DatabaseError, - metrics::DatabaseMetrics, + metrics::{DatabaseMetrics, DatabaseOperation, DatabaseOperationMetrics}, service::{query::DatabaseQuery, retry::Retry, DatabaseService, DatabaseServiceError}, DatabaseConnectionProvider, DatabaseReadOperations, DatabaseWriteOperations, L1MessageKey, UnwindResult, @@ -16,7 +16,10 @@ use sea_orm::{ sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}, DatabaseConnection, SqlxSqliteConnector, TransactionTrait, }; -use std::{fmt::Debug, future::Future, str::FromStr, sync::Arc, time::Duration}; +use std::{ + collections::HashMap, fmt::Debug, future::Future, str::FromStr, sync::Arc, time::Duration, +}; +use strum::IntoEnumIterator; use tokio::sync::{Mutex, Semaphore}; // TODO: make these configurable via CLI. @@ -36,13 +39,23 @@ const ACQUIRE_TIMEOUT_SECS: u64 = 5; #[derive(Debug)] pub struct Database { database: Retry>, + metrics: HashMap, } impl Database { /// Creates a new [`Database`] instance associated with the provided database URL. 
pub async fn new(database_url: &str) -> Result { let db = Arc::new(DatabaseInner::new(database_url).await?); - Ok(Self { database: Retry::new_with_default_config(db) }) + Ok(Self { database: Retry::new_with_default_config(db), metrics: Self::metrics() }) + } + + fn metrics() -> HashMap { + DatabaseOperation::iter() + .map(|i| { + let label = i.as_str(); + (i, DatabaseOperationMetrics::new_with_labels(&[("item", label)])) + }) + .collect() } /// Creates a new [`Database`] instance with SQLite-specific optimizations and custom pool @@ -64,7 +77,7 @@ impl Database { ) .await?, ); - Ok(Self { database: Retry::new_with_default_config(db) }) + Ok(Self { database: Retry::new_with_default_config(db), metrics: Self::metrics() }) } /// Creates a new [`Database`] instance for testing purposes, using the provided temporary @@ -72,7 +85,7 @@ impl Database { #[cfg(feature = "test-utils")] pub async fn test(dir: tempfile::TempDir) -> Result { let db = Arc::new(DatabaseInner::test(dir).await?); - Ok(Self { database: Retry::new_with_default_config(db) }) + Ok(Self { database: Retry::new_with_default_config(db), metrics: Self::metrics() }) } /// Returns a reference to the database tmp dir. @@ -113,14 +126,28 @@ impl Database { } } +/// Wraps a future, metering the completion of it. +macro_rules! metered { + ($task:expr, $this: ident, $method:ident($($args:expr),*)) => {{ + let metric = $this.metrics.get(&$task).expect("metric exists").clone(); + let now = std::time::Instant::now(); + let res = $this.$method($($args),*).await; + metric.task_duration.record(now.elapsed().as_secs_f64()); + res + }}; +} + #[async_trait::async_trait] impl DatabaseWriteOperations for Database { async fn insert_batch(&self, batch_commit: BatchCommitData) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| { - let batch_commit = batch_commit.clone(); - async move { tx.insert_batch(batch_commit).await } - }) - .await + metered!( + DatabaseOperation::InsertBatch, + self, + tx_mut(move |tx| { + let batch_commit = batch_commit.clone(); + async move { tx.insert_batch(batch_commit).await } + }) + ) } async fn finalize_batches_up_to_index( @@ -128,100 +155,145 @@ impl DatabaseWriteOperations for Database { batch_index: u64, block_number: u64, ) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| async move { - tx.finalize_batches_up_to_index(batch_index, block_number).await - }) - .await + metered!( + DatabaseOperation::FinalizeBatchesUpToIndex, + self, + tx_mut(move |tx| async move { + tx.finalize_batches_up_to_index(batch_index, block_number).await + }) + ) } async fn set_latest_l1_block_number(&self, block_number: u64) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| async move { tx.set_latest_l1_block_number(block_number).await }) - .await + metered!( + DatabaseOperation::SetLatestL1BlockNumber, + self, + tx_mut(move |tx| async move { tx.set_latest_l1_block_number(block_number).await }) + ) } async fn set_finalized_l1_block_number(&self, block_number: u64) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| async move { tx.set_finalized_l1_block_number(block_number).await }) - .await + metered!( + DatabaseOperation::SetFinalizedL1BlockNumber, + self, + tx_mut(move |tx| async move { tx.set_finalized_l1_block_number(block_number).await }) + ) } async fn set_processed_l1_block_number(&self, block_number: u64) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| async move { tx.set_processed_l1_block_number(block_number).await }) - .await + metered!( + DatabaseOperation::SetProcessedL1BlockNumber, + self, + tx_mut(move 
|tx| async move { tx.set_processed_l1_block_number(block_number).await }) + ) } async fn set_l2_head_block_number(&self, number: u64) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| async move { tx.set_l2_head_block_number(number).await }).await + metered!( + DatabaseOperation::SetL2HeadBlockNumber, + self, + tx_mut(move |tx| async move { tx.set_l2_head_block_number(number).await }) + ) } async fn fetch_and_update_unprocessed_finalized_batches( &self, finalized_l1_block_number: u64, ) -> Result, DatabaseError> { - self.tx_mut(move |tx| async move { - tx.fetch_and_update_unprocessed_finalized_batches(finalized_l1_block_number).await - }) - .await + metered!( + DatabaseOperation::FetchAndUpdateUnprocessedFinalizedBatches, + self, + tx_mut(move |tx| async move { + tx.fetch_and_update_unprocessed_finalized_batches(finalized_l1_block_number).await + }) + ) } async fn delete_batches_gt_block_number( &self, block_number: u64, ) -> Result { - self.tx_mut(move |tx| async move { tx.delete_batches_gt_block_number(block_number).await }) - .await + metered!( + DatabaseOperation::DeleteBatchesGtBlockNumber, + self, + tx_mut(move |tx| async move { tx.delete_batches_gt_block_number(block_number).await }) + ) } async fn delete_batches_gt_batch_index(&self, batch_index: u64) -> Result { - self.tx_mut(move |tx| async move { tx.delete_batches_gt_batch_index(batch_index).await }) - .await + metered!( + DatabaseOperation::DeleteBatchesGtBatchIndex, + self, + tx_mut(move |tx| async move { tx.delete_batches_gt_batch_index(batch_index).await }) + ) } async fn insert_l1_message(&self, l1_message: L1MessageEnvelope) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| { - let l1_message = l1_message.clone(); - async move { tx.insert_l1_message(l1_message).await } - }) - .await + metered!( + DatabaseOperation::InsertL1Message, + self, + tx_mut(move |tx| { + let l1_message = l1_message.clone(); + async move { tx.insert_l1_message(l1_message).await } + }) + ) } async fn update_skipped_l1_messages(&self, indexes: Vec) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| { - let indexes = indexes.clone(); - async move { tx.update_skipped_l1_messages(indexes).await } - }) - .await + metered!( + DatabaseOperation::UpdateSkippedL1Messages, + self, + tx_mut(move |tx| { + let indexes = indexes.clone(); + async move { tx.update_skipped_l1_messages(indexes).await } + }) + ) } async fn delete_l1_messages_gt( &self, l1_block_number: u64, ) -> Result, DatabaseError> { - self.tx_mut(move |tx| async move { tx.delete_l1_messages_gt(l1_block_number).await }).await + metered!( + DatabaseOperation::DeleteL1MessagesGt, + self, + tx_mut(move |tx| async move { tx.delete_l1_messages_gt(l1_block_number).await }) + ) } async fn prepare_on_startup( &self, genesis_hash: B256, ) -> Result<(Option, Option), DatabaseError> { - self.tx_mut(move |tx| async move { tx.prepare_on_startup(genesis_hash).await }).await + metered!( + DatabaseOperation::PrepareOnStartup, + self, + tx_mut(move |tx| async move { tx.prepare_on_startup(genesis_hash).await }) + ) } async fn delete_l2_blocks_gt_block_number( &self, block_number: u64, ) -> Result { - self.tx_mut( - move |tx| async move { tx.delete_l2_blocks_gt_block_number(block_number).await }, + metered!( + DatabaseOperation::DeleteL2BlocksGtBlockNumber, + self, + tx_mut( + move |tx| async move { tx.delete_l2_blocks_gt_block_number(block_number).await } + ) ) - .await } async fn delete_l2_blocks_gt_batch_index( &self, batch_index: u64, ) -> Result { - self.tx_mut(move |tx| async move { 
tx.delete_l2_blocks_gt_batch_index(batch_index).await }) - .await + metered!( + DatabaseOperation::DeleteL2BlocksGtBatchIndex, + self, + tx_mut(move |tx| async move { tx.delete_l2_blocks_gt_batch_index(batch_index).await }) + ) } async fn insert_blocks( @@ -229,11 +301,14 @@ impl DatabaseWriteOperations for Database { blocks: Vec, batch_info: BatchInfo, ) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| { - let blocks = blocks.clone(); - async move { tx.insert_blocks(blocks, batch_info).await } - }) - .await + metered!( + DatabaseOperation::InsertBlocks, + self, + tx_mut(move |tx| { + let blocks = blocks.clone(); + async move { tx.insert_blocks(blocks, batch_info).await } + }) + ) } async fn insert_block( @@ -241,54 +316,74 @@ impl DatabaseWriteOperations for Database { block_info: BlockInfo, batch_info: BatchInfo, ) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| async move { tx.insert_block(block_info, batch_info).await }).await + metered!( + DatabaseOperation::InsertBlock, + self, + tx_mut(move |tx| async move { tx.insert_block(block_info, batch_info).await }) + ) } async fn insert_genesis_block(&self, genesis_hash: B256) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| async move { tx.insert_genesis_block(genesis_hash).await }).await + metered!( + DatabaseOperation::InsertGenesisBlock, + self, + tx_mut(move |tx| async move { tx.insert_genesis_block(genesis_hash).await }) + ) } async fn update_l1_messages_from_l2_blocks( &self, blocks: Vec, ) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| { - let blocks = blocks.clone(); - async move { tx.update_l1_messages_from_l2_blocks(blocks).await } - }) - .await + metered!( + DatabaseOperation::UpdateL1MessagesFromL2Blocks, + self, + tx_mut(move |tx| { + let blocks = blocks.clone(); + async move { tx.update_l1_messages_from_l2_blocks(blocks).await } + }) + ) } async fn update_l1_messages_with_l2_block( &self, block_info: L2BlockInfoWithL1Messages, ) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| { - let block_info = block_info.clone(); - async move { tx.update_l1_messages_with_l2_block(block_info).await } - }) - .await + metered!( + DatabaseOperation::UpdateL1MessagesWithL2Block, + self, + tx_mut(move |tx| { + let block_info = block_info.clone(); + async move { tx.update_l1_messages_with_l2_block(block_info).await } + }) + ) } async fn purge_l1_message_to_l2_block_mappings( &self, block_number: Option, ) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| async move { - tx.purge_l1_message_to_l2_block_mappings(block_number).await - }) - .await + metered!( + DatabaseOperation::PurgeL1MessageToL2BlockMappings, + self, + tx_mut(move |tx| async move { + tx.purge_l1_message_to_l2_block_mappings(block_number).await + }) + ) } async fn insert_batch_consolidation_outcome( &self, outcome: BatchConsolidationOutcome, ) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| { - let outcome = outcome.clone(); - async move { tx.insert_batch_consolidation_outcome(outcome).await } - }) - .await + metered!( + DatabaseOperation::InsertBatchConsolidationOutcome, + self, + tx_mut(move |tx| { + let outcome = outcome.clone(); + async move { tx.insert_batch_consolidation_outcome(outcome).await } + }) + ) } async fn unwind( @@ -296,7 +391,11 @@ impl DatabaseWriteOperations for Database { genesis_hash: B256, l1_block_number: u64, ) -> Result { - self.tx_mut(move |tx| async move { tx.unwind(genesis_hash, l1_block_number).await }).await + metered!( + DatabaseOperation::Unwind, + self, + tx_mut(move |tx| async move { 
tx.unwind(genesis_hash, l1_block_number).await }) + ) } async fn insert_signature( @@ -304,7 +403,11 @@ impl DatabaseWriteOperations for Database { block_hash: B256, signature: Signature, ) -> Result<(), DatabaseError> { - self.tx_mut(move |tx| async move { tx.insert_signature(block_hash, signature).await }).await + metered!( + DatabaseOperation::InsertSignature, + self, + tx_mut(move |tx| async move { tx.insert_signature(block_hash, signature).await }) + ) } } @@ -314,23 +417,43 @@ impl DatabaseReadOperations for Database { &self, batch_index: u64, ) -> Result, DatabaseError> { - self.tx(move |tx| async move { tx.get_batch_by_index(batch_index).await }).await + metered!( + DatabaseOperation::GetBatchByIndex, + self, + tx(move |tx| async move { tx.get_batch_by_index(batch_index).await }) + ) } async fn get_latest_l1_block_number(&self) -> Result { - self.tx(|tx| async move { tx.get_latest_l1_block_number().await }).await + metered!( + DatabaseOperation::GetLatestL1BlockNumber, + self, + tx(|tx| async move { tx.get_latest_l1_block_number().await }) + ) } async fn get_finalized_l1_block_number(&self) -> Result { - self.tx(|tx| async move { tx.get_finalized_l1_block_number().await }).await + metered!( + DatabaseOperation::GetFinalizedL1BlockNumber, + self, + tx(|tx| async move { tx.get_finalized_l1_block_number().await }) + ) } async fn get_processed_l1_block_number(&self) -> Result { - self.tx(|tx| async move { tx.get_processed_l1_block_number().await }).await + metered!( + DatabaseOperation::GetProcessedL1BlockNumber, + self, + tx(|tx| async move { tx.get_processed_l1_block_number().await }) + ) } async fn get_l2_head_block_number(&self) -> Result { - self.tx(|tx| async move { tx.get_l2_head_block_number().await }).await + metered!( + DatabaseOperation::GetL2HeadBlockNumber, + self, + tx(|tx| async move { tx.get_l2_head_block_number().await }) + ) } async fn get_n_l1_messages( @@ -338,11 +461,14 @@ impl DatabaseReadOperations for Database { start: Option, n: usize, ) -> Result, DatabaseError> { - self.tx(move |tx| { - let start = start.clone(); - async move { tx.get_n_l1_messages(start, n).await } - }) - .await + metered!( + DatabaseOperation::GetNL1Messages, + self, + tx(move |tx| { + let start = start.clone(); + async move { tx.get_n_l1_messages(start, n).await } + }) + ) } async fn get_n_l2_block_data_hint( @@ -350,48 +476,73 @@ impl DatabaseReadOperations for Database { block_number: u64, n: usize, ) -> Result, DatabaseError> { - self.tx(move |tx| async move { tx.get_n_l2_block_data_hint(block_number, n).await }).await + metered!( + DatabaseOperation::GetNL2BlockDataHint, + self, + tx(move |tx| async move { tx.get_n_l2_block_data_hint(block_number, n).await }) + ) } async fn get_l2_block_and_batch_info_by_hash( &self, block_hash: B256, ) -> Result, DatabaseError> { - self.tx(move |tx| async move { tx.get_l2_block_and_batch_info_by_hash(block_hash).await }) - .await + metered!( + DatabaseOperation::GetL2BlockAndBatchInfoByHash, + self, + tx(move |tx| async move { tx.get_l2_block_and_batch_info_by_hash(block_hash).await }) + ) } async fn get_l2_block_info_by_number( &self, block_number: u64, ) -> Result, DatabaseError> { - self.tx(move |tx| async move { tx.get_l2_block_info_by_number(block_number).await }).await + metered!( + DatabaseOperation::GetL2BlockInfoByNumber, + self, + tx(move |tx| async move { tx.get_l2_block_info_by_number(block_number).await }) + ) } async fn get_latest_safe_l2_info( &self, ) -> Result, DatabaseError> { - self.tx(|tx| async move { 
tx.get_latest_safe_l2_info().await }).await + metered!( + DatabaseOperation::GetLatestSafeL2Info, + self, + tx(|tx| async move { tx.get_latest_safe_l2_info().await }) + ) } async fn get_highest_block_for_batch_hash( &self, batch_hash: B256, ) -> Result, DatabaseError> { - self.tx(move |tx| async move { tx.get_highest_block_for_batch_hash(batch_hash).await }) - .await + metered!( + DatabaseOperation::GetHighestBlockForBatchHash, + self, + tx(move |tx| async move { tx.get_highest_block_for_batch_hash(batch_hash).await }) + ) } async fn get_highest_block_for_batch_index( &self, batch_index: u64, ) -> Result, DatabaseError> { - self.tx(move |tx| async move { tx.get_highest_block_for_batch_index(batch_index).await }) - .await + metered!( + DatabaseOperation::GetHighestBlockForBatchIndex, + self, + tx(move |tx| async move { tx.get_highest_block_for_batch_index(batch_index).await }) + ) } async fn get_signature(&self, block_hash: B256) -> Result, DatabaseError> { - self.tx(move |tx| async move { tx.get_signature(block_hash).await }).await + metered!( + DatabaseOperation::GetSignature, + self, + tx(move |tx| async move { tx.get_signature(block_hash).await }) + ) } } @@ -488,8 +639,12 @@ impl DatabaseTransactionProvider for DatabaseInner { /// Creates a new [`TX`] which can be used for read-only operations. async fn tx(&self) -> Result { tracing::trace!(target: "scroll::db", "Creating new read-only transaction"); + let now = std::time::Instant::now(); let permit = self.read_locks.clone().acquire_owned().await.unwrap(); - Ok(TX::new(self.connection.clone().begin().await?, permit)) + let tx = TX::new(self.connection.clone().begin().await?, permit); + let duration = now.elapsed().as_millis() as f64; + self.metrics.read_lock_acquire_duration.record(duration); + Ok(tx) } /// Creates a new [`TXMut`] which can be used for atomic read and write operations. @@ -499,7 +654,7 @@ impl DatabaseTransactionProvider for DatabaseInner { let guard = self.write_lock.clone().lock_owned().await; let tx_mut = TXMut::new(self.connection.clone().begin().await?, guard); let duration = now.elapsed().as_millis() as f64; - self.metrics.lock_acquire_duration.record(duration); + self.metrics.write_lock_acquire_duration.record(duration); tracing::trace!(target: "scroll::db", "Acquired write lock in {} ms", duration); Ok(tx_mut) } diff --git a/crates/database/db/src/metrics.rs b/crates/database/db/src/metrics.rs index 9fea1258..a352ea61 100644 --- a/crates/database/db/src/metrics.rs +++ b/crates/database/db/src/metrics.rs @@ -1,11 +1,113 @@ use metrics::Histogram; use metrics_derive::Metrics; +use strum::EnumIter; -/// The metrics for the [`super::Database`]. +/// The metrics for the [`super::db::DatabaseInner`]. #[derive(Metrics, Clone)] #[metrics(scope = "database")] pub(crate) struct DatabaseMetrics { - /// Time (ms) to acquire a DB lock/connection - #[metric(describe = "Time to acquire a database lock (ms)")] - pub lock_acquire_duration: Histogram, + /// Time (ms) to acquire a DB read lock/connection. + #[metric(describe = "Time to acquire a database read lock (ms)")] + pub read_lock_acquire_duration: Histogram, + /// Time (ms) to acquire a DB write lock/connection. 
+    #[metric(describe = "Time to acquire a database write lock (ms)")]
+    pub write_lock_acquire_duration: Histogram,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, EnumIter)]
+pub(crate) enum DatabaseOperation {
+    // Write operations
+    InsertBatch,
+    FinalizeBatchesUpToIndex,
+    SetLatestL1BlockNumber,
+    SetFinalizedL1BlockNumber,
+    SetProcessedL1BlockNumber,
+    SetL2HeadBlockNumber,
+    FetchAndUpdateUnprocessedFinalizedBatches,
+    DeleteBatchesGtBlockNumber,
+    DeleteBatchesGtBatchIndex,
+    InsertL1Message,
+    UpdateSkippedL1Messages,
+    DeleteL1MessagesGt,
+    PrepareOnStartup,
+    DeleteL2BlocksGtBlockNumber,
+    DeleteL2BlocksGtBatchIndex,
+    InsertBlocks,
+    InsertBlock,
+    InsertGenesisBlock,
+    UpdateL1MessagesFromL2Blocks,
+    UpdateL1MessagesWithL2Block,
+    PurgeL1MessageToL2BlockMappings,
+    InsertBatchConsolidationOutcome,
+    Unwind,
+    InsertSignature,
+    // Read operations
+    GetBatchByIndex,
+    GetLatestL1BlockNumber,
+    GetFinalizedL1BlockNumber,
+    GetProcessedL1BlockNumber,
+    GetL2HeadBlockNumber,
+    GetNL1Messages,
+    GetNL2BlockDataHint,
+    GetL2BlockAndBatchInfoByHash,
+    GetL2BlockInfoByNumber,
+    GetLatestSafeL2Info,
+    GetHighestBlockForBatchHash,
+    GetHighestBlockForBatchIndex,
+    GetSignature,
+}
+
+impl DatabaseOperation {
+    /// Returns the str representation of the [`DatabaseOperation`].
+    pub(crate) const fn as_str(&self) -> &'static str {
+        match self {
+            Self::InsertBatch => "insert_batch",
+            Self::FinalizeBatchesUpToIndex => "finalize_batches_up_to_index",
+            Self::SetLatestL1BlockNumber => "set_latest_l1_block_number",
+            Self::SetFinalizedL1BlockNumber => "set_finalized_l1_block_number",
+            Self::SetProcessedL1BlockNumber => "set_processed_l1_block_number",
+            Self::SetL2HeadBlockNumber => "set_l2_head_block_number",
+            Self::FetchAndUpdateUnprocessedFinalizedBatches => {
+                "fetch_and_update_unprocessed_finalized_batches"
+            }
+            Self::DeleteBatchesGtBlockNumber => "delete_batches_gt_block_number",
+            Self::DeleteBatchesGtBatchIndex => "delete_batches_gt_batch_index",
+            Self::InsertL1Message => "insert_l1_message",
+            Self::UpdateSkippedL1Messages => "update_skipped_l1_messages",
+            Self::DeleteL1MessagesGt => "delete_l1_messages_gt",
+            Self::PrepareOnStartup => "prepare_on_startup",
+            Self::DeleteL2BlocksGtBlockNumber => "delete_l2_blocks_gt_block_number",
+            Self::DeleteL2BlocksGtBatchIndex => "delete_l2_blocks_gt_batch_index",
+            Self::InsertBlocks => "insert_blocks",
+            Self::InsertBlock => "insert_block",
+            Self::InsertGenesisBlock => "insert_genesis_block",
+            Self::UpdateL1MessagesFromL2Blocks => "update_l1_messages_from_l2_blocks",
+            Self::UpdateL1MessagesWithL2Block => "update_l1_messages_with_l2_block",
+            Self::PurgeL1MessageToL2BlockMappings => "purge_l1_message_to_l2_block_mappings",
+            Self::InsertBatchConsolidationOutcome => "insert_batch_consolidation_outcome",
+            Self::Unwind => "unwind",
+            Self::InsertSignature => "insert_signature",
+            Self::GetBatchByIndex => "get_batch_by_index",
+            Self::GetLatestL1BlockNumber => "get_latest_l1_block_number",
+            Self::GetFinalizedL1BlockNumber => "get_finalized_l1_block_number",
+            Self::GetProcessedL1BlockNumber => "get_processed_l1_block_number",
+            Self::GetL2HeadBlockNumber => "get_l2_head_block_number",
+            Self::GetNL1Messages => "get_n_l1_messages",
+            Self::GetNL2BlockDataHint => "get_n_l2_block_data_hint",
+            Self::GetL2BlockAndBatchInfoByHash => "get_l2_block_and_batch_info_by_hash",
+            Self::GetL2BlockInfoByNumber => "get_l2_block_info_by_number",
+            Self::GetLatestSafeL2Info => "get_latest_safe_l2_info",
+            Self::GetHighestBlockForBatchHash => "get_highest_block_for_batch_hash",
+            Self::GetHighestBlockForBatchIndex => "get_highest_block_for_batch_index",
+            Self::GetSignature => "get_signature",
+        }
+    }
+}
+
+/// The per-operation metrics for the [`super::Database`].
+#[derive(Metrics, Clone)]
+#[metrics(scope = "database")]
+pub(crate) struct DatabaseOperationMetrics {
+    /// The duration of the database operation (s).
+    pub task_duration: Histogram,
+}