diff --git a/consensus/core/src/errors/consensus.rs b/consensus/core/src/errors/consensus.rs index 58c5ed35e9..f717687aa9 100644 --- a/consensus/core/src/errors/consensus.rs +++ b/consensus/core/src/errors/consensus.rs @@ -32,6 +32,12 @@ pub enum ConsensusError { #[error("difficulty error: {0}")] DifficultyError(#[from] DifficultyError), + #[error("under min allowed window size ({0} < {1})")] + UnderMinWindowSizeAllowed(usize, usize), + + #[error("window data has only {0} entries -- this usually happens when the node has just begun syncing")] + InsufficientWindowData(usize), + #[error("{0}")] General(&'static str), diff --git a/consensus/core/src/errors/difficulty.rs b/consensus/core/src/errors/difficulty.rs index 2924e24191..f93065a7a2 100644 --- a/consensus/core/src/errors/difficulty.rs +++ b/consensus/core/src/errors/difficulty.rs @@ -2,12 +2,6 @@ use thiserror::Error; #[derive(Error, Debug, Clone)] pub enum DifficultyError { - #[error("under min allowed window size ({0} < {1})")] - UnderMinWindowSizeAllowed(usize, usize), - - #[error("window data has only {0} entries -- this usually happens when the node has just began syncing")] - InsufficientWindowData(usize), - #[error("min window timestamp is equal to the max window timestamp")] EmptyTimestampRange, } diff --git a/consensus/src/consensus/mod.rs b/consensus/src/consensus/mod.rs index 17637dc20d..81e61c36cb 100644 --- a/consensus/src/consensus/mod.rs +++ b/consensus/src/consensus/mod.rs @@ -39,6 +39,7 @@ use crate::{ ProcessingCounters, }, processes::{ + difficulty::calc_work, ghostdag::ordering::SortableBlock, window::{WindowManager, WindowType}, }, @@ -58,7 +59,6 @@ use kaspa_consensus_core::{ errors::{ coinbase::CoinbaseResult, consensus::{ConsensusError, ConsensusResult}, - difficulty::DifficultyError, pruning::PruningImportError, tx::TxResult, }, @@ -469,12 +469,41 @@ impl Consensus { } fn estimate_network_hashes_per_second_impl(&self, ghostdag_data: &GhostdagData, window_size: usize) -> ConsensusResult 
{ - let window = match self.services.window_manager.block_window(ghostdag_data, WindowType::VaryingWindow(window_size)) { - Ok(w) => w, - Err(RuleError::InsufficientDaaWindowSize(s)) => return Err(DifficultyError::InsufficientWindowData(s).into()), - Err(e) => panic!("unexpected error: {e}"), - }; - Ok(self.services.window_manager.estimate_network_hashes_per_second(window)?) + const MIN_WINDOW_SIZE: usize = 1000; + if window_size < MIN_WINDOW_SIZE { + return Err(ConsensusError::UnderMinWindowSizeAllowed(window_size, MIN_WINDOW_SIZE)); + } + + let mut count = 0; + let mut red_work: BlueWorkType = 0.into(); + let mut bottom = ghostdag_data.selected_parent; + for chain_block in self.services.reachability_service.default_backward_chain_iterator(ghostdag_data.selected_parent) { + let gd = self.get_ghostdag_data(chain_block).unwrap(); + for red in &gd.mergeset_reds { + let red_header = self.headers_store.get_header(*red).unwrap(); + red_work = red_work + calc_work(red_header.bits); + } + count += gd.mergeset_blues.len() + gd.mergeset_reds.len(); + bottom = chain_block; + if count >= window_size { + break; + } + } + + if count < window_size { + return Err(ConsensusError::InsufficientWindowData(count)); + } + + let sp_header = self.headers_store.get_header(ghostdag_data.selected_parent).unwrap(); + let bottom_header = self.headers_store.get_header(bottom).unwrap(); + let blue_work = sp_header.blue_work - bottom_header.blue_work; + let total_work = blue_work + red_work; + let time_diff = (sp_header.timestamp - bottom_header.timestamp) / 1000; // Time difference in seconds + if time_diff == 0 { + return Err(ConsensusError::General("time difference is zero, cannot estimate hashes per second")); + } + let hashes_per_second = (total_work / time_diff).as_u64(); + Ok(hashes_per_second) } fn pruning_point_compact_headers(&self) -> Vec<(Hash, CompactHeaderData)> { @@ -1115,7 +1144,7 @@ impl ConsensusApi for Consensus { let ghostdag_data = 
self.ghostdag_store.get_data(hash).unwrap(); // The selected parent header is used within to check for sampling activation, so we verify its existence first if !self.headers_store.has(ghostdag_data.selected_parent).unwrap() { - return Err(ConsensusError::DifficultyError(DifficultyError::InsufficientWindowData(0))); + return Err(ConsensusError::InsufficientWindowData(0)); } self.estimate_network_hashes_per_second_impl(&ghostdag_data, window_size) } diff --git a/consensus/src/processes/difficulty.rs b/consensus/src/processes/difficulty.rs index c14ee35cb6..706b837fad 100644 --- a/consensus/src/processes/difficulty.rs +++ b/consensus/src/processes/difficulty.rs @@ -5,7 +5,6 @@ use crate::model::stores::{ }; use kaspa_consensus_core::{ config::params::{ForkActivation, MAX_DIFFICULTY_TARGET_AS_F64}, - errors::difficulty::{DifficultyError, DifficultyResult}, BlockHashSet, BlueWorkType, MAX_WORK_LEVEL, }; use kaspa_core::{info, log::CRESCENDO_KEYWORD}; @@ -44,29 +43,6 @@ trait DifficultyManagerExtension { .collect() } - fn internal_estimate_network_hashes_per_second(&self, window: &BlockWindowHeap) -> DifficultyResult { - // TODO: perhaps move this const - const MIN_WINDOW_SIZE: usize = 1000; - let window_size = window.len(); - if window_size < MIN_WINDOW_SIZE { - return Err(DifficultyError::UnderMinWindowSizeAllowed(window_size, MIN_WINDOW_SIZE)); - } - let difficulty_blocks = self.get_difficulty_blocks(window); - let (min_ts, max_ts) = difficulty_blocks.iter().map(|x| x.timestamp).minmax().into_option().unwrap(); - if min_ts == max_ts { - return Err(DifficultyError::EmptyTimestampRange); - } - let window_duration = (max_ts - min_ts) / 1000; // Divided by 1000 to convert milliseconds to seconds - if window_duration == 0 { - return Ok(0); - } - - let (min_blue_work, max_blue_work) = - difficulty_blocks.iter().map(|x| x.sortable_block.blue_work).minmax().into_option().unwrap(); - - Ok(((max_blue_work - min_blue_work) / window_duration).as_u64()) - } - #[inline] fn 
check_min_difficulty_window_size(difficulty_window_size: usize, min_difficulty_window_size: usize) { assert!( @@ -156,10 +132,6 @@ impl FullDifficultyManager { let new_target = average_target * max(max_ts - min_ts, 1) / (self.target_time_per_block * difficulty_blocks_len); Uint256::try_from(new_target.min(self.max_difficulty_target)).expect("max target < Uint256::MAX").compact_target_bits() } - - pub fn estimate_network_hashes_per_second(&self, window: &BlockWindowHeap) -> DifficultyResult { - self.internal_estimate_network_hashes_per_second(window) - } } impl DifficultyManagerExtension for FullDifficultyManager { @@ -396,10 +368,6 @@ impl SampledDifficultyManager DifficultyResult { - self.internal_estimate_network_hashes_per_second(window) - } } impl DifficultyManagerExtension for SampledDifficultyManager { diff --git a/consensus/src/processes/window.rs b/consensus/src/processes/window.rs index 4a388a84a0..45632f23a9 100644 --- a/consensus/src/processes/window.rs +++ b/consensus/src/processes/window.rs @@ -10,7 +10,7 @@ use crate::{ use kaspa_consensus_core::{ blockhash::{BlockHashExtensions, ORIGIN}, config::{genesis::GenesisBlock, params::ForkActivation}, - errors::{block::RuleError, difficulty::DifficultyResult}, + errors::block::RuleError, BlockHashSet, BlueWorkType, HashMapCustomHasher, }; use kaspa_core::{info, log::CRESCENDO_KEYWORD}; @@ -57,7 +57,6 @@ pub trait WindowManager { fn calculate_difficulty_bits(&self, ghostdag_data: &GhostdagData, daa_window: &DaaWindow) -> u32; fn calc_past_median_time(&self, ghostdag_data: &GhostdagData) -> Result<(u64, Arc), RuleError>; fn calc_past_median_time_for_known_hash(&self, hash: Hash) -> Result; - fn estimate_network_hashes_per_second(&self, window: Arc) -> DifficultyResult; fn window_size(&self, ghostdag_data: &GhostdagData, window_type: WindowType) -> usize; fn sample_rate(&self, ghostdag_data: &GhostdagData, window_type: WindowType) -> u64; @@ -263,10 +262,6 @@ impl) -> DifficultyResult { - 
self.difficulty_manager.estimate_network_hashes_per_second(&window) - } - fn window_size(&self, _ghostdag_data: &GhostdagData, window_type: WindowType) -> usize { match window_type { WindowType::DifficultyWindow => self.difficulty_window_size, @@ -682,10 +677,6 @@ impl) -> DifficultyResult { - self.difficulty_manager.estimate_network_hashes_per_second(&window) - } - fn window_size(&self, _ghostdag_data: &GhostdagData, window_type: WindowType) -> usize { match window_type { WindowType::DifficultyWindow => self.difficulty_window_size, @@ -865,10 +856,6 @@ impl) -> DifficultyResult { - self.sampled_window_manager.estimate_network_hashes_per_second(window) - } - fn window_size(&self, ghostdag_data: &GhostdagData, window_type: WindowType) -> usize { match self.sampling(ghostdag_data.selected_parent) { true => self.sampled_window_manager.window_size(ghostdag_data, window_type),