Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions consensus/core/src/errors/consensus.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,12 @@ pub enum ConsensusError {
#[error("difficulty error: {0}")]
DifficultyError(#[from] DifficultyError),

#[error("under min allowed window size ({0} < {1})")]
UnderMinWindowSizeAllowed(usize, usize),

#[error("window data has only {0} entries -- this usually happens when the node has just began syncing")]
InsufficientWindowData(usize),

#[error("{0}")]
General(&'static str),

Expand Down
6 changes: 0 additions & 6 deletions consensus/core/src/errors/difficulty.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,6 @@ use thiserror::Error;

#[derive(Error, Debug, Clone)]
pub enum DifficultyError {
#[error("under min allowed window size ({0} < {1})")]
UnderMinWindowSizeAllowed(usize, usize),

#[error("window data has only {0} entries -- this usually happens when the node has just began syncing")]
InsufficientWindowData(usize),

#[error("min window timestamp is equal to the max window timestamp")]
EmptyTimestampRange,
}
Expand Down
45 changes: 37 additions & 8 deletions consensus/src/consensus/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ use crate::{
ProcessingCounters,
},
processes::{
difficulty::calc_work,
ghostdag::ordering::SortableBlock,
window::{WindowManager, WindowType},
},
Expand All @@ -58,7 +59,6 @@ use kaspa_consensus_core::{
errors::{
coinbase::CoinbaseResult,
consensus::{ConsensusError, ConsensusResult},
difficulty::DifficultyError,
pruning::PruningImportError,
tx::TxResult,
},
Expand Down Expand Up @@ -469,12 +469,41 @@ impl Consensus {
}

fn estimate_network_hashes_per_second_impl(&self, ghostdag_data: &GhostdagData, window_size: usize) -> ConsensusResult<u64> {
let window = match self.services.window_manager.block_window(ghostdag_data, WindowType::VaryingWindow(window_size)) {
Ok(w) => w,
Err(RuleError::InsufficientDaaWindowSize(s)) => return Err(DifficultyError::InsufficientWindowData(s).into()),
Err(e) => panic!("unexpected error: {e}"),
};
Ok(self.services.window_manager.estimate_network_hashes_per_second(window)?)
const MIN_WINDOW_SIZE: usize = 1000;
if window_size < MIN_WINDOW_SIZE {
return Err(ConsensusError::UnderMinWindowSizeAllowed(window_size, MIN_WINDOW_SIZE));
}

let mut count = 0;
let mut red_work: BlueWorkType = 0.into();
let mut bottom = ghostdag_data.selected_parent;
for chain_block in self.services.reachability_service.default_backward_chain_iterator(ghostdag_data.selected_parent) {
let gd = self.get_ghostdag_data(chain_block).unwrap();
for red in &gd.mergeset_reds {
let red_header = self.headers_store.get_header(*red).unwrap();
Copy link
Collaborator

@IzioDev IzioDev Aug 9, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is it safe to assume the header will be in the store for the whole rpc thread execution?

Is there a lock on the store?

edit: should be reasonable to assume the iterator won't go up until pruning point since the window is meant to be reasonable

red_work = red_work + calc_work(red_header.bits);
}
count += gd.mergeset_blues.len() + gd.mergeset_reds.len();
bottom = chain_block;
if count >= window_size {
break;
}
}

if count < window_size {
return Err(ConsensusError::InsufficientWindowData(count));
}

let sp_header = self.headers_store.get_header(ghostdag_data.selected_parent).unwrap();
let bottom_header = self.headers_store.get_header(bottom).unwrap();
let blue_work = sp_header.blue_work - bottom_header.blue_work;
let total_work = blue_work + red_work;
let time_diff = (sp_header.timestamp - bottom_header.timestamp) / 1000; // Time difference in seconds
if time_diff == 0 {
return Err(ConsensusError::General("time difference is zero, cannot estimate hashes per second"));
}
let hashes_per_second = (total_work / time_diff).as_u64();
Ok(hashes_per_second)
}

fn pruning_point_compact_headers(&self) -> Vec<(Hash, CompactHeaderData)> {
Expand Down Expand Up @@ -1115,7 +1144,7 @@ impl ConsensusApi for Consensus {
let ghostdag_data = self.ghostdag_store.get_data(hash).unwrap();
// The selected parent header is used within to check for sampling activation, so we verify its existence first
if !self.headers_store.has(ghostdag_data.selected_parent).unwrap() {
return Err(ConsensusError::DifficultyError(DifficultyError::InsufficientWindowData(0)));
return Err(ConsensusError::InsufficientWindowData(0));
}
self.estimate_network_hashes_per_second_impl(&ghostdag_data, window_size)
}
Expand Down
32 changes: 0 additions & 32 deletions consensus/src/processes/difficulty.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ use crate::model::stores::{
};
use kaspa_consensus_core::{
config::params::{ForkActivation, MAX_DIFFICULTY_TARGET_AS_F64},
errors::difficulty::{DifficultyError, DifficultyResult},
BlockHashSet, BlueWorkType, MAX_WORK_LEVEL,
};
use kaspa_core::{info, log::CRESCENDO_KEYWORD};
Expand Down Expand Up @@ -44,29 +43,6 @@ trait DifficultyManagerExtension {
.collect()
}

/// Estimates the network hashrate from a pre-computed block window by dividing the
/// blue-work spread of the window by its timestamp spread (in seconds).
///
/// # Errors
/// * `DifficultyError::UnderMinWindowSizeAllowed` — window smaller than the minimum.
/// * `DifficultyError::EmptyTimestampRange` — all window timestamps are identical.
fn internal_estimate_network_hashes_per_second(&self, window: &BlockWindowHeap) -> DifficultyResult<u64> {
    // TODO: perhaps move this const
    const MIN_WINDOW_SIZE: usize = 1000;
    let window_size = window.len();
    if window_size < MIN_WINDOW_SIZE {
        return Err(DifficultyError::UnderMinWindowSizeAllowed(window_size, MIN_WINDOW_SIZE));
    }
    let difficulty_blocks = self.get_difficulty_blocks(window);
    // Window is non-empty here (size >= MIN_WINDOW_SIZE), so minmax() yields Some.
    let (min_ts, max_ts) = difficulty_blocks.iter().map(|x| x.timestamp).minmax().into_option().unwrap();
    if min_ts == max_ts {
        return Err(DifficultyError::EmptyTimestampRange);
    }
    let window_duration = (max_ts - min_ts) / 1000; // Divided by 1000 to convert milliseconds to seconds
    // Sub-second spread truncates to zero; report a zero-hashrate estimate rather than divide by zero.
    if window_duration == 0 {
        return Ok(0);
    }

    let (min_blue_work, max_blue_work) =
        difficulty_blocks.iter().map(|x| x.sortable_block.blue_work).minmax().into_option().unwrap();

    // Work accumulated across the window divided by elapsed seconds = hashes per second.
    Ok(((max_blue_work - min_blue_work) / window_duration).as_u64())
}

#[inline]
fn check_min_difficulty_window_size(difficulty_window_size: usize, min_difficulty_window_size: usize) {
assert!(
Expand Down Expand Up @@ -156,10 +132,6 @@ impl<T: HeaderStoreReader> FullDifficultyManager<T> {
let new_target = average_target * max(max_ts - min_ts, 1) / (self.target_time_per_block * difficulty_blocks_len);
Uint256::try_from(new_target.min(self.max_difficulty_target)).expect("max target < Uint256::MAX").compact_target_bits()
}

pub fn estimate_network_hashes_per_second(&self, window: &BlockWindowHeap) -> DifficultyResult<u64> {
self.internal_estimate_network_hashes_per_second(window)
}
}

impl<T: HeaderStoreReader> DifficultyManagerExtension for FullDifficultyManager<T> {
Expand Down Expand Up @@ -396,10 +368,6 @@ impl<T: HeaderStoreReader, U: GhostdagStoreReader> SampledDifficultyManager<T, U

Uint256::try_from(new_target.min(self.max_difficulty_target)).expect("max target < Uint256::MAX").compact_target_bits()
}

pub fn estimate_network_hashes_per_second(&self, window: &BlockWindowHeap) -> DifficultyResult<u64> {
self.internal_estimate_network_hashes_per_second(window)
}
}

impl<T: HeaderStoreReader, U: GhostdagStoreReader> DifficultyManagerExtension for SampledDifficultyManager<T, U> {
Expand Down
15 changes: 1 addition & 14 deletions consensus/src/processes/window.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use crate::{
use kaspa_consensus_core::{
blockhash::{BlockHashExtensions, ORIGIN},
config::{genesis::GenesisBlock, params::ForkActivation},
errors::{block::RuleError, difficulty::DifficultyResult},
errors::block::RuleError,
BlockHashSet, BlueWorkType, HashMapCustomHasher,
};
use kaspa_core::{info, log::CRESCENDO_KEYWORD};
Expand Down Expand Up @@ -57,7 +57,6 @@ pub trait WindowManager {
fn calculate_difficulty_bits(&self, ghostdag_data: &GhostdagData, daa_window: &DaaWindow) -> u32;
fn calc_past_median_time(&self, ghostdag_data: &GhostdagData) -> Result<(u64, Arc<BlockWindowHeap>), RuleError>;
fn calc_past_median_time_for_known_hash(&self, hash: Hash) -> Result<u64, RuleError>;
fn estimate_network_hashes_per_second(&self, window: Arc<BlockWindowHeap>) -> DifficultyResult<u64>;
fn window_size(&self, ghostdag_data: &GhostdagData, window_type: WindowType) -> usize;
fn sample_rate(&self, ghostdag_data: &GhostdagData, window_type: WindowType) -> u64;

Expand Down Expand Up @@ -263,10 +262,6 @@ impl<T: GhostdagStoreReader, U: BlockWindowCacheReader + BlockWindowCacheWriter,
}
}

fn estimate_network_hashes_per_second(&self, window: Arc<BlockWindowHeap>) -> DifficultyResult<u64> {
self.difficulty_manager.estimate_network_hashes_per_second(&window)
}

fn window_size(&self, _ghostdag_data: &GhostdagData, window_type: WindowType) -> usize {
match window_type {
WindowType::DifficultyWindow => self.difficulty_window_size,
Expand Down Expand Up @@ -682,10 +677,6 @@ impl<T: GhostdagStoreReader, U: BlockWindowCacheReader + BlockWindowCacheWriter,
}
}

fn estimate_network_hashes_per_second(&self, window: Arc<BlockWindowHeap>) -> DifficultyResult<u64> {
self.difficulty_manager.estimate_network_hashes_per_second(&window)
}

fn window_size(&self, _ghostdag_data: &GhostdagData, window_type: WindowType) -> usize {
match window_type {
WindowType::DifficultyWindow => self.difficulty_window_size,
Expand Down Expand Up @@ -865,10 +856,6 @@ impl<T: GhostdagStoreReader, U: BlockWindowCacheReader + BlockWindowCacheWriter,
}
}

fn estimate_network_hashes_per_second(&self, window: Arc<BlockWindowHeap>) -> DifficultyResult<u64> {
self.sampled_window_manager.estimate_network_hashes_per_second(window)
}

fn window_size(&self, ghostdag_data: &GhostdagData, window_type: WindowType) -> usize {
match self.sampling(ghostdag_data.selected_parent) {
true => self.sampled_window_manager.window_size(ghostdag_data, window_type),
Expand Down