@@ -706,7 +706,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
706706 let amt = directed_info. effective_capacity ( ) . as_msat ( ) ;
707707 let dir_liq = liq. as_directed ( source, target, 0 , amt, self . decay_params ) ;
708708
709- let ( min_buckets, max_buckets, _) = dir_liq. liquidity_history
709+ let ( min_buckets, max_buckets, _, _ ) = dir_liq. liquidity_history
710710 . get_decayed_buckets ( now, * dir_liq. last_updated ,
711711 self . decay_params . historical_no_updates_half_life ) ;
712712
@@ -788,7 +788,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
788788 /// in the top and bottom bucket, and roughly with similar (recent) frequency.
789789 ///
790790 /// Because the datapoints are decayed slowly over time, values will eventually return to
791- /// `Some(([0; 32], [0; 32]))`.
791+ /// `Some(([1; 32], [1; 32]))` and then to `None` once no datapoints remain.
792792 ///
793793 /// In order to fetch a single success probability from the buckets provided here, as used in
794794 /// the scoring model, see [`Self::historical_estimated_payment_success_probability`].
@@ -802,9 +802,18 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
802802 let amt = directed_info. effective_capacity ( ) . as_msat ( ) ;
803803 let dir_liq = liq. as_directed ( source, target, 0 , amt, self . decay_params ) ;
804804
805- let ( min_buckets, mut max_buckets, _) = dir_liq. liquidity_history
806- . get_decayed_buckets ( dir_liq. now , * dir_liq. last_updated ,
807- self . decay_params . historical_no_updates_half_life ) ;
805+ let ( min_buckets, mut max_buckets, valid_points, required_decays) =
806+ dir_liq. liquidity_history . get_decayed_buckets (
807+ dir_liq. now , * dir_liq. last_updated ,
808+ self . decay_params . historical_no_updates_half_life
809+ ) ;
810+
811+ // If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point
812+ // scheme), treat it as if we were fully decayed.
813+ if valid_points. checked_shr ( required_decays) . unwrap_or ( 0 ) < 32 * 32 {
814+ return None ;
815+ }
816+
808817 // Note that the liquidity buckets are an offset from the edge, so we inverse
809818 // the max order to get the probabilities from zero.
810819 max_buckets. reverse ( ) ;
@@ -1747,15 +1756,23 @@ mod bucketed_history {
17471756 impl < D : Deref < Target = HistoricalBucketRangeTracker > > HistoricalMinMaxBuckets < D > {
17481757 #[ inline]
17491758 pub ( super ) fn get_decayed_buckets < T : Time > ( & self , now : T , last_updated : T , half_life : Duration )
1750- -> ( [ u16 ; 32 ] , [ u16 ; 32 ] , u32 ) {
1759+ -> ( [ u16 ; 32 ] , [ u16 ; 32 ] , u64 , u32 ) {
17511760 let required_decays = now. duration_since ( last_updated) . as_secs ( )
17521761 . checked_div ( half_life. as_secs ( ) )
17531762 . map_or ( u32:: max_value ( ) , |decays| cmp:: min ( decays, u32:: max_value ( ) as u64 ) as u32 ) ;
1763+
1764+ let mut total_valid_points_tracked = 0 ;
1765+ for ( min_idx, min_bucket) in self . min_liquidity_offset_history . buckets . iter ( ) . enumerate ( ) {
1766+ for max_bucket in self . max_liquidity_offset_history . buckets . iter ( ) . take ( 32 - min_idx) {
1767+ total_valid_points_tracked += ( * min_bucket as u64 ) * ( * max_bucket as u64 ) ;
1768+ }
1769+ }
1770+
17541771 let mut min_buckets = * self . min_liquidity_offset_history ;
17551772 min_buckets. time_decay_data ( required_decays) ;
17561773 let mut max_buckets = * self . max_liquidity_offset_history ;
17571774 max_buckets. time_decay_data ( required_decays) ;
1758- ( min_buckets. buckets , max_buckets. buckets , required_decays)
1775+ ( min_buckets. buckets , max_buckets. buckets , total_valid_points_tracked , required_decays)
17591776 }
17601777
17611778 #[ inline]
@@ -1766,26 +1783,16 @@ mod bucketed_history {
17661783 // historical liquidity bucket (min, max) combinations (where min_idx < max_idx) and, for
17671784 // each, calculate the probability of success given our payment amount, then total the
17681785 // weighted average probability of success.
1769- let mut total_valid_points_tracked = 0 ;
1770-
17711786 let payment_pos = amount_to_pos ( amount_msat, capacity_msat) ;
17721787 #[ cfg( not( fuzzing) ) ]
17731788 debug_assert ! ( payment_pos <= MIN_SIZE_BUCKETS ) ; // Note that we allow the max+1 sentinel
17741789 if payment_pos >= MIN_SIZE_BUCKETS { return None ; }
17751790
17761791 // Check if all our buckets are zero, once decayed and treat it as if we had no data. We
17771792 // don't actually use the decayed buckets, though, as that would lose precision.
1778- let ( decayed_min_buckets, decayed_max_buckets, required_decays) =
1779- self . get_decayed_buckets ( now, last_updated, half_life) ;
1780- if decayed_min_buckets. iter ( ) . all ( |v| * v == 0 ) || decayed_max_buckets. iter ( ) . all ( |v| * v == 0 ) {
1781- return None ;
1782- }
1793+ let ( decayed_min_buckets, decayed_max_buckets, total_valid_points_tracked, required_decays)
1794+ = self . get_decayed_buckets ( now, last_updated, half_life) ;
17831795
1784- for ( min_idx, min_bucket) in self . min_liquidity_offset_history . buckets . iter ( ) . enumerate ( ) {
1785- for max_bucket in self . max_liquidity_offset_history . buckets . iter ( ) . take ( 32 - min_idx) {
1786- total_valid_points_tracked += ( * min_bucket as u64 ) * ( * max_bucket as u64 ) ;
1787- }
1788- }
17891796 // If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme), treat
17901797 // it as if we were fully decayed.
17911798 if total_valid_points_tracked. checked_shr ( required_decays) . unwrap_or ( 0 ) < 32 * 32 {
@@ -3109,7 +3116,7 @@ mod tests {
31093116 // Once fully decayed, the total valid points fall below our minimum threshold, so the
31103117 // estimator no longer considers the data meaningful and returns `None` rather than all-0 buckets.
31113118 assert_eq ! ( scorer. historical_estimated_channel_liquidity_probabilities( 42 , & target) ,
3112- Some ( ( [ 0 ; 32 ] , [ 0 ; 32 ] ) ) ) ;
3119+ None ) ;
31133120 assert_eq ! ( scorer. historical_estimated_payment_success_probability( 42 , & target, 1 ) , None ) ;
31143121
31153122 let mut usage = ChannelUsage {
0 commit comments