@@ -719,7 +719,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
719719							min_liquidity_offset_history :  & dir_liq. min_liquidity_offset_history , 
720720							max_liquidity_offset_history :  & dir_liq. max_liquidity_offset_history , 
721721						} ; 
722- 						let  ( min_buckets,  max_buckets,  _)  = buckets. get_decayed_buckets ( now, 
722+ 						let  ( min_buckets,  max_buckets,  _,  _ )  = buckets. get_decayed_buckets ( now, 
723723							* dir_liq. last_updated ,  self . decay_params . historical_no_updates_half_life ) ; 
724724
725725						log_debug ! ( self . logger,  core:: concat!( 
@@ -800,7 +800,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
800800/// in the top and bottom bucket, and roughly with similar (recent) frequency. 
801801/// 
802802/// Because the datapoints are decayed slowly over time, values will eventually return to 
803- /// `Some(([0 ; 32], [0 ; 32]))`. 
803+ /// `Some(([1; 32], [1; 32]))` and then to `None` once no datapoints remain.
804804/// 
805805/// In order to convert this into a success probability, as used in the scoring model, see 
806806/// [`Self::historical_estimated_payment_success_probability`]. 
@@ -818,8 +818,16 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
818818						min_liquidity_offset_history :  & dir_liq. min_liquidity_offset_history , 
819819						max_liquidity_offset_history :  & dir_liq. max_liquidity_offset_history , 
820820					} ; 
821- 					let  ( min_buckets,  mut  max_buckets,  _)  = buckets. get_decayed_buckets ( T :: now ( ) , 
822- 						* dir_liq. last_updated ,  self . decay_params . historical_no_updates_half_life ) ; 
821+ 					let  ( min_buckets,  mut  max_buckets,  valid_points,  required_decays)  =
822+ 						buckets. get_decayed_buckets ( T :: now ( ) ,  * dir_liq. last_updated , 
823+ 							self . decay_params . historical_no_updates_half_life ) ; 
824+ 
825+ 					// If the total valid points is smaller than 1.0 (i.e. 32*32 in our fixed-point
826+ 					// scheme, as each point is a product of two 32-scaled bucket values), treat it as if we were fully decayed.
827+ 					if  valid_points. checked_shr ( required_decays) . unwrap_or ( 0 )  < 32 * 32  { 
828+ 						return  None ; 
829+ 					} 
830+ 
823831					// Note that the liquidity buckets are an offset from the edge, so we inverse 
824832					// the max order to get the probabilities from zero. 
825833					max_buckets. reverse ( ) ; 
@@ -1749,15 +1757,23 @@ mod bucketed_history {
17491757	impl  HistoricalMinMaxBuckets < ' _ >  { 
17501758		#[ inline]  
17511759		pub ( super )  fn  get_decayed_buckets < T :  Time > ( & self ,  now :  T ,  last_updated :  T ,  half_life :  Duration ) 
1752- 		-> ( [ u16 ;  32 ] ,  [ u16 ;  32 ] ,  u32 )  { 
1760+ 		-> ( [ u16 ;  32 ] ,  [ u16 ;  32 ] ,  u64 ,   u32 )  { 
17531761			let  required_decays = now. duration_since ( last_updated) . as_secs ( ) 
17541762				. checked_div ( half_life. as_secs ( ) ) 
17551763				. map_or ( u32:: max_value ( ) ,  |decays| cmp:: min ( decays,  u32:: max_value ( )  as  u64 )  as  u32 ) ; 
1764+ 
1765+ 			let  mut  total_valid_points_tracked = 0 ; 
1766+ 			for  ( min_idx,  min_bucket)  in  self . min_liquidity_offset_history . buckets . iter ( ) . enumerate ( )  { 
1767+ 				for  max_bucket in  self . max_liquidity_offset_history . buckets . iter ( ) . take ( 32  - min_idx)  { 
1768+ 					total_valid_points_tracked += ( * min_bucket as  u64 )  *  ( * max_bucket as  u64 ) ; 
1769+ 				} 
1770+ 			} 
1771+ 
17561772			let  mut  min_buckets = * self . min_liquidity_offset_history ; 
17571773			min_buckets. time_decay_data ( required_decays) ; 
17581774			let  mut  max_buckets = * self . max_liquidity_offset_history ; 
17591775			max_buckets. time_decay_data ( required_decays) ; 
1760- 			( min_buckets. buckets ,  max_buckets. buckets ,  required_decays) 
1776+ 			( min_buckets. buckets ,  max_buckets. buckets ,  total_valid_points_tracked ,   required_decays) 
17611777		} 
17621778
17631779		#[ inline]  
@@ -1768,26 +1784,16 @@ mod bucketed_history {
17681784			// historical liquidity bucket (min, max) combinations (where min_idx < max_idx) and, for 
17691785			// each, calculate the probability of success given our payment amount, then total the 
17701786			// weighted average probability of success. 
1771- 			let  mut  total_valid_points_tracked = 0 ; 
1772- 
17731787			let  payment_pos = amount_to_pos ( amount_msat,  capacity_msat) ; 
17741788			#[ cfg( not( fuzzing) ) ]  
17751789			debug_assert ! ( payment_pos <= MIN_SIZE_BUCKETS ) ;  // Note that we allow the max+1 sentinel 
17761790			if  payment_pos >= MIN_SIZE_BUCKETS  {  return  None ;  } 
17771791
17781792			// Check if all our buckets are zero, once decayed and treat it as if we had no data. We 
17791793			// don't actually use the decayed buckets, though, as that would lose precision. 
1780- 			let  ( decayed_min_buckets,  decayed_max_buckets,  required_decays)  =
1781- 				self . get_decayed_buckets ( now,  last_updated,  half_life) ; 
1782- 			if  decayed_min_buckets. iter ( ) . all ( |v| * v == 0 )  || decayed_max_buckets. iter ( ) . all ( |v| * v == 0 )  { 
1783- 				return  None ; 
1784- 			} 
1794+ 			let  ( decayed_min_buckets,  decayed_max_buckets,  total_valid_points_tracked,  required_decays) 
1795+ 				= self . get_decayed_buckets ( now,  last_updated,  half_life) ; 
17851796
1786- 			for  ( min_idx,  min_bucket)  in  self . min_liquidity_offset_history . buckets . iter ( ) . enumerate ( )  { 
1787- 				for  max_bucket in  self . max_liquidity_offset_history . buckets . iter ( ) . take ( 32  - min_idx)  { 
1788- 					total_valid_points_tracked += ( * min_bucket as  u64 )  *  ( * max_bucket as  u64 ) ; 
1789- 				} 
1790- 			} 
17911797			// If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme), treat 
17921798			// it as if we were fully decayed. 
17931799			if  total_valid_points_tracked. checked_shr ( required_decays) . unwrap_or ( 0 )  < 32 * 32  { 
@@ -3097,7 +3103,7 @@ mod tests {
30973103		// Once fully decayed we still have data, but its all-0s. In the future we may remove the 
30983104		// data entirely instead. 
30993105		assert_eq ! ( scorer. historical_estimated_channel_liquidity_probabilities( 42 ,  & target) , 
3100- 			Some ( ( [ 0 ;   32 ] ,   [ 0 ;   32 ] ) ) ) ; 
3106+ 			None ) ; 
31013107		assert_eq ! ( scorer. historical_estimated_payment_success_probability( 42 ,  & target,  1 ) ,  None ) ; 
31023108
31033109		let  mut  usage = ChannelUsage  { 
0 commit comments