@@ -597,7 +597,22 @@ struct HistoricalMinMaxBuckets<'a> {
 
 impl HistoricalMinMaxBuckets<'_> {
 	#[inline]
-	fn calculate_success_probability_times_billion(&self, required_decays: u32, payment_amt_64th_bucket: u8) -> Option<u64> {
+	fn get_decayed_buckets<T: Time>(&self, now: T, last_updated: T, half_life: Duration)
+	-> ([u16; 8], [u16; 8], u32) {
+		let required_decays = now.duration_since(last_updated).as_secs()
+			.checked_div(half_life.as_secs())
+			.map_or(u32::max_value(), |decays| cmp::min(decays, u32::max_value() as u64) as u32);
+		let mut min_buckets = *self.min_liquidity_offset_history;
+		min_buckets.time_decay_data(required_decays);
+		let mut max_buckets = *self.max_liquidity_offset_history;
+		max_buckets.time_decay_data(required_decays);
+		(min_buckets.buckets, max_buckets.buckets, required_decays)
+	}
+
+	#[inline]
+	fn calculate_success_probability_times_billion<T: Time>(
+		&self, now: T, last_updated: T, half_life: Duration, payment_amt_64th_bucket: u8)
+	-> Option<u64> {
 		// If historical penalties are enabled, calculate the penalty by walking the set of
 		// historical liquidity bucket (min, max) combinations (where min_idx < max_idx) and, for
 		// each, calculate the probability of success given our payment amount, then total the
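For reference, the decay count computed in `get_decayed_buckets` above is simply the number of whole half-lives that have elapsed, with saturating integer division. A standalone sketch of that arithmetic (the function name is illustrative, not from the crate):

```rust
use std::cmp;
use std::time::Duration;

// Number of half-lives elapsed, saturating at u32::MAX. A zero half-life makes
// checked_div return None, which is treated as "decay everything", mirroring
// the map_or(u32::max_value(), ...) above.
fn required_decays(elapsed: Duration, half_life: Duration) -> u32 {
    elapsed.as_secs()
        .checked_div(half_life.as_secs())
        .map_or(u32::MAX, |decays| cmp::min(decays, u32::MAX as u64) as u32)
}

fn main() {
    // 35 seconds at a 10-second half-life is 3 whole decays (integer division).
    assert_eq!(required_decays(Duration::from_secs(35), Duration::from_secs(10)), 3);
    assert_eq!(required_decays(Duration::from_secs(5), Duration::from_secs(0)), u32::MAX);
}
```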
@@ -619,23 +634,22 @@ impl HistoricalMinMaxBuckets<'_> {
 		// less than 1/16th of a channel's capacity, or 1/8th if we used the top of the bucket.
 		let mut total_valid_points_tracked = 0;
 
-		// Rather than actually decaying the individual buckets, which would lose precision, we
-		// simply track whether all buckets would be decayed to zero, in which case we treat it as
-		// if we had no data.
-		let mut is_fully_decayed = true;
-		let mut check_track_bucket_contains_undecayed_points =
-			|bucket_val: u16| if bucket_val.checked_shr(required_decays).unwrap_or(0) > 0 { is_fully_decayed = false; };
+		// Check whether all our buckets are zero once decayed, and if so treat it as if we had no
+		// data. We don't actually use the decayed buckets, though, as that would lose precision.
+		let (decayed_min_buckets, decayed_max_buckets, required_decays) =
+			self.get_decayed_buckets(now, last_updated, half_life);
+		if decayed_min_buckets.iter().all(|v| *v == 0) || decayed_max_buckets.iter().all(|v| *v == 0) {
+			return None;
+		}
 
 		for (min_idx, min_bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
-			check_track_bucket_contains_undecayed_points(*min_bucket);
 			for max_bucket in self.max_liquidity_offset_history.buckets.iter().take(8 - min_idx) {
 				total_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
-				check_track_bucket_contains_undecayed_points(*max_bucket);
 			}
 		}
 		// If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme), treat
 		// it as if we were fully decayed.
 		if total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < 32 * 32 {
 			return None;
 		}
 
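The cutoff above works because each fully-weighted observation contributes 32 to a bucket, so one datapoint in both the min and max histories yields 32 × 32 = 1024 cross-product points. A minimal sketch of the same check over plain arrays:

```rust
// Total the cross products of (min, max) bucket pairs whose octiles don't
// overlap (min_idx + max_idx < 8), then require at least one full datapoint
// (32 * 32 in the fixed-point scheme) to survive the decay shift.
fn has_enough_points(min_buckets: &[u16; 8], max_buckets: &[u16; 8], required_decays: u32) -> bool {
    let mut total_valid_points_tracked: u64 = 0;
    for (min_idx, min_bucket) in min_buckets.iter().enumerate() {
        for max_bucket in max_buckets.iter().take(8 - min_idx) {
            total_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
        }
    }
    total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) >= 32 * 32
}

fn main() {
    // A single fresh observation in each history is exactly one valid point...
    let (min, max) = ([32u16, 0, 0, 0, 0, 0, 0, 0], [32u16, 0, 0, 0, 0, 0, 0, 0]);
    assert!(has_enough_points(&min, &max, 0));
    // ...and one half-life of decay drops it below the threshold.
    assert!(!has_enough_points(&min, &max, 1));
}
```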
@@ -717,15 +731,34 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
 	/// Note that this writes roughly one line per channel for which we have a liquidity estimate,
 	/// which may be a substantial amount of log output.
 	pub fn debug_log_liquidity_stats(&self) {
+		let now = T::now();
+
 		let graph = self.network_graph.read_only();
 		for (scid, liq) in self.channel_liquidities.iter() {
 			if let Some(chan_debug) = graph.channels().get(scid) {
 				let log_direction = |source, target| {
 					if let Some((directed_info, _)) = chan_debug.as_directed_to(target) {
 						let amt = directed_info.effective_capacity().as_msat();
 						let dir_liq = liq.as_directed(source, target, amt, &self.params);
-						log_debug!(self.logger, "Liquidity from {:?} to {:?} via {} is in the range ({}, {})",
-							source, target, scid, dir_liq.min_liquidity_msat(), dir_liq.max_liquidity_msat());
+
+						let buckets = HistoricalMinMaxBuckets {
+							min_liquidity_offset_history: &dir_liq.min_liquidity_offset_history,
+							max_liquidity_offset_history: &dir_liq.max_liquidity_offset_history,
+						};
+						let (min_buckets, max_buckets, _) = buckets.get_decayed_buckets(now,
+							*dir_liq.last_updated, self.params.historical_no_updates_half_life);
+
+						log_debug!(self.logger, core::concat!(
+							"Liquidity from {} to {} via {} is in the range ({}, {}).\n",
+							"\tHistorical min liquidity octile relative probabilities: {} {} {} {} {} {} {} {}\n",
+							"\tHistorical max liquidity octile relative probabilities: {} {} {} {} {} {} {} {}"),
+							source, target, scid, dir_liq.min_liquidity_msat(), dir_liq.max_liquidity_msat(),
+							min_buckets[0], min_buckets[1], min_buckets[2], min_buckets[3],
+							min_buckets[4], min_buckets[5], min_buckets[6], min_buckets[7],
+							// Note that the liquidity buckets are an offset from the edge, so we
+							// invert the max order to get the probabilities from zero.
+							max_buckets[7], max_buckets[6], max_buckets[5], max_buckets[4],
+							max_buckets[3], max_buckets[2], max_buckets[1], max_buckets[0]);
 					} else {
 						log_debug!(self.logger, "No amount known for SCID {} from {:?} to {:?}", scid, source, target);
 					}
@@ -756,6 +789,53 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
 		None
 	}
 
+	/// Query the historical estimated minimum and maximum liquidity available for sending a
+	/// payment over the channel with `scid` towards the given `target` node.
+	///
+	/// Returns two sets of 8 buckets. The first set describes the octiles for lower-bound
+	/// liquidity estimates, the second set describes the octiles for upper-bound liquidity
+	/// estimates. Each bucket describes the relative frequency at which we've seen a liquidity
+	/// bound in the octile relative to the channel's total capacity, on an arbitrary scale.
+	/// Because the values are slowly decayed, more recent data points are weighted more heavily
+	/// than older datapoints.
+	///
+	/// When scoring, the estimated probability that an upper-/lower-bound lies in a given octile
+	/// relative to the channel's total capacity is calculated by dividing that bucket's value by
+	/// the total value of all buckets for the given bound.
+	///
+	/// For example, a value of `[0, 0, 0, 0, 0, 0, 0, 32]` indicates that we believe the
+	/// probability of a bound being in the top octile to be 100%, and have never (recently) seen
+	/// it in any other octiles. A value of `[31, 0, 0, 0, 0, 0, 0, 32]` indicates we've seen the
+	/// bound being in both the top and bottom octile, with roughly similar (recent) frequency.
+	///
+	/// Because the datapoints are decayed slowly over time, values will eventually return to
+	/// `Some(([0; 8], [0; 8]))`.
+	pub fn historical_estimated_channel_liquidity_probabilities(&self, scid: u64, target: &NodeId)
+	-> Option<([u16; 8], [u16; 8])> {
+		let graph = self.network_graph.read_only();
+
+		if let Some(chan) = graph.channels().get(&scid) {
+			if let Some(liq) = self.channel_liquidities.get(&scid) {
+				if let Some((directed_info, source)) = chan.as_directed_to(target) {
+					let amt = directed_info.effective_capacity().as_msat();
+					let dir_liq = liq.as_directed(source, target, amt, &self.params);
+
+					let buckets = HistoricalMinMaxBuckets {
+						min_liquidity_offset_history: &dir_liq.min_liquidity_offset_history,
+						max_liquidity_offset_history: &dir_liq.max_liquidity_offset_history,
+					};
+					let (min_buckets, mut max_buckets, _) = buckets.get_decayed_buckets(T::now(),
+						*dir_liq.last_updated, self.params.historical_no_updates_half_life);
+					// Note that the liquidity buckets are an offset from the edge, so we invert
+					// the max order to get the probabilities from zero.
+					max_buckets.reverse();
+					return Some((min_buckets, max_buckets));
+				}
+			}
+		}
+		None
+	}
+
 	/// Marks the node with the given `node_id` as banned, i.e.,
 	/// it will be avoided during path finding.
 	pub fn add_banned(&mut self, node_id: &NodeId) {
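A hypothetical caller of the new accessor can turn the raw bucket weights into probabilities exactly as the docs describe, by dividing by the per-bound total (`scorer`, `scid`, and `target` are assumed to already be in scope):

```rust
// Hypothetical usage sketch for the accessor added above.
if let Some((min_buckets, max_buckets)) =
    scorer.historical_estimated_channel_liquidity_probabilities(scid, &target)
{
    // The weights are on an arbitrary scale; normalize by the per-bound total.
    let total: u32 = min_buckets.iter().map(|v| *v as u32).sum();
    if total != 0 {
        // Estimated probability that the lower bound lies in the bottom octile.
        let p = min_buckets[0] as f64 / total as f64;
        println!("P(lower bound in bottom octile) = {:.2}", p);
    }
    // max_buckets is already reversed to run from zero and is handled the same way.
    let _ = max_buckets;
}
```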
@@ -942,9 +1022,6 @@ impl<L: Deref<Target = u64>, BRT: Deref<Target = HistoricalBucketRangeTracker>,
 
 		if params.historical_liquidity_penalty_multiplier_msat != 0 ||
 		   params.historical_liquidity_penalty_amount_multiplier_msat != 0 {
-			let required_decays = self.now.duration_since(*self.last_updated).as_secs()
-				.checked_div(params.historical_no_updates_half_life.as_secs())
-				.map_or(u32::max_value(), |decays| cmp::min(decays, u32::max_value() as u64) as u32);
 			let payment_amt_64th_bucket = amount_msat * 64 / self.capacity_msat;
 			debug_assert!(payment_amt_64th_bucket <= 64);
 			if payment_amt_64th_bucket > 64 { return res; }
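The bucketing above maps the payment amount onto 64 equal fractions of the channel's capacity; amounts at or above capacity land in bucket 64 and are skipped by the early return. A tiny standalone sketch (the function name is illustrative):

```rust
// Map a payment amount to its 64th-of-capacity bucket.
fn payment_amt_64th_bucket(amount_msat: u64, capacity_msat: u64) -> u64 {
    amount_msat * 64 / capacity_msat
}

fn main() {
    // A quarter of the channel's capacity falls in bucket 16 of 64.
    assert_eq!(payment_amt_64th_bucket(250_000, 1_000_000), 16);
    // The full capacity maps to 64, which the scoring code bails out on.
    assert_eq!(payment_amt_64th_bucket(1_000_000, 1_000_000), 64);
}
```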
@@ -954,7 +1031,9 @@ impl<L: Deref<Target = u64>, BRT: Deref<Target = HistoricalBucketRangeTracker>,
 				max_liquidity_offset_history: &self.max_liquidity_offset_history,
 			};
 			if let Some(cumulative_success_prob_times_billion) = buckets
-				.calculate_success_probability_times_billion(required_decays, payment_amt_64th_bucket as u8) {
+				.calculate_success_probability_times_billion(self.now, *self.last_updated,
+					params.historical_no_updates_half_life, payment_amt_64th_bucket as u8)
+			{
 				let historical_negative_log10_times_2048 = approx::negative_log10_times_2048(cumulative_success_prob_times_billion + 1, 1024 * 1024 * 1024);
 				res = res.saturating_add(Self::combined_penalty_msat(amount_msat,
 					historical_negative_log10_times_2048, params.historical_liquidity_penalty_multiplier_msat,
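For intuition, the success probability is folded into the penalty via its negative base-10 logarithm and the two configured multipliers. A rough floating-point analogue, not the crate's fixed-point `combined_penalty_msat` arithmetic, assuming the same 2^20-msat amount divisor as the existing liquidity penalty:

```rust
// Floating-point sketch: the penalty grows linearly in -log10(success
// probability), scaled by a flat multiplier plus an amount-proportional one.
fn approx_historical_penalty_msat(
    success_prob: f64, amount_msat: u64,
    liquidity_penalty_multiplier_msat: u64,
    liquidity_penalty_amount_multiplier_msat: u64,
) -> u64 {
    let neg_log10 = -success_prob.max(1e-9).log10();
    let base_penalty = neg_log10 * liquidity_penalty_multiplier_msat as f64;
    let amount_penalty = neg_log10 * liquidity_penalty_amount_multiplier_msat as f64
        * amount_msat as f64 / (1u64 << 20) as f64;
    (base_penalty + amount_penalty) as u64
}
```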
@@ -2671,19 +2750,32 @@ mod tests {
 		};
 		// With no historical data the normal liquidity penalty calculation is used.
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 47);
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			None);
 
 		scorer.payment_path_failed(&payment_path_for_amount(1).iter().collect::<Vec<_>>(), 42);
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 2048);
+		// The "it failed" increment is 32, so the probability should lie fully in the first
+		// octile.
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			Some(([32, 0, 0, 0, 0, 0, 0, 0], [32, 0, 0, 0, 0, 0, 0, 0])));
 
 		// Even after we tell the scorer we definitely have enough available liquidity, it will
 		// still remember that there was some failure in the past, and assign a non-0 penalty.
 		scorer.payment_path_failed(&payment_path_for_amount(1000).iter().collect::<Vec<_>>(), 43);
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 198);
+		// The first octile should be decayed just slightly and the last octile gains a new point.
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			Some(([31, 0, 0, 0, 0, 0, 0, 32], [31, 0, 0, 0, 0, 0, 0, 32])));
 
 		// Advance the time forward 16 half-lives (which the docs claim will ensure all data is
 		// gone), and check that we're back to where we started.
 		SinceEpoch::advance(Duration::from_secs(10 * 16));
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 47);
+		// Once fully decayed we still have data, but it's all-0s. In the future we may remove
+		// the data entirely instead.
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			Some(([0; 8], [0; 8])));
 	}
 
 	#[test]