Commit 3f32f60
Expose historical bucket data via new accessors
Users should be able to view the data we use to score channels, so this exposes that data in new accessors. Fixes #1854.
1 parent 30060c1 commit 3f32f60

1 file changed: lightning/src/routing/scoring.rs (+81, -2)
@@ -731,15 +731,34 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
 	/// Note that this writes roughly one line per channel for which we have a liquidity estimate,
 	/// which may be a substantial amount of log output.
 	pub fn debug_log_liquidity_stats(&self) {
+		let now = T::now();
+
 		let graph = self.network_graph.read_only();
 		for (scid, liq) in self.channel_liquidities.iter() {
 			if let Some(chan_debug) = graph.channels().get(scid) {
 				let log_direction = |source, target| {
 					if let Some((directed_info, _)) = chan_debug.as_directed_to(target) {
 						let amt = directed_info.effective_capacity().as_msat();
 						let dir_liq = liq.as_directed(source, target, amt, &self.params);
-						log_debug!(self.logger, "Liquidity from {:?} to {:?} via {} is in the range ({}, {})",
-							source, target, scid, dir_liq.min_liquidity_msat(), dir_liq.max_liquidity_msat());
+
+						let buckets = HistoricalMinMaxBuckets {
+							min_liquidity_offset_history: &dir_liq.min_liquidity_offset_history,
+							max_liquidity_offset_history: &dir_liq.max_liquidity_offset_history,
+						};
+						let (min_buckets, max_buckets, _) = buckets.get_decayed_buckets(now,
+							*dir_liq.last_updated, self.params.historical_no_updates_half_life);
+
+						log_debug!(self.logger, core::concat!(
+							"Liquidity from {} to {} via {} is in the range ({}, {}).\n",
+							"\tHistorical min liquidity octile relative probabilities: {} {} {} {} {} {} {} {}\n",
+							"\tHistorical max liquidity octile relative probabilities: {} {} {} {} {} {} {} {}"),
+							source, target, scid, dir_liq.min_liquidity_msat(), dir_liq.max_liquidity_msat(),
+							min_buckets[0], min_buckets[1], min_buckets[2], min_buckets[3],
+							min_buckets[4], min_buckets[5], min_buckets[6], min_buckets[7],
+							// Note that the liquidity buckets are an offset from the edge, so we
+							// invert the max order to get the probabilities from zero.
+							max_buckets[7], max_buckets[6], max_buckets[5], max_buckets[4],
+							max_buckets[3], max_buckets[2], max_buckets[1], max_buckets[0]);
 					} else {
 						log_debug!(self.logger, "No amount known for SCID {} from {:?} to {:?}", scid, source, target);
 					}
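The get_decayed_buckets call above applies the historical_no_updates_half_life decay before the buckets are logged; its implementation is not shown in this diff. As a rough, hedged sketch of the idea only, assuming each bucket counter is simply halved once per full half-life elapsed since the last update (the decay_buckets helper below is hypothetical, not LDK API):

use std::time::Duration;

// Hypothetical helper, not LDK code: halve (right-shift) every bucket once per full
// half-life elapsed since the buckets were last updated.
fn decay_buckets(mut buckets: [u16; 8], elapsed: Duration, half_life: Duration) -> [u16; 8] {
	let half_lives = elapsed.as_secs().checked_div(half_life.as_secs()).unwrap_or(u64::MAX);
	for bucket in buckets.iter_mut() {
		// A shift of 16 or more always empties a u16 bucket, so saturate the shift amount.
		*bucket = bucket.checked_shr(half_lives.min(16) as u32).unwrap_or(0);
	}
	buckets
}

fn main() {
	let buckets = [32u16, 0, 0, 0, 0, 0, 0, 0];
	// Under this assumed model, one half-life halves the count, and sixteen half-lives
	// (the amount the test below advances) empty it entirely.
	assert_eq!(decay_buckets(buckets, Duration::from_secs(10), Duration::from_secs(10))[0], 16);
	assert_eq!(decay_buckets(buckets, Duration::from_secs(160), Duration::from_secs(10))[0], 0);
}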
@@ -770,6 +789,53 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
 		None
 	}
 
+	/// Query the historical estimated minimum and maximum liquidity available for sending a
+	/// payment over the channel with `scid` towards the given `target` node.
+	///
+	/// Returns two sets of 8 buckets. The first set describes the octiles for lower-bound
+	/// liquidity estimates, the second set describes the octiles for upper-bound liquidity
+	/// estimates. Each bucket describes the relative frequency at which we've seen a liquidity
+	/// bound in the octile relative to the channel's total capacity, on an arbitrary scale.
+	/// Because the values are slowly decayed, more recent data points are weighted more heavily
+	/// than older datapoints.
+	///
+	/// When scoring, the estimated probability that an upper-/lower-bound lies in a given octile
+	/// relative to the channel's total capacity is calculated by dividing that bucket's value by
+	/// the total of all buckets for the given bound.
+	///
+	/// For example, a value of `[0, 0, 0, 0, 0, 0, 0, 32]` indicates that we believe the probability
+	/// of a bound being in the top octile to be 100%, and have never (recently) seen it in any
+	/// other octiles. A value of `[31, 0, 0, 0, 0, 0, 0, 32]` indicates we've seen the bound being
+	/// both in the top and bottom octile, and roughly with similar (recent) frequency.
+	///
+	/// Because the datapoints are decayed slowly over time, values will eventually return to
+	/// `Some(([0; 8], [0; 8]))`.
+	pub fn historical_estimated_channel_liquidity_probabilities(&self, scid: u64, target: &NodeId)
+	-> Option<([u16; 8], [u16; 8])> {
+		let graph = self.network_graph.read_only();
+
+		if let Some(chan) = graph.channels().get(&scid) {
+			if let Some(liq) = self.channel_liquidities.get(&scid) {
+				if let Some((directed_info, source)) = chan.as_directed_to(target) {
+					let amt = directed_info.effective_capacity().as_msat();
+					let dir_liq = liq.as_directed(source, target, amt, &self.params);
+
+					let buckets = HistoricalMinMaxBuckets {
+						min_liquidity_offset_history: &dir_liq.min_liquidity_offset_history,
+						max_liquidity_offset_history: &dir_liq.max_liquidity_offset_history,
+					};
+					let (min_buckets, mut max_buckets, _) = buckets.get_decayed_buckets(T::now(),
+						*dir_liq.last_updated, self.params.historical_no_updates_half_life);
+					// Note that the liquidity buckets are an offset from the edge, so we invert
+					// the max order to get the probabilities from zero.
+					max_buckets.reverse();
+					return Some((min_buckets, max_buckets));
+				}
+			}
+		}
+		None
+	}
+
 	/// Marks the node with the given `node_id` as banned, i.e.,
 	/// it will be avoided during path finding.
 	pub fn add_banned(&mut self, node_id: &NodeId) {
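The doc comment above defines how per-octile probabilities are derived from the returned buckets: each bucket's value divided by the total of all buckets for that bound. A minimal, illustrative sketch of that calculation (the bucket_probabilities helper is hypothetical and not part of this commit):

// Illustrative only: convert one set of relative-frequency buckets, as returned by
// historical_estimated_channel_liquidity_probabilities, into per-octile probabilities
// by dividing each bucket by the total across all buckets for that bound.
fn bucket_probabilities(buckets: [u16; 8]) -> Option<[f64; 8]> {
	let total: u32 = buckets.iter().map(|b| *b as u32).sum();
	if total == 0 { return None; } // fully decayed (or no) data
	let mut probs = [0.0; 8];
	for (p, b) in probs.iter_mut().zip(buckets.iter()) {
		*p = *b as f64 / total as f64;
	}
	Some(probs)
}

fn main() {
	// From the doc comment's example: [31, 0, 0, 0, 0, 0, 0, 32] means the bound was seen
	// in the bottom and top octiles with roughly equal (recent) frequency.
	let probs = bucket_probabilities([31, 0, 0, 0, 0, 0, 0, 32]).unwrap();
	assert!((probs[0] - 31.0 / 63.0).abs() < 1e-9);
	assert!((probs[7] - 32.0 / 63.0).abs() < 1e-9);
}

For that example the bottom octile works out to roughly 49% and the top octile to roughly 51%.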
@@ -2684,19 +2750,32 @@ mod tests {
 		};
 		// With no historical data the normal liquidity penalty calculation is used.
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 47);
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			None);
 
 		scorer.payment_path_failed(&payment_path_for_amount(1).iter().collect::<Vec<_>>(), 42);
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 2048);
+		// The "it failed" increment is 32, so the probability should lie fully in the first
+		// octile.
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			Some(([32, 0, 0, 0, 0, 0, 0, 0], [32, 0, 0, 0, 0, 0, 0, 0])));
 
 		// Even after we tell the scorer we definitely have enough available liquidity, it will
 		// still remember that there was some failure in the past, and assign a non-0 penalty.
 		scorer.payment_path_failed(&payment_path_for_amount(1000).iter().collect::<Vec<_>>(), 43);
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 198);
+		// The first octile should be decayed just slightly and the last octile has a new point.
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			Some(([31, 0, 0, 0, 0, 0, 0, 32], [31, 0, 0, 0, 0, 0, 0, 32])));
 
 		// Advance the time forward 16 half-lives (which the docs claim will ensure all data is
 		// gone), and check that we're back to where we started.
 		SinceEpoch::advance(Duration::from_secs(10 * 16));
 		assert_eq!(scorer.channel_penalty_msat(42, &source, &target, usage), 47);
+		// Once fully decayed we still have data, but it's all-0s. In the future we may remove the
+		// data entirely instead.
+		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+			Some(([0; 8], [0; 8])));
 	}
 
 	#[test]
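Finally, a hedged usage sketch of the new accessor from calling code. It assumes a ProbabilisticScorer already constructed as `scorer`, plus a short channel id `scid: u64` and counterparty `target: NodeId` already in scope; none of those bindings are defined by this commit:

// Query the decayed historical octile buckets for `scid` in the direction towards `target`.
// Returns None if the channel, its liquidity data, or the requested direction is unknown.
match scorer.historical_estimated_channel_liquidity_probabilities(scid, &target) {
	Some((min_buckets, max_buckets)) => {
		println!("min-bound octile buckets: {:?}", min_buckets);
		println!("max-bound octile buckets: {:?}", max_buckets);
	},
	None => println!("No historical liquidity data for SCID {} towards {:?}", scid, target),
}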
