Skip to content

Commit 5a39900

Browse files
committed
refactor hash map to ChannelLiquidities struct
Wrap the liquidities hash map into a struct so that decay and serialization functionality can be attached. This allows external data to be serialized into this struct and decayed to make it comparable and mergeable.
1 parent 3718da0 commit 5a39900

File tree

1 file changed

+84
-31
lines changed

1 file changed

+84
-31
lines changed

lightning/src/routing/scoring.rs

+84-31
Original file line numberDiff line numberDiff line change
@@ -474,7 +474,84 @@ where L::Target: Logger {
474474
decay_params: ProbabilisticScoringDecayParameters,
475475
network_graph: G,
476476
logger: L,
477-
channel_liquidities: HashMap<u64, ChannelLiquidity>,
477+
channel_liquidities: ChannelLiquidities,
478+
}
479+
/// ChannelLiquidities contains live and historical liquidity bounds for each channel.
///
/// Wraps the scorer's per-channel map (keyed by short channel id) so that decay and
/// serialization functionality can live on the container itself, allowing externally
/// sourced liquidity data to be deserialized, decayed, and merged with local data.
pub struct ChannelLiquidities(HashMap<u64, ChannelLiquidity>);

impl ChannelLiquidities {
	/// Constructs an empty container.
	fn new() -> Self {
		Self(new_hash_map())
	}

	/// Applies time-based decay to every channel's bounds and prunes entries that no
	/// longer carry any information.
	///
	/// For each channel: the live min/max liquidity offsets are decayed via
	/// `decayed_offset` and `last_updated` is advanced; if more than one historical
	/// half-life has elapsed since `offset_history_last_updated`, the historical
	/// buckets are decayed proportionally as well. Entries whose live offsets are both
	/// zero and whose history holds no datapoints are removed entirely.
	fn time_passed(&mut self, duration_since_epoch: Duration, decay_params: ProbabilisticScoringDecayParameters) {
		self.0.retain(|_scid, liquidity| {
			liquidity.min_liquidity_offset_msat =
				liquidity.decayed_offset(liquidity.min_liquidity_offset_msat, duration_since_epoch, decay_params);
			liquidity.max_liquidity_offset_msat =
				liquidity.decayed_offset(liquidity.max_liquidity_offset_msat, duration_since_epoch, decay_params);
			liquidity.last_updated = duration_since_epoch;

			// TODO: Call decay multiple times.
			// NOTE(review): the historical buckets are decayed in a single step by the
			// full elapsed-time/half-life ratio rather than once per elapsed half-life,
			// and `offset_history_last_updated` only advances when a decay occurs.
			let elapsed_time =
				duration_since_epoch.saturating_sub(liquidity.offset_history_last_updated);
			if elapsed_time > decay_params.historical_no_updates_half_life {
				let half_life = decay_params.historical_no_updates_half_life.as_secs_f64();
				// Guard against division by zero when the half-life is configured as 0.
				if half_life != 0.0 {
					liquidity.liquidity_history.decay_buckets(elapsed_time.as_secs_f64() / half_life);
					liquidity.offset_history_last_updated = duration_since_epoch;
				}
			}
			// Retain only entries that still carry live offsets or historical datapoints.
			liquidity.min_liquidity_offset_msat != 0 || liquidity.max_liquidity_offset_msat != 0 ||
				liquidity.liquidity_history.has_datapoints()
		});
	}

	/// Returns the liquidity information for the given channel, if any.
	fn get(&self, short_channel_id: &u64) -> Option<&ChannelLiquidity> {
		self.0.get(short_channel_id)
	}

	/// Returns mutable liquidity information for the given channel, if any.
	fn get_mut(&mut self, short_channel_id: &u64) -> Option<&mut ChannelLiquidity> {
		self.0.get_mut(short_channel_id)
	}

	/// Inserts liquidity information for the given channel, returning the previous
	/// entry if one existed.
	fn insert(&mut self, short_channel_id: u64, liquidity: ChannelLiquidity) -> Option<ChannelLiquidity> {
		self.0.insert(short_channel_id, liquidity)
	}

	/// Iterates over all (short channel id, liquidity) pairs in unspecified order.
	fn iter(&self) -> impl Iterator<Item = (&u64, &ChannelLiquidity)> {
		self.0.iter()
	}

	/// Returns the map entry for the given channel, for insert-or-update access.
	fn entry(&mut self, short_channel_id: u64) -> Entry<u64, ChannelLiquidity, RandomState> {
		self.0.entry(short_channel_id)
	}

	/// Returns the serialized length of the inner map (see [`Writeable`]).
	fn serialized_length(&self) -> usize {
		self.0.serialized_length()
	}
}
534+
535+
536+
impl Readable for ChannelLiquidities {
537+
#[inline]
538+
fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
539+
let mut channel_liquidities = new_hash_map();
540+
read_tlv_fields!(r, {
541+
(0, channel_liquidities, required),
542+
});
543+
Ok(ChannelLiquidities(channel_liquidities))
544+
}
545+
}
546+
547+
impl Writeable for ChannelLiquidities {
548+
#[inline]
549+
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
550+
write_tlv_fields!(w, {
551+
(0, self.0, required),
552+
});
553+
Ok(())
554+
}
478555
}
479556

480557
/// Parameters for configuring [`ProbabilisticScorer`].
@@ -804,7 +881,7 @@ impl ProbabilisticScoringDecayParameters {
804881
/// first node in the ordering of the channel's counterparties. Thus, swapping the two liquidity
805882
/// offset fields gives the opposite direction.
806883
#[repr(C)] // Force the fields in memory to be in the order we specify
807-
struct ChannelLiquidity {
884+
pub struct ChannelLiquidity {
808885
/// Lower channel liquidity bound in terms of an offset from zero.
809886
min_liquidity_offset_msat: u64,
810887

@@ -849,7 +926,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ProbabilisticScorer<G, L> whe
849926
decay_params,
850927
network_graph,
851928
logger,
852-
channel_liquidities: new_hash_map(),
929+
channel_liquidities: ChannelLiquidities::new(),
853930
}
854931
}
855932

@@ -1603,26 +1680,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ScoreUpdate for Probabilistic
16031680
}
16041681

16051682
fn time_passed(&mut self, duration_since_epoch: Duration) {
1606-
let decay_params = self.decay_params;
1607-
self.channel_liquidities.retain(|_scid, liquidity| {
1608-
liquidity.min_liquidity_offset_msat =
1609-
liquidity.decayed_offset(liquidity.min_liquidity_offset_msat, duration_since_epoch, decay_params);
1610-
liquidity.max_liquidity_offset_msat =
1611-
liquidity.decayed_offset(liquidity.max_liquidity_offset_msat, duration_since_epoch, decay_params);
1612-
liquidity.last_updated = duration_since_epoch;
1613-
1614-
let elapsed_time =
1615-
duration_since_epoch.saturating_sub(liquidity.offset_history_last_updated);
1616-
if elapsed_time > decay_params.historical_no_updates_half_life {
1617-
let half_life = decay_params.historical_no_updates_half_life.as_secs_f64();
1618-
if half_life != 0.0 {
1619-
liquidity.liquidity_history.decay_buckets(elapsed_time.as_secs_f64() / half_life);
1620-
liquidity.offset_history_last_updated = duration_since_epoch;
1621-
}
1622-
}
1623-
liquidity.min_liquidity_offset_msat != 0 || liquidity.max_liquidity_offset_msat != 0 ||
1624-
liquidity.liquidity_history.has_datapoints()
1625-
});
1683+
self.channel_liquidities.time_passed(duration_since_epoch, self.decay_params);
16261684
}
16271685
}
16281686

@@ -2061,14 +2119,12 @@ mod bucketed_history {
20612119
}
20622120
}
20632121
use bucketed_history::{LegacyHistoricalBucketRangeTracker, HistoricalBucketRangeTracker, DirectedHistoricalLiquidityTracker, HistoricalLiquidityTracker};
2122+
use hashbrown::hash_map::Entry;
20642123

20652124
impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> Writeable for ProbabilisticScorer<G, L> where L::Target: Logger {
20662125
#[inline]
20672126
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
2068-
write_tlv_fields!(w, {
2069-
(0, self.channel_liquidities, required),
2070-
});
2071-
Ok(())
2127+
self.channel_liquidities.write(w)
20722128
}
20732129
}
20742130

@@ -2079,10 +2135,7 @@ ReadableArgs<(ProbabilisticScoringDecayParameters, G, L)> for ProbabilisticScore
20792135
r: &mut R, args: (ProbabilisticScoringDecayParameters, G, L)
20802136
) -> Result<Self, DecodeError> {
20812137
let (decay_params, network_graph, logger) = args;
2082-
let mut channel_liquidities = new_hash_map();
2083-
read_tlv_fields!(r, {
2084-
(0, channel_liquidities, required),
2085-
});
2138+
let channel_liquidities = ChannelLiquidities::read(r)?;
20862139
Ok(Self {
20872140
decay_params,
20882141
network_graph,

0 commit comments

Comments
 (0)