Skip to content

Commit 5dd38b3

Browse files
committed
Hold time reporting
Adds hold time reporting for the final and intermediate nodes.
1 parent b155ac4 commit 5dd38b3

File tree

3 files changed

+106
-21
lines changed

3 files changed

+106
-21
lines changed

lightning/src/ln/channel.rs

Lines changed: 32 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -6191,7 +6191,7 @@ where
61916191
assert!(!self.context.channel_state.can_generate_new_commitment());
61926192
let mon_update_id = self.context.latest_monitor_update_id; // Forget the ChannelMonitor update
61936193
let fulfill_resp =
6194-
self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, logger);
6194+
self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, None, logger);
61956195
self.context.latest_monitor_update_id = mon_update_id;
61966196
if let UpdateFulfillFetch::NewClaim { update_blocked, .. } = fulfill_resp {
61976197
assert!(update_blocked); // The HTLC must have ended up in the holding cell.
@@ -6200,7 +6200,8 @@ where
62006200

62016201
fn get_update_fulfill_htlc<L: Deref>(
62026202
&mut self, htlc_id_arg: u64, payment_preimage_arg: PaymentPreimage,
6203-
payment_info: Option<PaymentClaimDetails>, logger: &L,
6203+
payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
6204+
logger: &L,
62046205
) -> UpdateFulfillFetch
62056206
where
62066207
L::Target: Logger,
@@ -6315,7 +6316,7 @@ where
63156316
self.context.holding_cell_htlc_updates.push(HTLCUpdateAwaitingACK::ClaimHTLC {
63166317
payment_preimage: payment_preimage_arg,
63176318
htlc_id: htlc_id_arg,
6318-
attribution_data: None,
6319+
attribution_data,
63196320
});
63206321
return UpdateFulfillFetch::NewClaim {
63216322
monitor_update,
@@ -6346,7 +6347,7 @@ where
63466347
);
63476348
htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(
63486349
payment_preimage_arg.clone(),
6349-
None,
6350+
attribution_data,
63506351
));
63516352
}
63526353

@@ -6355,13 +6356,20 @@ where
63556356

63566357
pub fn get_update_fulfill_htlc_and_commit<L: Deref>(
63576358
&mut self, htlc_id: u64, payment_preimage: PaymentPreimage,
6358-
payment_info: Option<PaymentClaimDetails>, logger: &L,
6359+
payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
6360+
logger: &L,
63596361
) -> UpdateFulfillCommitFetch
63606362
where
63616363
L::Target: Logger,
63626364
{
63636365
let release_cs_monitor = self.context.blocked_monitor_updates.is_empty();
6364-
match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, logger) {
6366+
match self.get_update_fulfill_htlc(
6367+
htlc_id,
6368+
payment_preimage,
6369+
payment_info,
6370+
attribution_data,
6371+
logger,
6372+
) {
63656373
UpdateFulfillFetch::NewClaim {
63666374
mut monitor_update,
63676375
htlc_value_msat,
@@ -6693,7 +6701,7 @@ where
66936701

66946702
pub fn update_fulfill_htlc(
66956703
&mut self, msg: &msgs::UpdateFulfillHTLC,
6696-
) -> Result<(HTLCSource, u64, Option<u64>), ChannelError> {
6704+
) -> Result<(HTLCSource, u64, Option<u64>, Option<Duration>), ChannelError> {
66976705
if self.context.channel_state.is_remote_stfu_sent()
66986706
|| self.context.channel_state.is_quiescent()
66996707
{
@@ -6713,8 +6721,9 @@ where
67136721
}
67146722

67156723
let outcome = OutboundHTLCOutcome::Success(msg.payment_preimage);
6716-
self.mark_outbound_htlc_removed(msg.htlc_id, outcome)
6717-
.map(|htlc| (htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat))
6724+
self.mark_outbound_htlc_removed(msg.htlc_id, outcome).map(|htlc| {
6725+
(htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat, htlc.send_timestamp)
6726+
})
67186727
}
67196728

67206729
#[rustfmt::skip]
@@ -7276,7 +7285,11 @@ where
72767285
}
72777286
None
72787287
},
7279-
&HTLCUpdateAwaitingACK::ClaimHTLC { ref payment_preimage, htlc_id, .. } => {
7288+
&HTLCUpdateAwaitingACK::ClaimHTLC {
7289+
ref payment_preimage,
7290+
htlc_id,
7291+
ref attribution_data,
7292+
} => {
72807293
// If an HTLC claim was previously added to the holding cell (via
72817294
// `get_update_fulfill_htlc`, then generating the claim message itself must
72827295
// not fail - any in between attempts to claim the HTLC will have resulted
@@ -7289,8 +7302,13 @@ where
72897302
// We do not bother to track and include `payment_info` here, however.
72907303
let mut additional_monitor_update =
72917304
if let UpdateFulfillFetch::NewClaim { monitor_update, .. } = self
7292-
.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, logger)
7293-
{
7305+
.get_update_fulfill_htlc(
7306+
htlc_id,
7307+
*payment_preimage,
7308+
None,
7309+
attribution_data.clone(),
7310+
logger,
7311+
) {
72947312
monitor_update
72957313
} else {
72967314
unreachable!()
@@ -13603,7 +13621,7 @@ where
1360313621
}
1360413622
}
1360513623

13606-
fn duration_since_epoch() -> Option<Duration> {
13624+
pub(crate) fn duration_since_epoch() -> Option<Duration> {
1360713625
#[cfg(not(feature = "std"))]
1360813626
let now = None;
1360913627

@@ -13619,7 +13637,7 @@ fn duration_since_epoch() -> Option<Duration> {
1361913637

1362013638
/// Returns the time expressed in hold time units (1 unit = 100 ms) that has elapsed between send_timestamp and now. If
1362113639
/// any of the arguments are `None`, returns `None`.
13622-
fn hold_time(send_timestamp: Option<Duration>, now: Option<Duration>) -> Option<u32> {
13640+
pub(crate) fn hold_time(send_timestamp: Option<Duration>, now: Option<Duration>) -> Option<u32> {
1362313641
send_timestamp.and_then(|t| {
1362413642
now.map(|now| {
1362513643
let elapsed = now.saturating_sub(t).as_millis() / HOLD_TIME_UNIT_MILLIS;

lightning/src/ln/channelmanager.rs

Lines changed: 56 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -58,12 +58,12 @@ use crate::events::{
5858
use crate::events::{FundingInfo, PaidBolt12Invoice};
5959
// Since this struct is returned in `list_channels` methods, expose it here in case users want to
6060
// construct one themselves.
61-
use crate::ln::channel::PendingV2Channel;
6261
use crate::ln::channel::{
63-
self, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
62+
self, hold_time, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
6463
OutboundV1Channel, ReconnectionMsg, ShutdownResult, UpdateFulfillCommitFetch,
6564
WithChannelContext,
6665
};
66+
use crate::ln::channel::{duration_since_epoch, PendingV2Channel};
6767
use crate::ln::channel_state::ChannelDetails;
6868
use crate::ln::inbound_payment;
6969
use crate::ln::msgs;
@@ -77,6 +77,7 @@ use crate::ln::onion_payment::{
7777
NextPacketDetails,
7878
};
7979
use crate::ln::onion_utils::{self};
80+
use crate::ln::onion_utils::{process_fulfill_attribution_data, AttributionData};
8081
use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
8182
use crate::ln::our_peer_storage::EncryptedOurPeerStorage;
8283
#[cfg(test)]
@@ -7671,10 +7672,20 @@ where
76717672
pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
76727673
}
76737674
});
7675+
7676+
// Create new attribution data as the final hop. Always report a zero hold time, because reporting a
7677+
// non-zero value will not make a difference in the penalty that may be applied by the sender.
7678+
let attribution_data = process_fulfill_attribution_data(
7679+
None,
7680+
&htlc.prev_hop.incoming_packet_shared_secret,
7681+
0,
7682+
);
7683+
76747684
self.claim_funds_from_hop(
76757685
htlc.prev_hop,
76767686
payment_preimage,
76777687
payment_info.clone(),
7688+
Some(attribution_data),
76787689
|_, definitely_duplicate| {
76797690
debug_assert!(
76807691
!definitely_duplicate,
@@ -7719,7 +7730,8 @@ where
77197730
) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
77207731
>(
77217732
&self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
7722-
payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
7733+
payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
7734+
completion_action: ComplFunc,
77237735
) {
77247736
let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
77257737
let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7732,7 +7744,13 @@ where
77327744
channel_id: prev_hop.channel_id,
77337745
htlc_id: prev_hop.htlc_id,
77347746
};
7735-
self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
7747+
self.claim_mpp_part(
7748+
htlc_source,
7749+
payment_preimage,
7750+
payment_info,
7751+
attribution_data,
7752+
completion_action,
7753+
)
77367754
}
77377755

77387756
fn claim_mpp_part<
@@ -7742,7 +7760,8 @@ where
77427760
) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
77437761
>(
77447762
&self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
7745-
payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
7763+
payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
7764+
completion_action: ComplFunc,
77467765
) {
77477766
//TODO: Delay the claimed_funds relaying just like we do outbound relay!
77487767

@@ -7783,6 +7802,7 @@ where
77837802
prev_hop.htlc_id,
77847803
payment_preimage,
77857804
payment_info,
7805+
attribution_data,
77867806
&&logger,
77877807
);
77887808

@@ -7991,7 +8011,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
79918011
forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
79928012
startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
79938013
next_channel_outpoint: OutPoint, next_channel_id: ChannelId,
7994-
next_user_channel_id: Option<u128>,
8014+
next_user_channel_id: Option<u128>, attribution_data: Option<&AttributionData>,
8015+
send_timestamp: Option<Duration>,
79958016
) {
79968017
match source {
79978018
HTLCSource::OutboundRoute {
@@ -8023,10 +8044,25 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
80238044
let prev_node_id = hop_data.counterparty_node_id;
80248045
let completed_blocker =
80258046
RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
8047+
8048+
// Obtain hold time, if available.
8049+
let now = duration_since_epoch();
8050+
let hold_time = hold_time(send_timestamp, now).unwrap_or(0);
8051+
8052+
// If attribution data was received from downstream, we shift it and get it ready for adding our hold
8053+
// time. Note that fulfilled HTLCs take a fast path to the incoming side. We don't need to wait for RAA
8054+
// to record the hold time like we do for failed HTLCs.
8055+
let attribution_data = process_fulfill_attribution_data(
8056+
attribution_data,
8057+
&hop_data.incoming_packet_shared_secret,
8058+
hold_time,
8059+
);
8060+
80268061
self.claim_funds_from_hop(
80278062
hop_data,
80288063
payment_preimage,
80298064
None,
8065+
Some(attribution_data),
80308066
|htlc_claim_value_msat, definitely_duplicate| {
80318067
let chan_to_release = Some(EventUnblockedChannel {
80328068
counterparty_node_id: next_channel_counterparty_node_id,
@@ -9584,7 +9620,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
95849620
) -> Result<(), MsgHandleErrInternal> {
95859621
let funding_txo;
95869622
let next_user_channel_id;
9587-
let (htlc_source, forwarded_htlc_value, skimmed_fee_msat) = {
9623+
let (htlc_source, forwarded_htlc_value, skimmed_fee_msat, send_timestamp) = {
95889624
let per_peer_state = self.per_peer_state.read().unwrap();
95899625
let peer_state_mutex = per_peer_state.get(counterparty_node_id).ok_or_else(|| {
95909626
debug_assert!(false);
@@ -9639,6 +9675,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
96399675
funding_txo,
96409676
msg.channel_id,
96419677
Some(next_user_channel_id),
9678+
msg.attribution_data.as_ref(),
9679+
send_timestamp,
96429680
);
96439681

96449682
Ok(())
@@ -10462,6 +10500,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1046210500
"Claiming HTLC with preimage {} from our monitor",
1046310501
preimage
1046410502
);
10503+
// Claim the funds from the previous hop, if there is one. Because this is in response to a
10504+
// chain event, no attribution data is available.
1046510505
self.claim_funds_internal(
1046610506
htlc_update.source,
1046710507
preimage,
@@ -10473,6 +10513,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1047310513
funding_outpoint,
1047410514
channel_id,
1047510515
None,
10516+
None,
10517+
None,
1047610518
);
1047710519
} else {
1047810520
log_trace!(
@@ -16281,10 +16323,14 @@ where
1628116323
// Note that we don't need to pass the `payment_info` here - its
1628216324
// already (clearly) durably on disk in the `ChannelMonitor` so there's
1628316325
// no need to worry about getting it into others.
16326+
//
16327+
// We don't encode any attribution data, because the required onion shared secret isn't
16328+
// available here.
1628416329
channel_manager.claim_mpp_part(
1628516330
part.into(),
1628616331
payment_preimage,
1628716332
None,
16333+
None,
1628816334
|_, _| {
1628916335
(
1629016336
Some(MonitorUpdateCompletionAction::PaymentClaimed {
@@ -16429,6 +16475,7 @@ where
1642916475
// We use `downstream_closed` in place of `from_onchain` here just as a guess - we
1643016476
// don't remember in the `ChannelMonitor` where we got a preimage from, but if the
1643116477
// channel is closed we just assume that it probably came from an on-chain claim.
16478+
// The same holds for attribution data. We don't have any, so we pass `None`.
1643216479
channel_manager.claim_funds_internal(
1643316480
source,
1643416481
preimage,
@@ -16440,6 +16487,8 @@ where
1644016487
downstream_funding,
1644116488
downstream_channel_id,
1644216489
None,
16490+
None,
16491+
None,
1644316492
);
1644416493
}
1644516494

lightning/src/ln/onion_utils.rs

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2814,6 +2814,24 @@ fn process_failure_packet(
28142814
update_attribution_data(onion_error, shared_secret, hold_time);
28152815
}
28162816

2817+
pub(crate) fn process_fulfill_attribution_data(
2818+
attribution_data: Option<&AttributionData>, shared_secret: &[u8], hold_time: u32,
2819+
) -> AttributionData {
2820+
let mut attribution_data =
2821+
attribution_data.map_or(AttributionData::new(), |attribution_data| {
2822+
let mut attribution_data = attribution_data.clone();
2823+
2824+
attribution_data.shift_right();
2825+
2826+
attribution_data
2827+
});
2828+
2829+
attribution_data.update(&[], &shared_secret, hold_time);
2830+
attribution_data.crypt(&shared_secret);
2831+
2832+
attribution_data
2833+
}
2834+
28172835
#[cfg(test)]
28182836
mod tests {
28192837
use core::iter;

0 commit comments

Comments
 (0)