
Commit 72414f2

Hold time reporting
Adds hold time reporting for the final and intermediate nodes.
1 parent ba7deab commit 72414f2

6 files changed, +130 -23 lines changed


lightning/src/ln/blinded_payment_tests.rs

Lines changed: 2 additions & 0 deletions
@@ -2333,6 +2333,8 @@ fn test_trampoline_unblinded_receive() {
 		None,
 	).unwrap();
 
+	// Use a different session key to construct the replacement onion packet. Note that the sender isn't aware of
+	// this and won't be able to decode the fulfill hold times.
 	let outer_session_priv = secret_from_hex("e52c20461ed7acd46c4e7b591a37610519179482887bd73bf3b94617f8f03677");
 
 	let (outer_payloads, _, _) = onion_utils::build_onion_payloads(&route.paths[0], outer_total_msat, &recipient_onion_fields, outer_starting_htlc_offset, &None, None, Some(trampoline_packet)).unwrap();

lightning/src/ln/chanmon_update_fail_tests.rs

Lines changed: 4 additions & 1 deletion
@@ -2890,7 +2890,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
 		as_raa = Some(get_event_msg!(nodes[0], MessageSendEvent::SendRevokeAndACK, node_b_id));
 	}
 
-	let fulfill_msg = msgs::UpdateFulfillHTLC {
+	let mut fulfill_msg = msgs::UpdateFulfillHTLC {
 		channel_id: chan_id_2,
 		htlc_id: 0,
 		payment_preimage,
@@ -2904,6 +2904,8 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
 		);
 		check_added_monitors!(nodes[2], 1);
 		get_htlc_update_msgs!(nodes[2], node_b_id);
+		// Note that we don't populate fulfill_msg.attribution_data here, which will lead to hold times being
+		// unavailable.
 	} else {
 		nodes[2].node.claim_funds(payment_preimage);
 		check_added_monitors!(nodes[2], 1);
@@ -2919,6 +2921,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
 			fulfill_msg.payment_preimage,
 			cs_updates.update_fulfill_htlcs[0].payment_preimage
 		);
+		fulfill_msg.attribution_data = cs_updates.update_fulfill_htlcs[0].attribution_data.clone();
 	}
 	nodes[1].node.handle_update_fulfill_htlc(node_c_id, &fulfill_msg);
 	expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);

lightning/src/ln/channel.rs

Lines changed: 32 additions & 14 deletions
@@ -6148,7 +6148,7 @@ where
 		assert!(!self.context.channel_state.can_generate_new_commitment());
 		let mon_update_id = self.context.latest_monitor_update_id; // Forget the ChannelMonitor update
 		let fulfill_resp =
-			self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, logger);
+			self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, None, logger);
 		self.context.latest_monitor_update_id = mon_update_id;
 		if let UpdateFulfillFetch::NewClaim { update_blocked, .. } = fulfill_resp {
 			assert!(update_blocked); // The HTLC must have ended up in the holding cell.
@@ -6157,7 +6157,8 @@ where
 
 	fn get_update_fulfill_htlc<L: Deref>(
 		&mut self, htlc_id_arg: u64, payment_preimage_arg: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, logger: &L,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
+		logger: &L,
 	) -> UpdateFulfillFetch
 	where
 		L::Target: Logger,
@@ -6272,7 +6273,7 @@ where
 			self.context.holding_cell_htlc_updates.push(HTLCUpdateAwaitingACK::ClaimHTLC {
 				payment_preimage: payment_preimage_arg,
 				htlc_id: htlc_id_arg,
-				attribution_data: None,
+				attribution_data,
 			});
 			return UpdateFulfillFetch::NewClaim {
 				monitor_update,
@@ -6303,7 +6304,7 @@ where
 			);
 			htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(
 				payment_preimage_arg.clone(),
-				None,
+				attribution_data,
 			));
 		}
 
@@ -6312,13 +6313,20 @@ where
 
 	pub fn get_update_fulfill_htlc_and_commit<L: Deref>(
 		&mut self, htlc_id: u64, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, logger: &L,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
+		logger: &L,
 	) -> UpdateFulfillCommitFetch
 	where
 		L::Target: Logger,
 	{
 		let release_cs_monitor = self.context.blocked_monitor_updates.is_empty();
-		match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, logger) {
+		match self.get_update_fulfill_htlc(
+			htlc_id,
+			payment_preimage,
+			payment_info,
+			attribution_data,
+			logger,
+		) {
 			UpdateFulfillFetch::NewClaim {
 				mut monitor_update,
 				htlc_value_msat,
@@ -6650,7 +6658,7 @@ where
 
 	pub fn update_fulfill_htlc(
 		&mut self, msg: &msgs::UpdateFulfillHTLC,
-	) -> Result<(HTLCSource, u64, Option<u64>), ChannelError> {
+	) -> Result<(HTLCSource, u64, Option<u64>, Option<Duration>), ChannelError> {
 		if self.context.channel_state.is_remote_stfu_sent()
 			|| self.context.channel_state.is_quiescent()
 		{
@@ -6670,8 +6678,9 @@ where
 		}
 
 		let outcome = OutboundHTLCOutcome::Success(msg.payment_preimage);
-		self.mark_outbound_htlc_removed(msg.htlc_id, outcome)
-			.map(|htlc| (htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat))
+		self.mark_outbound_htlc_removed(msg.htlc_id, outcome).map(|htlc| {
+			(htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat, htlc.send_timestamp)
+		})
 	}
 
 	#[rustfmt::skip]
@@ -7231,7 +7240,11 @@ where
 					}
 					None
 				},
-				&HTLCUpdateAwaitingACK::ClaimHTLC { ref payment_preimage, htlc_id, .. } => {
+				&HTLCUpdateAwaitingACK::ClaimHTLC {
+					ref payment_preimage,
+					htlc_id,
+					ref attribution_data,
+				} => {
 					// If an HTLC claim was previously added to the holding cell (via
 					// `get_update_fulfill_htlc`, then generating the claim message itself must
 					// not fail - any in between attempts to claim the HTLC will have resulted
@@ -7242,8 +7255,13 @@ where
 					// `ChannelMonitorUpdate` to the user, making this one redundant, however
 					// there's no harm in including the extra `ChannelMonitorUpdateStep` here.
 					// We do not bother to track and include `payment_info` here, however.
-					let fulfill =
-						self.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, logger);
+					let fulfill = self.get_update_fulfill_htlc(
+						htlc_id,
+						*payment_preimage,
+						None,
+						attribution_data.clone(),
+						logger,
+					);
 					let mut additional_monitor_update =
 						if let UpdateFulfillFetch::NewClaim { monitor_update, .. } = fulfill {
 							monitor_update
@@ -13540,7 +13558,7 @@ where
 	}
 }
 
-fn duration_since_epoch() -> Option<Duration> {
+pub(crate) fn duration_since_epoch() -> Option<Duration> {
 	#[cfg(not(feature = "std"))]
 	let now = None;
 
@@ -13556,7 +13574,7 @@ fn duration_since_epoch() -> Option<Duration> {
 
 /// Returns the time expressed in hold time units (1 unit = 100 ms) that has elapsed between send_timestamp and now. If
 /// any of the arguments are `None`, returns `None`.
-fn hold_time(send_timestamp: Option<Duration>, now: Option<Duration>) -> Option<u32> {
+pub(crate) fn hold_time(send_timestamp: Option<Duration>, now: Option<Duration>) -> Option<u32> {
 	send_timestamp.and_then(|t| {
 		now.map(|now| {
 			let elapsed = now.saturating_sub(t).as_millis() / HOLD_TIME_UNIT_MILLIS;
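
For reference, a minimal self-contained sketch of the hold-time conversion in the last hunk. The diff cuts off after the elapsed-time division, so the final clamp to u32 is an assumption here, as is the HOLD_TIME_UNIT_MILLIS value (derived from the doc comment's "1 unit = 100 ms"):

use std::time::Duration;

// Assumed value, matching the doc comment above (1 hold time unit = 100 ms).
const HOLD_TIME_UNIT_MILLIS: u128 = 100;

fn hold_time(send_timestamp: Option<Duration>, now: Option<Duration>) -> Option<u32> {
    send_timestamp.and_then(|t| {
        now.map(|now| {
            // saturating_sub guards against clock skew producing a negative elapsed time.
            let elapsed = now.saturating_sub(t).as_millis() / HOLD_TIME_UNIT_MILLIS;
            // Assumed: the truncated hunk presumably clamps the u128 into a u32.
            u32::try_from(elapsed).unwrap_or(u32::MAX)
        })
    })
}

fn main() {
    // HTLC sent at t = 1000 ms, fulfill seen at t = 1350 ms: 3 full 100 ms units.
    assert_eq!(
        hold_time(Some(Duration::from_millis(1_000)), Some(Duration::from_millis(1_350))),
        Some(3)
    );
    // A missing timestamp (e.g. a no-std build, where duration_since_epoch() is None) yields None.
    assert_eq!(hold_time(None, Some(Duration::from_millis(1_350))), None);
}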

lightning/src/ln/channelmanager.rs

Lines changed: 66 additions & 7 deletions
@@ -58,12 +58,12 @@ use crate::events::{
 use crate::events::{FundingInfo, PaidBolt12Invoice};
 // Since this struct is returned in `list_channels` methods, expose it here in case users want to
 // construct one themselves.
-use crate::ln::channel::PendingV2Channel;
 use crate::ln::channel::{
-	self, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
+	self, hold_time, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
 	OutboundV1Channel, ReconnectionMsg, ShutdownResult, UpdateFulfillCommitFetch,
 	WithChannelContext,
 };
+use crate::ln::channel::{duration_since_epoch, PendingV2Channel};
 use crate::ln::channel_state::ChannelDetails;
 use crate::ln::inbound_payment;
 use crate::ln::msgs;
@@ -77,6 +77,7 @@ use crate::ln::onion_payment::{
 	NextPacketDetails,
 };
 use crate::ln::onion_utils::{self};
+use crate::ln::onion_utils::{process_fulfill_attribution_data, AttributionData};
 use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
 use crate::ln::our_peer_storage::EncryptedOurPeerStorage;
 #[cfg(test)]
@@ -7938,10 +7939,30 @@
 					pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
 				}
 			});
+
+			// Create new attribution data as the final hop. Always report a zero hold time, because reporting a
+			// non-zero value will not make a difference in the penalty that may be applied by the sender. If there
+			// is a phantom hop, we need to double-process.
+			let attribution_data =
+				if let Some(phantom_secret) = htlc.prev_hop.phantom_shared_secret {
+					let attribution_data =
+						process_fulfill_attribution_data(None, &phantom_secret, 0);
+					Some(attribution_data)
+				} else {
+					None
+				};
+
+			let attribution_data = process_fulfill_attribution_data(
+				attribution_data.as_ref(),
+				&htlc.prev_hop.incoming_packet_shared_secret,
+				0,
+			);
+
 			self.claim_funds_from_hop(
 				htlc.prev_hop,
 				payment_preimage,
 				payment_info.clone(),
+				Some(attribution_data),
 				|_, definitely_duplicate| {
 					debug_assert!(
 						!definitely_duplicate,
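
To make the double-processing above concrete, here is a toy model of how each processing step layers a hold time onto the data arriving from downstream. This is not LDK's real process_fulfill_attribution_data, which shifts a fixed-size payload and encrypts it with the hop's onion shared secret; the list-of-hold-times representation below is purely illustrative:

// Toy stand-in for AttributionData; the real structure is fixed-size,
// encrypted, and HMAC'd so the sender can authenticate each hop's report.
#[derive(Clone, Default, Debug, PartialEq)]
struct ToyAttributionData {
    hold_times: Vec<u32>, // hold time units, outermost hop first
}

// Toy stand-in for process_fulfill_attribution_data: prepend our own hold
// time to whatever arrived from downstream.
fn toy_process(
    downstream: Option<&ToyAttributionData>, _shared_secret: &[u8; 32], hold_time: u32,
) -> ToyAttributionData {
    let mut data = downstream.cloned().unwrap_or_default();
    data.hold_times.insert(0, hold_time);
    data
}

fn main() {
    let phantom_secret = [1u8; 32];
    let incoming_secret = [2u8; 32];
    // A phantom recipient processes once with the phantom secret, then the real
    // node processes again with the incoming packet's secret, both reporting
    // zero hold time, mirroring the hunk above.
    let inner = toy_process(None, &phantom_secret, 0);
    let outer = toy_process(Some(&inner), &incoming_secret, 0);
    assert_eq!(outer.hold_times, vec![0, 0]);
}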
@@ -7986,7 +8007,8 @@
 		) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
 	>(
 		&self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
+		completion_action: ComplFunc,
 	) {
 		let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
 			let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7999,7 +8021,13 @@
 			channel_id: prev_hop.channel_id,
 			htlc_id: prev_hop.htlc_id,
 		};
-		self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
+		self.claim_mpp_part(
+			htlc_source,
+			payment_preimage,
+			payment_info,
+			attribution_data,
+			completion_action,
+		)
 	}
 
 	fn claim_mpp_part<
@@ -8009,7 +8037,8 @@
 		) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
 	>(
 		&self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
+		completion_action: ComplFunc,
 	) {
 		//TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
@@ -8050,6 +8079,7 @@
 						prev_hop.htlc_id,
 						payment_preimage,
 						payment_info,
+						attribution_data,
 						&&logger,
 					);
 
@@ -8258,7 +8288,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
 		startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
 		next_channel_outpoint: OutPoint, next_channel_id: ChannelId,
-		next_user_channel_id: Option<u128>,
+		next_user_channel_id: Option<u128>, attribution_data: Option<&AttributionData>,
+		send_timestamp: Option<Duration>,
 	) {
 		match source {
 			HTLCSource::OutboundRoute {
@@ -8290,10 +8321,25 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 				let prev_node_id = hop_data.counterparty_node_id;
 				let completed_blocker =
 					RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
+
+				// Obtain hold time, if available.
+				let now = duration_since_epoch();
+				let hold_time = hold_time(send_timestamp, now).unwrap_or(0);
+
+				// If attribution data was received from downstream, we shift it and get it ready for adding our hold
+				// time. Note that fulfilled HTLCs take a fast path to the incoming side. We don't need to wait for RAA
+				// to record the hold time like we do for failed HTLCs.
+				let attribution_data = process_fulfill_attribution_data(
+					attribution_data,
+					&hop_data.incoming_packet_shared_secret,
+					hold_time,
+				);
+
 				self.claim_funds_from_hop(
 					hop_data,
 					payment_preimage,
 					None,
+					Some(attribution_data),
 					|htlc_claim_value_msat, definitely_duplicate| {
 						let chan_to_release = Some(EventUnblockedChannel {
 							counterparty_node_id: next_channel_counterparty_node_id,
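
A worked example of the forwarding-hop arithmetic in the hunk above, under the same 100 ms unit assumption as the earlier sketch: the node recorded send_timestamp when it forwarded the HTLC and measures its own hold time once the fulfill returns.

use std::time::Duration;

fn main() {
    // HTLC forwarded downstream at t = 1_000_000 ms; the fulfill returns at
    // t = 1_001_250 ms, i.e. 1250 ms later -> 12 full 100 ms units.
    let send_timestamp = Some(Duration::from_millis(1_000_000));
    let now = Some(Duration::from_millis(1_001_250));
    let hold_time = send_timestamp
        .zip(now)
        .map(|(t, n)| (n.saturating_sub(t).as_millis() / 100) as u32)
        // Mirrors `hold_time(send_timestamp, now).unwrap_or(0)` above: report
        // zero when either timestamp is unavailable.
        .unwrap_or(0);
    assert_eq!(hold_time, 12);
    // This value is then folded into the attribution data received from
    // downstream before the claim is passed on to the previous hop.
}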
@@ -9852,7 +9898,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 	) -> Result<(), MsgHandleErrInternal> {
 		let funding_txo;
 		let next_user_channel_id;
-		let (htlc_source, forwarded_htlc_value, skimmed_fee_msat) = {
+		let (htlc_source, forwarded_htlc_value, skimmed_fee_msat, send_timestamp) = {
 			let per_peer_state = self.per_peer_state.read().unwrap();
 			let peer_state_mutex = per_peer_state.get(counterparty_node_id).ok_or_else(|| {
 				debug_assert!(false);
@@ -9907,6 +9953,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 			funding_txo,
 			msg.channel_id,
 			Some(next_user_channel_id),
+			msg.attribution_data.as_ref(),
+			send_timestamp,
 		);
 
 		Ok(())
@@ -10800,6 +10848,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 						"Claiming HTLC with preimage {} from our monitor",
 						preimage
 					);
+					// Claim the funds from the previous hop, if there is one. Because this is in response to a
+					// chain event, no attribution data is available.
 					self.claim_funds_internal(
 						htlc_update.source,
 						preimage,
@@ -10811,6 +10861,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 						funding_outpoint,
 						channel_id,
 						None,
+						None,
+						None,
 					);
 				} else {
 					log_trace!(
@@ -16677,10 +16729,14 @@ where
 							// Note that we don't need to pass the `payment_info` here - its
 							// already (clearly) durably on disk in the `ChannelMonitor` so there's
 							// no need to worry about getting it into others.
+							//
+							// We don't encode any attribution data, because the required onion shared secret isn't
+							// available here.
 							channel_manager.claim_mpp_part(
 								part.into(),
 								payment_preimage,
 								None,
+								None,
 								|_, _| {
 									(
 										Some(MonitorUpdateCompletionAction::PaymentClaimed {
@@ -16825,6 +16881,7 @@ where
 				// We use `downstream_closed` in place of `from_onchain` here just as a guess - we
 				// don't remember in the `ChannelMonitor` where we got a preimage from, but if the
 				// channel is closed we just assume that it probably came from an on-chain claim.
+				// The same holds for attribution data. We don't have any, so we pass an empty one.
 				channel_manager.claim_funds_internal(
 					source,
 					preimage,
@@ -16836,6 +16893,8 @@ where
 					downstream_funding,
 					downstream_channel_id,
 					None,
+					None,
+					None,
 				);
 			}
 

lightning/src/ln/functional_tests.rs

Lines changed: 2 additions & 1 deletion
@@ -7748,7 +7748,8 @@ pub fn test_onion_value_mpp_set_calculation() {
 		assert_eq!(node.node.get_our_node_id(), payment_event.node_id);
 
 		if idx == 0 {
-			// routing node
+			// Manipulate the onion packet for the routing node. Note that we pick a dummy session_priv here. The sender
+			// won't be able to decode fulfill attribution data.
 			let session_priv = [3; 32];
 			let height = nodes[0].best_block_info().1;
 			let session_priv = SecretKey::from_slice(&session_priv).unwrap();
