Commit 4861747

Hold time reporting

Adds hold time reporting for the final and intermediate nodes.

1 parent d8ae3d0 commit 4861747
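
The mechanism at a glance: the final node writes a zero hold time into the fulfill attribution data, and every intermediate node folds in the time that elapsed since it forwarded the HTLC downstream. A hedged sketch of that chain, using the helper names that appear in the diffs below:

    // Final hop: always report zero. A non-zero value would not change the penalty
    // the sender may apply to the recipient.
    let ad = process_fulfill_attribution_data(None, &incoming_packet_shared_secret, 0);

    // Each intermediate hop, when handling update_fulfill_htlc from downstream:
    let units = hold_time(send_timestamp, duration_since_epoch()).unwrap_or(0);
    let ad = process_fulfill_attribution_data(Some(&ad), &incoming_packet_shared_secret, units);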

File tree

6 files changed: +130 −23 lines changed


lightning/src/ln/blinded_payment_tests.rs

Lines changed: 2 additions & 0 deletions
@@ -2333,6 +2333,8 @@ fn test_trampoline_unblinded_receive() {
         None,
     ).unwrap();
 
+    // Use a different session key to construct the replacement onion packet. Note that the sender isn't aware of
+    // this and won't be able to decode the fulfill hold times.
     let outer_session_priv = secret_from_hex("e52c20461ed7acd46c4e7b591a37610519179482887bd73bf3b94617f8f03677");
 
     let (outer_payloads, _, _) = onion_utils::build_onion_payloads(&route.paths[0], outer_total_msat, &recipient_onion_fields, outer_starting_htlc_offset, &None, None, Some(trampoline_packet)).unwrap();

lightning/src/ln/chanmon_update_fail_tests.rs

Lines changed: 4 additions & 1 deletion
@@ -2887,7 +2887,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
         as_raa = Some(get_event_msg!(nodes[0], MessageSendEvent::SendRevokeAndACK, node_b_id));
     }
 
-    let fulfill_msg = msgs::UpdateFulfillHTLC {
+    let mut fulfill_msg = msgs::UpdateFulfillHTLC {
         channel_id: chan_id_2,
         htlc_id: 0,
         payment_preimage,
@@ -2901,6 +2901,8 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
         );
         check_added_monitors!(nodes[2], 1);
         get_htlc_update_msgs!(nodes[2], node_b_id);
+        // Note that we don't populate fulfill_msg.attribution_data here, which will lead to hold times being
+        // unavailable.
     } else {
         nodes[2].node.claim_funds(payment_preimage);
         check_added_monitors!(nodes[2], 1);
@@ -2916,6 +2918,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
             fulfill_msg.payment_preimage,
             cs_updates.update_fulfill_htlcs[0].payment_preimage
         );
+        fulfill_msg.attribution_data = cs_updates.update_fulfill_htlcs[0].attribution_data.clone();
     }
     nodes[1].node.handle_update_fulfill_htlc(node_c_id, &fulfill_msg);
     expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
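
Taken together, the test drives both sides of the new field. A minimal sketch of the flow (hedged; the real struct literal in the test may initialize further fields):

    // Hand-construct a duplicate fulfill. Leaving attribution_data unset means the
    // upstream node cannot compute hold times for this claim.
    let mut fulfill_msg = msgs::UpdateFulfillHTLC {
        channel_id: chan_id_2,
        htlc_id: 0,
        payment_preimage,
        attribution_data: None,
    };
    // When relaying the real claim instead, copy the attribution data across so the
    // hold-time chain stays intact end to end.
    fulfill_msg.attribution_data = cs_updates.update_fulfill_htlcs[0].attribution_data.clone();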

lightning/src/ln/channel.rs

Lines changed: 32 additions & 14 deletions
@@ -6161,7 +6161,7 @@
         assert!(!self.context.channel_state.can_generate_new_commitment());
         let mon_update_id = self.context.latest_monitor_update_id; // Forget the ChannelMonitor update
         let fulfill_resp =
-            self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, logger);
+            self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, None, logger);
         self.context.latest_monitor_update_id = mon_update_id;
         if let UpdateFulfillFetch::NewClaim { update_blocked, .. } = fulfill_resp {
             assert!(update_blocked); // The HTLC must have ended up in the holding cell.
@@ -6170,7 +6170,8 @@
 
     fn get_update_fulfill_htlc<L: Deref>(
         &mut self, htlc_id_arg: u64, payment_preimage_arg: PaymentPreimage,
-        payment_info: Option<PaymentClaimDetails>, logger: &L,
+        payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
+        logger: &L,
     ) -> UpdateFulfillFetch
     where
         L::Target: Logger,
@@ -6285,7 +6286,7 @@
             self.context.holding_cell_htlc_updates.push(HTLCUpdateAwaitingACK::ClaimHTLC {
                 payment_preimage: payment_preimage_arg,
                 htlc_id: htlc_id_arg,
-                attribution_data: None,
+                attribution_data,
             });
             return UpdateFulfillFetch::NewClaim {
                 monitor_update,
@@ -6316,7 +6317,7 @@
             );
             htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(
                 payment_preimage_arg.clone(),
-                None,
+                attribution_data,
             ));
         }
 
@@ -6325,13 +6326,20 @@
 
     pub fn get_update_fulfill_htlc_and_commit<L: Deref>(
         &mut self, htlc_id: u64, payment_preimage: PaymentPreimage,
-        payment_info: Option<PaymentClaimDetails>, logger: &L,
+        payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
+        logger: &L,
     ) -> UpdateFulfillCommitFetch
     where
         L::Target: Logger,
     {
         let release_cs_monitor = self.context.blocked_monitor_updates.is_empty();
-        match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, logger) {
+        match self.get_update_fulfill_htlc(
+            htlc_id,
+            payment_preimage,
+            payment_info,
+            attribution_data,
+            logger,
+        ) {
             UpdateFulfillFetch::NewClaim {
                 mut monitor_update,
                 htlc_value_msat,
@@ -6663,7 +6671,7 @@
 
     pub fn update_fulfill_htlc(
         &mut self, msg: &msgs::UpdateFulfillHTLC,
-    ) -> Result<(HTLCSource, u64, Option<u64>), ChannelError> {
+    ) -> Result<(HTLCSource, u64, Option<u64>, Option<Duration>), ChannelError> {
         if self.context.channel_state.is_remote_stfu_sent()
             || self.context.channel_state.is_quiescent()
         {
@@ -6683,8 +6691,9 @@
         }
 
         let outcome = OutboundHTLCOutcome::Success(msg.payment_preimage);
-        self.mark_outbound_htlc_removed(msg.htlc_id, outcome)
-            .map(|htlc| (htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat))
+        self.mark_outbound_htlc_removed(msg.htlc_id, outcome).map(|htlc| {
+            (htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat, htlc.send_timestamp)
+        })
     }
 
     #[rustfmt::skip]
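
The added tuple element surfaces the HTLC's send timestamp to the caller. A hedged sketch of the consuming side, mirroring the `ChannelManager` change later in this commit:

    // `send_timestamp` records when this hop forwarded the HTLC downstream; combined
    // with the current time it yields this hop's hold time for attribution data.
    let (htlc_source, forwarded_htlc_value, skimmed_fee_msat, send_timestamp) =
        chan.update_fulfill_htlc(&msg)?;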
@@ -7246,7 +7255,11 @@
                 }
                 None
             },
-            &HTLCUpdateAwaitingACK::ClaimHTLC { ref payment_preimage, htlc_id, .. } => {
+            &HTLCUpdateAwaitingACK::ClaimHTLC {
+                ref payment_preimage,
+                htlc_id,
+                ref attribution_data,
+            } => {
                 // If an HTLC claim was previously added to the holding cell (via
                 // `get_update_fulfill_htlc`, then generating the claim message itself must
                 // not fail - any in between attempts to claim the HTLC will have resulted
@@ -7257,8 +7270,13 @@
                 // `ChannelMonitorUpdate` to the user, making this one redundant, however
                 // there's no harm in including the extra `ChannelMonitorUpdateStep` here.
                 // We do not bother to track and include `payment_info` here, however.
-                let fulfill =
-                    self.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, logger);
+                let fulfill = self.get_update_fulfill_htlc(
+                    htlc_id,
+                    *payment_preimage,
+                    None,
+                    attribution_data.clone(),
+                    logger,
+                );
                 let mut additional_monitor_update =
                     if let UpdateFulfillFetch::NewClaim { monitor_update, .. } = fulfill {
                         monitor_update
@@ -13570,7 +13588,7 @@
     }
 }
 
-fn duration_since_epoch() -> Option<Duration> {
+pub(crate) fn duration_since_epoch() -> Option<Duration> {
     #[cfg(not(feature = "std"))]
     let now = None;
 
@@ -13586,7 +13604,7 @@ fn duration_since_epoch() -> Option<Duration> {
 
 /// Returns the time expressed in hold time units (1 unit = 100 ms) that has elapsed between send_timestamp and now. If
 /// any of the arguments are `None`, returns `None`.
-fn hold_time(send_timestamp: Option<Duration>, now: Option<Duration>) -> Option<u32> {
+pub(crate) fn hold_time(send_timestamp: Option<Duration>, now: Option<Duration>) -> Option<u32> {
     send_timestamp.and_then(|t| {
         now.map(|now| {
             let elapsed = now.saturating_sub(t).as_millis() / HOLD_TIME_UNIT_MILLIS;
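
The body of `hold_time` is truncated in the hunk above; what follows is a self-contained sketch of the conversion, assuming the elapsed units are clamped into `u32` (the clamping detail is an assumption, not visible in this diff):

    use std::time::Duration;

    const HOLD_TIME_UNIT_MILLIS: u128 = 100; // 1 hold time unit = 100 ms

    fn hold_time(send_timestamp: Option<Duration>, now: Option<Duration>) -> Option<u32> {
        send_timestamp.and_then(|t| {
            now.map(|now| {
                let elapsed = now.saturating_sub(t).as_millis() / HOLD_TIME_UNIT_MILLIS;
                u32::try_from(elapsed).unwrap_or(u32::MAX)
            })
        })
    }

    #[test]
    fn hold_time_units() {
        // An HTLC sent at t = 1000 ms and fulfilled at t = 1250 ms reports 2 units.
        let sent = Some(Duration::from_millis(1000));
        let now = Some(Duration::from_millis(1250));
        assert_eq!(hold_time(sent, now), Some(2));
    }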

lightning/src/ln/channelmanager.rs

Lines changed: 66 additions & 7 deletions
@@ -58,12 +58,12 @@ use crate::events::{
 use crate::events::{FundingInfo, PaidBolt12Invoice};
 // Since this struct is returned in `list_channels` methods, expose it here in case users want to
 // construct one themselves.
-use crate::ln::channel::PendingV2Channel;
 use crate::ln::channel::{
-    self, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
+    self, hold_time, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
     OutboundV1Channel, ReconnectionMsg, ShutdownResult, UpdateFulfillCommitFetch,
     WithChannelContext,
 };
+use crate::ln::channel::{duration_since_epoch, PendingV2Channel};
 use crate::ln::channel_state::ChannelDetails;
 use crate::ln::inbound_payment;
 use crate::ln::msgs;
@@ -77,6 +77,7 @@ use crate::ln::onion_payment::{
     NextPacketDetails,
 };
 use crate::ln::onion_utils::{self};
+use crate::ln::onion_utils::{process_fulfill_attribution_data, AttributionData};
 use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
 use crate::ln::our_peer_storage::EncryptedOurPeerStorage;
 #[cfg(test)]
@@ -7691,10 +7692,30 @@
                     pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
                 }
             });
+
+            // Create new attribution data as the final hop. Always report a zero hold time, because reporting a
+            // non-zero value will not make a difference in the penalty that may be applied by the sender. If there
+            // is a phantom hop, we need to double-process.
+            let attribution_data =
+                if let Some(phantom_secret) = htlc.prev_hop.phantom_shared_secret {
+                    let attribution_data =
+                        process_fulfill_attribution_data(None, &phantom_secret, 0);
+                    Some(attribution_data)
+                } else {
+                    None
+                };
+
+            let attribution_data = process_fulfill_attribution_data(
+                attribution_data.as_ref(),
+                &htlc.prev_hop.incoming_packet_shared_secret,
+                0,
+            );
+
             self.claim_funds_from_hop(
                 htlc.prev_hop,
                 payment_preimage,
                 payment_info.clone(),
+                Some(attribution_data),
                 |_, definitely_duplicate| {
                     debug_assert!(
                         !definitely_duplicate,
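
For a phantom payment, the layering above amounts to two nested processing passes, both reporting a zero hold time (a hedged sketch using the same helper as in the hunk):

    // Inner layer: the phantom "hop" is keyed by the phantom shared secret.
    let inner = process_fulfill_attribution_data(None, &phantom_secret, 0);
    // Outer layer: processed again with the incoming onion packet's shared secret,
    // so the sender can peel both layers when decoding hold times.
    let attribution_data = process_fulfill_attribution_data(
        Some(&inner),
        &htlc.prev_hop.incoming_packet_shared_secret,
        0,
    );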
@@ -7739,7 +7760,8 @@
         ) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
     >(
         &self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
-        payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+        payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
+        completion_action: ComplFunc,
     ) {
         let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
             let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7752,7 +7774,13 @@
             channel_id: prev_hop.channel_id,
             htlc_id: prev_hop.htlc_id,
         };
-        self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
+        self.claim_mpp_part(
+            htlc_source,
+            payment_preimage,
+            payment_info,
+            attribution_data,
+            completion_action,
+        )
     }
 
     fn claim_mpp_part<
@@ -7762,7 +7790,8 @@
         ) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
     >(
         &self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
-        payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+        payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
+        completion_action: ComplFunc,
     ) {
         //TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
@@ -7803,6 +7832,7 @@
                         prev_hop.htlc_id,
                         payment_preimage,
                         payment_info,
+                        attribution_data,
                         &&logger,
                     );
 
@@ -8011,7 +8041,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
         forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
         startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
         next_channel_outpoint: OutPoint, next_channel_id: ChannelId,
-        next_user_channel_id: Option<u128>,
+        next_user_channel_id: Option<u128>, attribution_data: Option<&AttributionData>,
+        send_timestamp: Option<Duration>,
     ) {
         match source {
             HTLCSource::OutboundRoute {
@@ -8043,10 +8074,25 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
                 let prev_node_id = hop_data.counterparty_node_id;
                 let completed_blocker =
                     RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
+
+                // Obtain hold time, if available.
+                let now = duration_since_epoch();
+                let hold_time = hold_time(send_timestamp, now).unwrap_or(0);
+
+                // If attribution data was received from downstream, we shift it and get it ready for adding our hold
+                // time. Note that fulfilled HTLCs take a fast path to the incoming side. We don't need to wait for RAA
+                // to record the hold time like we do for failed HTLCs.
+                let attribution_data = process_fulfill_attribution_data(
+                    attribution_data,
+                    &hop_data.incoming_packet_shared_secret,
+                    hold_time,
+                );
+
                 self.claim_funds_from_hop(
                     hop_data,
                     payment_preimage,
                     None,
+                    Some(attribution_data),
                     |htlc_claim_value_msat, definitely_duplicate| {
                         let chan_to_release = Some(EventUnblockedChannel {
                             counterparty_node_id: next_channel_counterparty_node_id,
@@ -9604,7 +9650,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
     ) -> Result<(), MsgHandleErrInternal> {
         let funding_txo;
         let next_user_channel_id;
-        let (htlc_source, forwarded_htlc_value, skimmed_fee_msat) = {
+        let (htlc_source, forwarded_htlc_value, skimmed_fee_msat, send_timestamp) = {
             let per_peer_state = self.per_peer_state.read().unwrap();
             let peer_state_mutex = per_peer_state.get(counterparty_node_id).ok_or_else(|| {
                 debug_assert!(false);
@@ -9659,6 +9705,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
             funding_txo,
             msg.channel_id,
             Some(next_user_channel_id),
+            msg.attribution_data.as_ref(),
+            send_timestamp,
         );
 
         Ok(())
@@ -10482,6 +10530,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
                         "Claiming HTLC with preimage {} from our monitor",
                         preimage
                     );
+                    // Claim the funds from the previous hop, if there is one. Because this is in response to a
+                    // chain event, no attribution data is available.
                     self.claim_funds_internal(
                         htlc_update.source,
                         preimage,
@@ -10493,6 +10543,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
                         funding_outpoint,
                         channel_id,
                         None,
+                        None,
+                        None,
                     );
                 } else {
                     log_trace!(
@@ -16377,10 +16429,14 @@ where
                             // Note that we don't need to pass the `payment_info` here - its
                             // already (clearly) durably on disk in the `ChannelMonitor` so there's
                             // no need to worry about getting it into others.
+                            //
+                            // We don't encode any attribution data, because the required onion shared secret isn't
+                            // available here.
                             channel_manager.claim_mpp_part(
                                 part.into(),
                                 payment_preimage,
                                 None,
+                                None,
                                 |_, _| {
                                     (
                                         Some(MonitorUpdateCompletionAction::PaymentClaimed {
@@ -16525,6 +16581,7 @@ where
                 // We use `downstream_closed` in place of `from_onchain` here just as a guess - we
                 // don't remember in the `ChannelMonitor` where we got a preimage from, but if the
                 // channel is closed we just assume that it probably came from an on-chain claim.
+                // The same holds for attribution data. We don't have any, so we pass an empty one.
                 channel_manager.claim_funds_internal(
                     source,
                     preimage,
@@ -16536,6 +16593,8 @@ where
                     downstream_funding,
                     downstream_channel_id,
                     None,
+                    None,
+                    None,
                 );
             }
 

lightning/src/ln/functional_tests.rs

Lines changed: 2 additions & 1 deletion
@@ -7763,7 +7763,8 @@ pub fn test_onion_value_mpp_set_calculation() {
         assert_eq!(node.node.get_our_node_id(), payment_event.node_id);
 
         if idx == 0 {
-            // routing node
+            // Manipulate the onion packet for the routing node. Note that we pick a dummy session_priv here. The sender
+            // won't be able to decode fulfill attribution data.
             let session_priv = [3; 32];
             let height = nodes[0].best_block_info().1;
             let session_priv = SecretKey::from_slice(&session_priv).unwrap();
