@@ -58,12 +58,12 @@ use crate::events::{
5858use crate::events::{FundingInfo, PaidBolt12Invoice};
5959// Since this struct is returned in `list_channels` methods, expose it here in case users want to
6060// construct one themselves.
61- use crate::ln::channel::PendingV2Channel;
6261use crate::ln::channel::{
63- self, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
62+ self, hold_time, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
6463 OutboundV1Channel, ReconnectionMsg, ShutdownResult, UpdateFulfillCommitFetch,
6564 WithChannelContext,
6665};
66+ use crate::ln::channel::{duration_since_epoch, PendingV2Channel};
6767use crate::ln::channel_state::ChannelDetails;
6868use crate::ln::inbound_payment;
6969use crate::ln::msgs;
@@ -77,6 +77,7 @@ use crate::ln::onion_payment::{
7777 NextPacketDetails,
7878};
7979use crate::ln::onion_utils::{self};
80+ use crate::ln::onion_utils::{process_fulfill_attribution_data, AttributionData};
8081use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
8182use crate::ln::our_peer_storage::EncryptedOurPeerStorage;
8283#[cfg(test)]
@@ -7671,10 +7672,20 @@ where
76717672 pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
76727673 }
76737674 });
7675+
7676+ // Create new attribution data as the final hop. Always report a zero hold time, because reporting a
7677+ // non-zero value will not make a difference in the penalty that may be applied by the sender.
7678+ let attribution_data = process_fulfill_attribution_data(
7679+ None,
7680+ &htlc.prev_hop.incoming_packet_shared_secret,
7681+ 0,
7682+ );
7683+
76747684 self.claim_funds_from_hop(
76757685 htlc.prev_hop,
76767686 payment_preimage,
76777687 payment_info.clone(),
7688+ Some(attribution_data),
76787689 |_, definitely_duplicate| {
76797690 debug_assert!(
76807691 !definitely_duplicate,
@@ -7719,7 +7730,8 @@ where
77197730 ) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
77207731 >(
77217732 &self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
7722- payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
7733+ payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
7734+ completion_action: ComplFunc,
77237735 ) {
77247736 let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
77257737 let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7732,7 +7744,13 @@ where
77327744 channel_id: prev_hop.channel_id,
77337745 htlc_id: prev_hop.htlc_id,
77347746 };
7735- self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
7747+ self.claim_mpp_part(
7748+ htlc_source,
7749+ payment_preimage,
7750+ payment_info,
7751+ attribution_data,
7752+ completion_action,
7753+ )
77367754 }
77377755
77387756 fn claim_mpp_part<
@@ -7742,7 +7760,8 @@ where
77427760 ) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
77437761 >(
77447762 &self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
7745- payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
7763+ payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>,
7764+ completion_action: ComplFunc,
77467765 ) {
77477766 //TODO: Delay the claimed_funds relaying just like we do outbound relay!
77487767
@@ -7783,6 +7802,7 @@ where
77837802 prev_hop.htlc_id,
77847803 payment_preimage,
77857804 payment_info,
7805+ attribution_data,
77867806 &&logger,
77877807 );
77887808
@@ -7991,7 +8011,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
79918011 forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
79928012 startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
79938013 next_channel_outpoint: OutPoint, next_channel_id: ChannelId,
7994- next_user_channel_id: Option<u128>,
8014+ next_user_channel_id: Option<u128>, attribution_data: Option<&AttributionData>,
8015+ send_timestamp: Option<Duration>,
79958016 ) {
79968017 match source {
79978018 HTLCSource::OutboundRoute {
@@ -8023,10 +8044,25 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
80238044 let prev_node_id = hop_data.counterparty_node_id;
80248045 let completed_blocker =
80258046 RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
8047+
8048+ // Obtain hold time, if available.
8049+ let now = duration_since_epoch();
8050+ let hold_time = hold_time(send_timestamp, now).unwrap_or(0);
8051+
8052+ // If attribution data was received from downstream, we shift it and get it ready for adding our hold
8053+ // time. Note that fulfilled HTLCs take a fast path to the incoming side. We don't need to wait for RAA
8054+ // to record the hold time like we do for failed HTLCs.
8055+ let attribution_data = process_fulfill_attribution_data(
8056+ attribution_data,
8057+ &hop_data.incoming_packet_shared_secret,
8058+ hold_time,
8059+ );
8060+
80268061 self.claim_funds_from_hop(
80278062 hop_data,
80288063 payment_preimage,
80298064 None,
8065+ Some(attribution_data),
80308066 |htlc_claim_value_msat, definitely_duplicate| {
80318067 let chan_to_release = Some(EventUnblockedChannel {
80328068 counterparty_node_id: next_channel_counterparty_node_id,
@@ -9584,7 +9620,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
95849620 ) -> Result<(), MsgHandleErrInternal> {
95859621 let funding_txo;
95869622 let next_user_channel_id;
9587- let (htlc_source, forwarded_htlc_value, skimmed_fee_msat) = {
9623+ let (htlc_source, forwarded_htlc_value, skimmed_fee_msat, send_timestamp ) = {
95889624 let per_peer_state = self.per_peer_state.read().unwrap();
95899625 let peer_state_mutex = per_peer_state.get(counterparty_node_id).ok_or_else(|| {
95909626 debug_assert!(false);
@@ -9639,6 +9675,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
96399675 funding_txo,
96409676 msg.channel_id,
96419677 Some(next_user_channel_id),
9678+ msg.attribution_data.as_ref(),
9679+ send_timestamp,
96429680 );
96439681
96449682 Ok(())
@@ -10462,6 +10500,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1046210500 "Claiming HTLC with preimage {} from our monitor",
1046310501 preimage
1046410502 );
10503+ // Claim the funds from the previous hop, if there is one. Because this is in response to a
10504+ // chain event, no attribution data is available.
1046510505 self.claim_funds_internal(
1046610506 htlc_update.source,
1046710507 preimage,
@@ -10473,6 +10513,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1047310513 funding_outpoint,
1047410514 channel_id,
1047510515 None,
10516+ None,
10517+ None,
1047610518 );
1047710519 } else {
1047810520 log_trace!(
@@ -16281,10 +16323,14 @@ where
1628116323 // Note that we don't need to pass the `payment_info` here - its
1628216324 // already (clearly) durably on disk in the `ChannelMonitor` so there's
1628316325 // no need to worry about getting it into others.
16326+ //
16327+ // We don't encode any attribution data, because the required onion shared secret isn't
16328+ // available here.
1628416329 channel_manager.claim_mpp_part(
1628516330 part.into(),
1628616331 payment_preimage,
1628716332 None,
16333+ None,
1628816334 |_, _| {
1628916335 (
1629016336 Some(MonitorUpdateCompletionAction::PaymentClaimed {
@@ -16429,6 +16475,7 @@ where
1642916475 // We use `downstream_closed` in place of `from_onchain` here just as a guess - we
1643016476 // don't remember in the `ChannelMonitor` where we got a preimage from, but if the
1643116477 // channel is closed we just assume that it probably came from an on-chain claim.
1643116477			// The same holds for attribution data. We don't have any here, so we pass `None`.
1643216479 channel_manager.claim_funds_internal(
1643316480 source,
1643416481 preimage,
@@ -16440,6 +16487,8 @@ where
1644016487 downstream_funding,
1644116488 downstream_channel_id,
1644216489 None,
16490+ None,
16491+ None,
1644316492 );
1644416493 }
1644516494
0 commit comments