@@ -56,14 +56,15 @@ use crate::events::{
 	InboundChannelFunds, PaymentFailureReason, ReplayEvent,
 };
 use crate::events::{FundingInfo, PaidBolt12Invoice};
+use crate::ln::onion_utils::process_onion_success;
 // Since this struct is returned in `list_channels` methods, expose it here in case users want to
 // construct one themselves.
-use crate::ln::channel::PendingV2Channel;
 use crate::ln::channel::{
 	self, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
 	OutboundV1Channel, ReconnectionMsg, ShutdownResult, UpdateFulfillCommitFetch,
 	WithChannelContext,
 };
+use crate::ln::channel::{duration_since_epoch, PendingV2Channel};
 use crate::ln::channel_state::ChannelDetails;
 use crate::ln::inbound_payment;
 use crate::ln::msgs;
@@ -76,6 +77,7 @@ use crate::ln::onion_payment::{
 	decode_incoming_update_add_htlc_onion, invalid_payment_err_data, HopConnector, InboundHTLCErr,
 	NextPacketDetails,
 };
+use crate::ln::onion_utils::AttributionData;
use crate::ln::onion_utils::{self};
 use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
 use crate::ln::our_peer_storage::EncryptedOurPeerStorage;
@@ -7625,8 +7627,16 @@ where
 				pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
 			}
 		});
+
+		// Create new attribution data as the final hop. Always report a zero hold time, because reporting a
+		// non-zero value will not make a difference in the penalty that may be applied by the sender.
+		let mut attribution_data = AttributionData::new();
+		attribution_data.update(&[], &htlc.prev_hop.incoming_packet_shared_secret, 0);
+		attribution_data.crypt(&htlc.prev_hop.incoming_packet_shared_secret);
+
 		self.claim_funds_from_hop(
 			htlc.prev_hop, payment_preimage, payment_info.clone(),
+			attribution_data,
 			|_, definitely_duplicate| {
 				debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
 				(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim: this_mpp_claim }), raa_blocker)
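For readers unfamiliar with attribution data: each hop on the return path records its hold time in a fixed set of slots and re-encrypts the structure with the onion shared secret it shares with the sender. The sketch below is a deliberately simplified stand-in, not LDK's actual `AttributionData` implementation: the `ToyAttributionData` type, the slot count, and the XOR "cipher" are all illustrative assumptions, and the per-hop HMACs are omitted. It shows the final-hop step this hunk performs: fresh data, a zero hold time, one encryption layer.

```rust
// Toy stand-in for attribution-data accumulation. LDK's real
// `AttributionData` also carries per-hop HMACs and uses a real stream
// cipher; both are omitted here to keep the mechanics visible.
const MAX_HOPS: usize = 20;

#[derive(Clone)]
struct ToyAttributionData {
	// One hold-time slot (milliseconds) per hop; slot 0 belongs to the
	// hop that most recently called `update`.
	hold_times: [u32; MAX_HOPS],
}

impl ToyAttributionData {
	fn new() -> Self {
		Self { hold_times: [0; MAX_HOPS] }
	}

	// Make room for this hop's report by moving every downstream hop's
	// slot one position toward the end (the oldest slot falls off).
	fn shift_right(&mut self) {
		for i in (1..MAX_HOPS).rev() {
			self.hold_times[i] = self.hold_times[i - 1];
		}
	}

	// Record our own hold time in slot 0. (The real `update` takes an
	// additional payload argument used to refresh the per-hop HMACs.)
	fn update(&mut self, shared_secret: &[u8], hold_time: u32) {
		let _ = shared_secret;
		self.hold_times[0] = hold_time;
	}

	// Toy layer encryption: XOR with a keystream derived from the shared
	// secret. XOR is self-inverse, so the sender peels layers the same way.
	fn crypt(&mut self, shared_secret: &[u8]) {
		for (i, slot) in self.hold_times.iter_mut().enumerate() {
			let k = shared_secret[i % shared_secret.len()] as u32;
			*slot ^= k | (k << 8) | (k << 16) | (k << 24);
		}
	}
}

fn main() {
	// The final hop, as in the hunk above: fresh data, a zero hold time,
	// then one encryption layer under the incoming shared secret.
	let mut ad = ToyAttributionData::new();
	ad.update(b"final-hop-shared-secret", 0);
	ad.crypt(b"final-hop-shared-secret");
	assert_ne!(ad.hold_times[0], 0); // now encrypted
}
```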
@@ -7658,7 +7668,8 @@ where
 		) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
 	>(
 		&self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData,
+		completion_action: ComplFunc,
 	) {
 		let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
 			let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7671,15 +7682,21 @@ where
 			channel_id: prev_hop.channel_id,
 			htlc_id: prev_hop.htlc_id,
 		};
-		self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
+		self.claim_mpp_part(
+			htlc_source,
+			payment_preimage,
+			payment_info,
+			attribution_data,
+			completion_action,
+		)
 	}
 
 	#[rustfmt::skip]
 	fn claim_mpp_part<
 		ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 	>(
 		&self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, completion_action: ComplFunc,
 	) {
 		//TODO: Delay the claimed_funds relaying just like we do outbound relay!
@@ -7712,7 +7729,7 @@ where
 		if let hash_map::Entry::Occupied(mut chan_entry) = peer_state.channel_by_id.entry(chan_id) {
 			if let Some(chan) = chan_entry.get_mut().as_funded_mut() {
 				let logger = WithChannelContext::from(&self.logger, &chan.context, None);
-				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, &&logger);
+				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, attribution_data, &&logger);
 
 				match fulfill_res {
 					UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } => {
@@ -7865,9 +7882,16 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
 		startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
 		next_channel_outpoint: OutPoint, next_channel_id: ChannelId, next_user_channel_id: Option<u128>,
+		attribution_data: Option<&AttributionData>, send_timestamp: Option<Duration>,
 	) {
 		match source {
 			HTLCSource::OutboundRoute { session_priv, payment_id, path, bolt12_invoice, .. } => {
+				// Extract the hold times for this fulfilled HTLC, if available.
+				if let Some(attribution_data) = attribution_data {
+					let _ = process_onion_success(&self.secp_ctx, &self.logger, &path,
+						&session_priv, attribution_data.clone());
+				}
+
 				debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
 					"We don't support claim_htlc claims during startup - monitors may not be available yet");
 				debug_assert_eq!(next_channel_counterparty_node_id, path.hops[0].pubkey);
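On the sender side, `process_onion_success` can recover every hop's reported hold time because each layer was encrypted under a secret derived from `session_priv` and the path, which the sender also knows. A rough sketch of that peeling loop, reusing the toy `ToyAttributionData` and `MAX_HOPS` from the earlier example (again an illustration of the mechanism, not LDK's implementation; the function name `extract_hold_times` is ours):

```rust
// Sketch of sender-side decoding. Each relaying hop encrypted the
// structure with a secret the sender can also derive, so the sender
// undoes the layers from its direct neighbour inward and reads off one
// hold time per hop.
fn extract_hold_times(
	per_hop_shared_secrets: &[&[u8]], mut ad: ToyAttributionData,
) -> Vec<u32> {
	let mut hold_times = Vec::with_capacity(per_hop_shared_secrets.len());
	for secret in per_hop_shared_secrets {
		// Undo this hop's encryption layer (the toy XOR is its own inverse).
		ad.crypt(secret);
		// This hop wrote its report into slot 0 before encrypting.
		hold_times.push(ad.hold_times[0]);
		// Undo the hop's `shift_right` so the next (further) hop's data
		// lines up with that hop's encryption layer again.
		for i in 0..MAX_HOPS - 1 {
			ad.hold_times[i] = ad.hold_times[i + 1];
		}
		ad.hold_times[MAX_HOPS - 1] = 0;
	}
	hold_times
}
```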
@@ -7884,7 +7908,31 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 				let prev_user_channel_id = hop_data.user_channel_id;
 				let prev_node_id = hop_data.counterparty_node_id;
 				let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
-				self.claim_funds_from_hop(hop_data, payment_preimage, None,
+
+				// If attribution data was received from downstream, shift it right to make room for our own
+				// hold time.
+				let mut attribution_data = attribution_data
+					.map_or(AttributionData::new(), |attribution_data| {
+						let mut attribution_data = attribution_data.clone();
+
+						attribution_data.shift_right();
+
+						attribution_data
+					});
+
+				// Obtain the hold time, if available.
+				let now = duration_since_epoch();
+				let hold_time = if let (Some(timestamp), Some(now)) = (send_timestamp, now) {
+					u32::try_from(now.saturating_sub(timestamp).as_millis()).unwrap_or(u32::MAX)
+				} else {
+					0
+				};
+
+				// Finish the attribution data by adding our hold time and crypting it.
+				attribution_data.update(&[], &hop_data.incoming_packet_shared_secret, hold_time);
+				attribution_data.crypt(&hop_data.incoming_packet_shared_secret);
+
+				self.claim_funds_from_hop(hop_data, payment_preimage, None, attribution_data,
 					|htlc_claim_value_msat, definitely_duplicate| {
 						let chan_to_release = Some(EventUnblockedChannel {
 							counterparty_node_id: next_channel_counterparty_node_id,
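One detail worth calling out in the hunk above: the hold time is measured in milliseconds, saturates at `u32::MAX` rather than overflowing, and silently degrades to zero when either timestamp is unavailable (presumably the case on builds where `duration_since_epoch` returns `None`). A self-contained mirror of that computation; the helper name `hold_time_ms` is ours, not LDK's:

```rust
use std::time::Duration;

// Milliseconds between send and fulfill, clamped to u32::MAX, defaulting
// to zero when either timestamp is missing.
fn hold_time_ms(send_timestamp: Option<Duration>, now: Option<Duration>) -> u32 {
	match (send_timestamp, now) {
		(Some(sent), Some(now)) => {
			u32::try_from(now.saturating_sub(sent).as_millis()).unwrap_or(u32::MAX)
		},
		_ => 0,
	}
}

fn main() {
	let sent = Some(Duration::from_millis(1_000));
	assert_eq!(hold_time_ms(sent, Some(Duration::from_millis(1_250))), 250);
	// A missing timestamp degrades to a zero hold time rather than an error.
	assert_eq!(hold_time_ms(None, Some(Duration::from_millis(1_250))), 0);
}
```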
@@ -9428,7 +9476,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 	fn internal_update_fulfill_htlc(&self, counterparty_node_id: &PublicKey, msg: &msgs::UpdateFulfillHTLC) -> Result<(), MsgHandleErrInternal> {
 		let funding_txo;
 		let next_user_channel_id;
-		let (htlc_source, forwarded_htlc_value, skimmed_fee_msat) = {
+		let (htlc_source, forwarded_htlc_value, skimmed_fee_msat, send_timestamp) = {
 			let per_peer_state = self.per_peer_state.read().unwrap();
 			let peer_state_mutex = per_peer_state.get(counterparty_node_id)
 				.ok_or_else(|| {
@@ -9469,7 +9517,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		};
 		self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(),
 			Some(forwarded_htlc_value), skimmed_fee_msat, false, false, *counterparty_node_id,
-			funding_txo, msg.channel_id, Some(next_user_channel_id),
+			funding_txo, msg.channel_id, Some(next_user_channel_id), msg.attribution_data.as_ref(),
+			send_timestamp,
 		);
 
 		Ok(())
@@ -10283,10 +10332,13 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 				let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(channel_id), Some(htlc_update.payment_hash));
 				if let Some(preimage) = htlc_update.payment_preimage {
 					log_trace!(logger, "Claiming HTLC with preimage {} from our monitor", preimage);
+					// Claim the funds from the previous hop, if there is one. Because this is in response to a
+					// chain event, no attribution data is available.
 					self.claim_funds_internal(
 						htlc_update.source, preimage,
 						htlc_update.htlc_value_satoshis.map(|v| v * 1000), None, true,
 						false, counterparty_node_id, funding_outpoint, channel_id, None,
+						None, None,
 					);
 				} else {
 					log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
@@ -15668,8 +15720,11 @@ where
 							// Note that we don't need to pass the `payment_info` here - its
 							// already (clearly) durably on disk in the `ChannelMonitor` so there's
 							// no need to worry about getting it into others.
+							//
+							// We don't encode any attribution data, because the required onion shared secret isn't
+							// available here.
 							channel_manager.claim_mpp_part(
-								part.into(), payment_preimage, None,
+								part.into(), payment_preimage, None, AttributionData::new(),
 								|_, _|
 									(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim }), pending_claim_ptr)
 							);
@@ -15773,9 +15828,10 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 			// We use `downstream_closed` in place of `from_onchain` here just as a guess - we
 			// don't remember in the `ChannelMonitor` where we got a preimage from, but if the
 			// channel is closed we just assume that it probably came from an on-chain claim.
+			// The same holds for the attribution data and send timestamp: we don't have them, so we pass `None`.
 			channel_manager.claim_funds_internal(source, preimage, Some(downstream_value), None,
 				downstream_closed, true, downstream_node_id, downstream_funding,
-				downstream_channel_id, None
+				downstream_channel_id, None, None, None,
 			);
 		}