@@ -59,7 +59,7 @@ use crate::types::features::Bolt11InvoiceFeatures;
 use crate::routing::router::{BlindedTail, InFlightHtlcs, Path, Payee, PaymentParameters, RouteParameters, RouteParametersConfig, Router, FixedRouter, Route};
 use crate::ln::onion_payment::{check_incoming_htlc_cltv, create_recv_pending_htlc_info, create_fwd_pending_htlc_info, decode_incoming_update_add_htlc_onion, HopConnector, InboundHTLCErr, NextPacketDetails, invalid_payment_err_data};
 use crate::ln::msgs;
-use crate::ln::onion_utils::{self};
+use crate::ln::onion_utils::{self, process_onion_success, HOLD_TIME_LEN};
 use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
 use crate::ln::msgs::{BaseMessageHandler, ChannelMessageHandler, CommitmentUpdate, DecodeError, LightningError, MessageSendEvent};
 #[cfg(test)]
@@ -88,6 +88,8 @@ use crate::util::ser::{BigSize, FixedLengthReader, LengthReadable, Readable, Rea
 use crate::util::logger::{Level, Logger, WithContext};
 use crate::util::errors::APIError;
 
+use crate::ln::onion_utils::AttributionData;
+
 #[cfg(async_payments)] use {
 	crate::offers::offer::Amount,
 	crate::offers::static_invoice::{DEFAULT_RELATIVE_EXPIRY as STATIC_INVOICE_DEFAULT_RELATIVE_EXPIRY, StaticInvoice, StaticInvoiceBuilder},
@@ -7239,8 +7241,21 @@ where
 					pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
 				}
 			});
+
+			log_info!(self.logger, "Attaching attribution data to the final-hop claim (expected to run only once)");
+
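+			// A sketch of attribution data for the success path (assumed semantics):
+			// record our hold time in the first slot, add HMACs keyed to the incoming
+			// shared secret, then apply one encryption layer for the upstream hop.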
+			let mut attribution_data = AttributionData::new();
+			let hold_time_bytes: [u8; 4] = 100u32.to_be_bytes();
+			attribution_data.hold_times[..HOLD_TIME_LEN].copy_from_slice(&hold_time_bytes);
+			attribution_data.add_hmacs(&htlc.prev_hop.incoming_packet_shared_secret, &[]);
+			attribution_data.crypt(&htlc.prev_hop.incoming_packet_shared_secret);
+
 			self.claim_funds_from_hop(
 				htlc.prev_hop, payment_preimage, payment_info.clone(),
+				attribution_data,
 				|_, definitely_duplicate| {
 					debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
 					(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim: this_mpp_claim }), raa_blocker)
@@ -7269,7 +7284,7 @@ where
 		ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 	>(
 		&self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, completion_action: ComplFunc,
 	) {
 		let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
 			let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7282,15 +7297,17 @@
 			channel_id: prev_hop.channel_id,
 			htlc_id: prev_hop.htlc_id,
 		};
-		self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
+		self.claim_mpp_part(htlc_source, payment_preimage, payment_info, attribution_data, completion_action)
 	}
 
 	fn claim_mpp_part<
 		ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 	>(
 		&self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, completion_action: ComplFunc,
 	) {
+		log_info!(self.logger, "claim_mpp_part called");
+
 		//TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
 		// If we haven't yet run background events assume we're still deserializing and shouldn't
@@ -7322,7 +7339,9 @@ where
 		if let hash_map::Entry::Occupied(mut chan_entry) = peer_state.channel_by_id.entry(chan_id) {
 			if let Some(chan) = chan_entry.get_mut().as_funded_mut() {
 				let logger = WithChannelContext::from(&self.logger, &chan.context, None);
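+				// Hand the attribution data to the channel so it can be included in the
+				// update_fulfill_htlc message sent back to the previous hop.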
-				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, &&logger);
+				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, attribution_data, &&logger);
 
 				match fulfill_res {
 					UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } => {
@@ -7474,9 +7493,18 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
 		startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
 		next_channel_outpoint: OutPoint, next_channel_id: ChannelId, next_user_channel_id: Option<u128>,
+		attribution_data: Option<&AttributionData>,
 	) {
+		log_info!(self.logger, "claim_funds_internal called (non-final hops only)");
 		match source {
 			HTLCSource::OutboundRoute { session_priv, payment_id, path, bolt12_invoice, .. } => {
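+				// At the payment origin, decode the attribution data that accumulated
+				// along the path to recover the per-hop hold times on success.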
+				if let Some(attribution_data) = attribution_data {
+					process_onion_success(&self.secp_ctx, &self.logger, &path,
+						&session_priv, attribution_data.clone());
+				}
+
 				debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
 					"We don't support claim_htlc claims during startup - monitors may not be available yet");
 				debug_assert_eq!(next_channel_counterparty_node_id, path.hops[0].pubkey);
@@ -7493,7 +7521,27 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 				let prev_user_channel_id = hop_data.user_channel_id;
 				let prev_node_id = hop_data.counterparty_node_id;
 				let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
-				self.claim_funds_from_hop(hop_data, payment_preimage, None,
+
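+				// Take the attribution data received from downstream (if any) and shift
+				// it right to free the first slot for this hop's own entry.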
+				let mut attribution_data = attribution_data
+					.map_or_else(AttributionData::new, |attribution_data| {
+						let mut attribution_data = attribution_data.clone();
+
+						attribution_data.shift_right();
+
+						attribution_data
+					});
+
+				// Fake hold time: derive it from the shared secret so each hop reports a distinct value.
+				let hold_time = hop_data.incoming_packet_shared_secret[0];
+
+				let hold_time_bytes: [u8; 4] = (10 + hold_time as u32).to_be_bytes();
+				attribution_data.hold_times[..HOLD_TIME_LEN].copy_from_slice(&hold_time_bytes);
+				attribution_data.add_hmacs(&hop_data.incoming_packet_shared_secret, &[]);
+				attribution_data.crypt(&hop_data.incoming_packet_shared_secret);
+
+				self.claim_funds_from_hop(hop_data, payment_preimage, None, attribution_data,
 					|htlc_claim_value_msat, definitely_duplicate| {
 						let chan_to_release = Some(EventUnblockedChannel {
 							counterparty_node_id: next_channel_counterparty_node_id,
@@ -8937,7 +8985,9 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		};
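+		// Forward the attribution data received in update_fulfill_htlc so hold
+		// times keep accumulating as the preimage travels back towards the payer.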
 		self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(),
 			Some(forwarded_htlc_value), skimmed_fee_msat, false, false, *counterparty_node_id,
-			funding_txo, msg.channel_id, Some(next_user_channel_id),
+			funding_txo, msg.channel_id, Some(next_user_channel_id), msg.attribution_data.as_ref(),
 		);
 
 		Ok(())
@@ -9638,6 +9688,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 							htlc_update.source, preimage,
 							htlc_update.htlc_value_satoshis.map(|v| v * 1000), None, true,
 							false, counterparty_node_id, funding_outpoint, channel_id, None,
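+							// No attribution data is available for claims surfaced by the monitor.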
+							None,
 						);
 					} else {
 						log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
@@ -12154,6 +12206,7 @@ where
 	}
 
 	fn handle_update_fulfill_htlc(&self, counterparty_node_id: PublicKey, msg: &msgs::UpdateFulfillHTLC) {
+		log_info!(self.logger, "Received update_fulfill_htlc: {:?}", msg);
 		let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
 		let _ = handle_error!(self, self.internal_update_fulfill_htlc(&counterparty_node_id, msg), counterparty_node_id);
 	}
@@ -14905,7 +14958,8 @@ where
 							// already (clearly) durably on disk in the `ChannelMonitor` so there's
 							// no need to worry about getting it into others.
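+							// Claims replayed from persisted state carry no attribution data; start empty.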
 							channel_manager.claim_mpp_part(
-								part.into(), payment_preimage, None,
+								part.into(), payment_preimage, None, AttributionData::new(),
 								|_, _|
 									(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim }), pending_claim_ptr)
 							);
@@ -15011,7 +15065,7 @@ where
 					// channel is closed we just assume that it probably came from an on-chain claim.
 					channel_manager.claim_funds_internal(source, preimage, Some(downstream_value), None,
 						downstream_closed, true, downstream_node_id, downstream_funding,
-						downstream_channel_id, None
+						downstream_channel_id, None, None,
 					);
 
 