@@ -59,7 +59,7 @@ use crate::types::features::Bolt11InvoiceFeatures;
 use crate::routing::router::{BlindedTail, InFlightHtlcs, Path, Payee, PaymentParameters, RouteParameters, RouteParametersConfig, Router, FixedRouter, Route};
 use crate::ln::onion_payment::{check_incoming_htlc_cltv, create_recv_pending_htlc_info, create_fwd_pending_htlc_info, decode_incoming_update_add_htlc_onion, HopConnector, InboundHTLCErr, NextPacketDetails, invalid_payment_err_data};
 use crate::ln::msgs;
-use crate::ln::onion_utils::{self};
+use crate::ln::onion_utils::{self, build_failure_packet, process_onion_success, HMAC_COUNT, HMAC_LEN, HOLD_TIME_LEN, MAX_HOPS};
 use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
 use crate::ln::msgs::{BaseMessageHandler, ChannelMessageHandler, CommitmentUpdate, DecodeError, LightningError, MessageSendEvent};
 #[cfg(test)]
@@ -88,6 +88,8 @@ use crate::util::ser::{BigSize, FixedLengthReader, LengthReadable, Readable, Rea
 use crate::util::logger::{Level, Logger, WithContext};
 use crate::util::errors::APIError;
 
+use crate::ln::onion_utils::AttributionData;
+
 #[cfg(async_payments)] use {
 	crate::offers::offer::Amount,
 	crate::offers::static_invoice::{DEFAULT_RELATIVE_EXPIRY as STATIC_INVOICE_DEFAULT_RELATIVE_EXPIRY, StaticInvoice, StaticInvoiceBuilder},
@@ -7239,8 +7241,19 @@ where
 					pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
 				}
 			});
+
+			log_info!(self.logger, "Attaching attribution data as the final hop");
+
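+			// As the final hop, build fresh attribution data: write our own hold time
+			// into the first slot, add our HMACs keyed off the incoming shared secret,
+			// and encrypt the structure for the upstream peer. The fixed value of 100
+			// below is a placeholder hold time used while experimenting.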
+			let mut attribution_data = AttributionData::new();
+			let hold_time_bytes: [u8; 4] = 100u32.to_be_bytes();
+			attribution_data.hold_times[..HOLD_TIME_LEN].copy_from_slice(&hold_time_bytes);
+			attribution_data.add_hmacs(&htlc.prev_hop.incoming_packet_shared_secret, &[]);
+			attribution_data.crypt(&htlc.prev_hop.incoming_packet_shared_secret);
+
 			self.claim_funds_from_hop(
 				htlc.prev_hop, payment_preimage, payment_info.clone(),
+				Some(attribution_data),
 				|_, definitely_duplicate| {
 					debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
 					(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim: this_mpp_claim }), raa_blocker)
@@ -7269,7 +7282,7 @@ where
72697282 ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
72707283 >(
72717284 &self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
7272- payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
7285+ payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>, completion_action: ComplFunc,
72737286 ) {
72747287 let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
72757288 let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7282,15 +7295,17 @@ where
 			channel_id: prev_hop.channel_id,
 			htlc_id: prev_hop.htlc_id,
 		};
-		self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
+		self.claim_mpp_part(htlc_source, payment_preimage, payment_info, attribution_data, completion_action)
 	}
 
 	fn claim_mpp_part<
 		ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 	>(
 		&self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>, completion_action: ComplFunc,
 	) {
+		log_info!(self.logger, "claim_mpp_part called");
+
 		//TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
 		// If we haven't yet run background events assume we're still deserializing and shouldn't
@@ -7322,7 +7337,7 @@ where
 		if let hash_map::Entry::Occupied(mut chan_entry) = peer_state.channel_by_id.entry(chan_id) {
 			if let Some(chan) = chan_entry.get_mut().as_funded_mut() {
 				let logger = WithChannelContext::from(&self.logger, &chan.context, None);
-				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, &&logger);
+				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, attribution_data, &&logger);
 
 				match fulfill_res {
 					UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } => {
@@ -7474,9 +7489,17 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
 		startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
 		next_channel_outpoint: OutPoint, next_channel_id: ChannelId, next_user_channel_id: Option<u128>,
+		attribution_data: Option<&AttributionData>,
 	) {
+		log_info!(self.logger, "claim_funds_internal called (non-final hop)");
 		match source {
 			HTLCSource::OutboundRoute { session_priv, payment_id, path, bolt12_invoice, .. } => {
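+				// We are the payment sender: verify and peel the accumulated attribution
+				// data so per-hop hold times can be recovered for this successful payment.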
+				// The fulfill may not carry attribution data (e.g. on-chain and
+				// startup-replay claims pass `None`), so avoid unwrapping blindly.
+				if let Some(attribution_data) = attribution_data {
+					log_info!(self.logger, "SENDER: Attribution data: {:?}", attribution_data);
+					process_onion_success(&self.secp_ctx, &self.logger, &path,
+						&session_priv, None, attribution_data.clone());
+				}
+
 				debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
 					"We don't support claim_htlc claims during startup - monitors may not be available yet");
 				debug_assert_eq!(next_channel_counterparty_node_id, path.hops[0].pubkey);
@@ -7493,7 +7516,25 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 				let prev_user_channel_id = hop_data.user_channel_id;
 				let prev_node_id = hop_data.counterparty_node_id;
 				let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
-				self.claim_funds_from_hop(hop_data, payment_preimage, None,
+
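+				// As a forwarding hop, extend the attribution data received from
+				// downstream: shift existing hold times/HMACs right to free the first
+				// slot, record our own hold time there, add our HMACs, and apply our
+				// encryption layer so the sender can later peel it hop by hop.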
+				let attribution_data = attribution_data
+					.map(|attribution_data| {
+						let mut attribution_data = attribution_data.clone();
+
+						attribution_data.shift_right();
+
+						// Derive a deterministic placeholder hold time from the shared
+						// secret instead of measuring a real one (test scaffolding).
+						let hold_time = hop_data.incoming_packet_shared_secret[0];
+
+						let hold_time_bytes: [u8; 4] = (10 + hold_time as u32).to_be_bytes();
+						attribution_data.hold_times[..HOLD_TIME_LEN].copy_from_slice(&hold_time_bytes);
+						attribution_data.add_hmacs(&hop_data.incoming_packet_shared_secret, &[]);
+						attribution_data.crypt(&hop_data.incoming_packet_shared_secret);
+
+						attribution_data
+					});
+
+				self.claim_funds_from_hop(hop_data, payment_preimage, None, attribution_data,
 					|htlc_claim_value_msat, definitely_duplicate| {
 						let chan_to_release = Some(EventUnblockedChannel {
 							counterparty_node_id: next_channel_counterparty_node_id,
@@ -8937,7 +8978,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		};
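+		// Hand the attribution data from the downstream `update_fulfill_htlc` to the
+		// claim path so it can be extended and relayed to our upstream peer.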
 		self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(),
 			Some(forwarded_htlc_value), skimmed_fee_msat, false, false, *counterparty_node_id,
-			funding_txo, msg.channel_id, Some(next_user_channel_id),
+			funding_txo, msg.channel_id, Some(next_user_channel_id), msg.attribution_data.as_ref(),
 		);
 
 		Ok(())
@@ -9638,6 +9679,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 						htlc_update.source, preimage,
 						htlc_update.htlc_value_satoshis.map(|v| v * 1000), None, true,
 						false, counterparty_node_id, funding_outpoint, channel_id, None,
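+						// Claims surfaced via monitor events come from on-chain resolution,
+						// where no attribution data is available.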
+						None,
 					);
 				} else {
 					log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
@@ -12154,6 +12196,7 @@ where
 	}
 
 	fn handle_update_fulfill_htlc(&self, counterparty_node_id: PublicKey, msg: &msgs::UpdateFulfillHTLC) {
+		log_info!(self.logger, "Received update_fulfill_htlc: {:?}", msg);
 		let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
 		let _ = handle_error!(self, self.internal_update_fulfill_htlc(&counterparty_node_id, msg), counterparty_node_id);
 	}
@@ -14905,7 +14948,7 @@ where
 							// already (clearly) durably on disk in the `ChannelMonitor` so there's
 							// no need to worry about getting it into others.
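+							// Attribution data is not persisted, so replayed MPP claims at
+							// startup pass `None` for it.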
 							channel_manager.claim_mpp_part(
-								part.into(), payment_preimage, None,
+								part.into(), payment_preimage, None, None,
 								|_, _|
 								(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim }), pending_claim_ptr)
 							);
@@ -15011,7 +15054,7 @@ where
 					// channel is closed we just assume that it probably came from an on-chain claim.
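+					// As above, no attribution data is available for claims that may have
+					// come from an on-chain resolution.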
 					channel_manager.claim_funds_internal(source, preimage, Some(downstream_value), None,
 						downstream_closed, true, downstream_node_id, downstream_funding,
-						downstream_channel_id, None
+						downstream_channel_id, None, None,
 					);
 				}
 