@@ -623,11 +623,17 @@ impl Readable for InterceptId {
623623}
624624
625625#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
626+ pub(crate) struct PreviousHopIdData {
627+ short_channel_id: u64,
628+ htlc_id: u64,
629+ }
630+
631+ #[derive(Clone, Debug, PartialEq, Eq, Hash)]
626632/// Uniquely describes an HTLC by its source. Just the guaranteed-unique subset of [`HTLCSource`].
627633pub(crate) enum SentHTLCId {
628634 PreviousHopData { short_channel_id: u64, htlc_id: u64 },
629635 OutboundRoute { session_priv: [u8; SECRET_KEY_SIZE] },
630- TrampolineForward { session_priv: [u8; SECRET_KEY_SIZE], previous_short_channel_id: u64, htlc_id: u64 }
636+ TrampolineForward { session_priv: [u8; SECRET_KEY_SIZE], previous_hop_data: Vec<PreviousHopIdData> }
631637}
632638impl SentHTLCId {
633639 pub(crate) fn from_source(source: &HTLCSource) -> Self {
@@ -640,12 +646,18 @@ impl SentHTLCId {
640646 Self::OutboundRoute { session_priv: session_priv.secret_bytes() },
641647 HTLCSource::TrampolineForward { previous_hop_data, session_priv, .. } => Self::TrampolineForward {
642648 session_priv: session_priv.secret_bytes(),
643- previous_short_channel_id: previous_hop_data.short_channel_id,
644- htlc_id: previous_hop_data.htlc_id,
649+ previous_hop_data: previous_hop_data.iter().map(|hop_data| PreviousHopIdData {
650+ short_channel_id: hop_data.short_channel_id,
651+ htlc_id: hop_data.htlc_id,
652+ }).collect(),
645653 },
646654 }
647655 }
648656}
657+ impl_writeable_tlv_based!(PreviousHopIdData, {
658+ (0, short_channel_id, required),
659+ (2, htlc_id, required),
660+ });
649661impl_writeable_tlv_based_enum!(SentHTLCId,
650662 (0, PreviousHopData) => {
651663 (0, short_channel_id, required),
@@ -656,8 +668,7 @@ impl_writeable_tlv_based_enum!(SentHTLCId,
656668 },
657669 (4, TrampolineForward) => {
658670 (0, session_priv, required),
659- (2, previous_short_channel_id, required),
660- (4, htlc_id, required),
671+ (2, previous_hop_data, required_vec),
661672 },
662673);
663674
@@ -670,7 +681,8 @@ mod fuzzy_channelmanager {
670681 pub enum HTLCSource {
671682 PreviousHopData(HTLCPreviousHopData),
672683 TrampolineForward {
673- previous_hop_data: HTLCPreviousHopData,
684+ // we might be forwarding an incoming payment that was received over MPP
685+ previous_hop_data: Vec<HTLCPreviousHopData>,
674686 incoming_trampoline_shared_secret: [u8; 32],
675687 hops: Vec<RouteHop>,
676688 session_priv: SecretKey,
@@ -6231,7 +6243,7 @@ where
62316243 let htlc_source = HTLCSource::TrampolineForward {
62326244 // dummy value
62336245 session_priv: SecretKey::from_slice(&self.entropy_source.get_secure_random_bytes()).unwrap(),
6234- previous_hop_data: HTLCPreviousHopData {
6246+ previous_hop_data: vec![HTLCPreviousHopData {
62356247 short_channel_id: prev_short_channel_id,
62366248 user_channel_id: Some(prev_user_channel_id),
62376249 counterparty_node_id: prev_counterparty_node_id,
@@ -6243,7 +6255,7 @@ where
62436255 phantom_shared_secret: None,
62446256 blinded_failure: blinded.map(|b| b.failure),
62456257 cltv_expiry: Some(incoming_cltv_expiry),
6246- },
6258+ }],
62476259 incoming_trampoline_shared_secret,
62486260 hops: Vec::new(),
62496261 };
@@ -7303,7 +7315,7 @@ where
73037315 // Note that we MUST NOT end up calling methods on self.chain_monitor here - we're called
73047316 // from block_connected which may run during initialization prior to the chain_monitor
73057317 // being fully configured. See the docs for `ChannelManagerReadArgs` for more.
7306- let mut push_forward_event;
7318+ let mut push_forward_event = true;
73077319 match source {
73087320 HTLCSource::OutboundRoute { ref path, ref session_priv, ref payment_id, .. } => {
73097321 push_forward_event = self.pending_outbound_payments.fail_htlc(source, payment_hash, onion_error, path,
@@ -7364,56 +7376,61 @@ where
73647376 // todo: what do we want to do with this given we do not wish to propagate it directly?
73657377 let _decoded_onion_failure = onion_error.decode_onion_failure(&self.secp_ctx, &self.logger, &source);
73667378
7367- let incoming_packet_shared_secret = previous_hop_data.incoming_packet_shared_secret;
7368- let channel_id = previous_hop_data.channel_id;
7369- let short_channel_id = previous_hop_data.short_channel_id;
7370- let htlc_id = previous_hop_data.htlc_id;
7371- let blinded_failure = previous_hop_data.blinded_failure;
7372- log_trace!(
7373- WithContext::from(&self.logger, None, Some(channel_id), Some(*payment_hash)),
7374- "Failing {}HTLC with payment_hash {} backwards from us following Trampoline forwarding failure: {:?}",
7375- if blinded_failure.is_some() { "blinded " } else { "" }, &payment_hash, onion_error
7376- );
7377- let failure = match blinded_failure {
7378- Some(BlindedFailure::FromIntroductionNode) => {
7379- let blinded_onion_error = HTLCFailReason::reason(INVALID_ONION_BLINDING, vec![0; 32]);
7380- let err_packet = blinded_onion_error.get_encrypted_failure_packet(
7381- &incoming_packet_shared_secret, &Some(incoming_trampoline_shared_secret.clone())
7382- );
7383- HTLCForwardInfo::FailHTLC { htlc_id, err_packet }
7384- },
7385- Some(BlindedFailure::FromBlindedNode) => {
7386- HTLCForwardInfo::FailMalformedHTLC {
7387- htlc_id,
7388- failure_code: INVALID_ONION_BLINDING,
7389- sha256_of_onion: [0; 32]
7379+ for current_hop_data in previous_hop_data {
7380+ let incoming_packet_shared_secret = current_hop_data.incoming_packet_shared_secret;
7381+ let channel_id = current_hop_data.channel_id;
7382+ let short_channel_id = current_hop_data.short_channel_id;
7383+ let htlc_id = current_hop_data.htlc_id;
7384+ let blinded_failure = current_hop_data.blinded_failure;
7385+ log_trace!(
7386+ WithContext::from(&self.logger, None, Some(channel_id), Some(*payment_hash)),
7387+ "Failing {}HTLC with payment_hash {} backwards from us following Trampoline forwarding failure: {:?}",
7388+ if blinded_failure.is_some() { "blinded " } else { "" }, &payment_hash, onion_error
7389+ );
7390+ let failure = match blinded_failure {
7391+ Some(BlindedFailure::FromIntroductionNode) => {
7392+ let blinded_onion_error = HTLCFailReason::reason(INVALID_ONION_BLINDING, vec![0; 32]);
7393+ let err_packet = blinded_onion_error.get_encrypted_failure_packet(
7394+ &incoming_packet_shared_secret, &Some(incoming_trampoline_shared_secret.clone())
7395+ );
7396+ HTLCForwardInfo::FailHTLC { htlc_id, err_packet }
7397+ },
7398+ Some(BlindedFailure::FromBlindedNode) => {
7399+ HTLCForwardInfo::FailMalformedHTLC {
7400+ htlc_id,
7401+ failure_code: INVALID_ONION_BLINDING,
7402+ sha256_of_onion: [0; 32]
7403+ }
7404+ },
7405+ None => {
7406+ let err_code = 0x2000 | 25;
7407+ let err_packet = HTLCFailReason::reason(err_code, Vec::new())
7408+ .get_encrypted_failure_packet(&incoming_packet_shared_secret, &Some(incoming_trampoline_shared_secret.clone()));
7409+ HTLCForwardInfo::FailHTLC { htlc_id, err_packet }
73907410 }
7391- },
7392- None => {
7393- let err_code = 0x2000 | 25;
7394- let err_packet = HTLCFailReason::reason(err_code, Vec::new())
7395- .get_encrypted_failure_packet(&incoming_packet_shared_secret, &Some(incoming_trampoline_shared_secret.clone()));
7396- HTLCForwardInfo::FailHTLC { htlc_id, err_packet }
7397- }
7398- };
7411+ };
73997412
7400- push_forward_event = self.decode_update_add_htlcs.lock().unwrap().is_empty();
7401- let mut forward_htlcs = self.forward_htlcs.lock().unwrap();
7402- push_forward_event &= forward_htlcs.is_empty();
7403- match forward_htlcs.entry(short_channel_id) {
7404- hash_map::Entry::Occupied(mut entry) => {
7405- entry.get_mut().push(failure);
7406- },
7407- hash_map::Entry::Vacant(entry) => {
7408- entry.insert(vec!(failure));
7413+ push_forward_event = self.decode_update_add_htlcs.lock().unwrap().is_empty();
7414+ let mut forward_htlcs = self.forward_htlcs.lock().unwrap();
7415+ push_forward_event &= forward_htlcs.is_empty();
7416+
7417+ match forward_htlcs.entry(short_channel_id) {
7418+ hash_map::Entry::Occupied(mut entry) => {
7419+ entry.get_mut().push(failure);
7420+ },
7421+ hash_map::Entry::Vacant(entry) => {
7422+ entry.insert(vec!(failure));
7423+ }
74097424 }
7425+
7426+ mem::drop(forward_htlcs);
7427+
7428+ let mut pending_events = self.pending_events.lock().unwrap();
7429+ pending_events.push_back((events::Event::HTLCHandlingFailed {
7430+ prev_channel_id: channel_id,
7431+ failed_next_destination: destination.clone(),
7432+ }, None));
74107433 }
7411- mem::drop(forward_htlcs);
7412- let mut pending_events = self.pending_events.lock().unwrap();
7413- pending_events.push_back((events::Event::HTLCHandlingFailed {
7414- prev_channel_id: channel_id,
7415- failed_next_destination: destination,
7416- }, None));
74177434 },
74187435 }
74197436 push_forward_event
@@ -7819,7 +7836,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
78197836 session_priv, path, from_onchain, ev_completion_action, &self.pending_events,
78207837 &self.logger);
78217838 },
7822- HTLCSource::PreviousHopData(hop_data) | HTLCSource::TrampolineForward { previous_hop_data: hop_data, .. } => {
7839+ HTLCSource::PreviousHopData(hop_data) => {
78237840 let prev_channel_id = hop_data.channel_id;
78247841 let prev_user_channel_id = hop_data.user_channel_id;
78257842 let prev_node_id = hop_data.counterparty_node_id;
@@ -7874,6 +7891,63 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
78747891 }
78757892 });
78767893 },
7894+ HTLCSource::TrampolineForward { previous_hop_data, .. } => {
7895+ for current_previous_hop_data in previous_hop_data {
7896+ let prev_channel_id = current_previous_hop_data.channel_id;
7897+ let prev_user_channel_id = current_previous_hop_data.user_channel_id;
7898+ let prev_node_id = current_previous_hop_data.counterparty_node_id;
7899+ let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&current_previous_hop_data);
7900+ self.claim_funds_from_hop(current_previous_hop_data, payment_preimage, None,
7901+ |htlc_claim_value_msat, definitely_duplicate| {
7902+ let chan_to_release = Some(EventUnblockedChannel {
7903+ counterparty_node_id: next_channel_counterparty_node_id,
7904+ funding_txo: next_channel_outpoint,
7905+ channel_id: next_channel_id,
7906+ blocking_action: completed_blocker,
7907+ });
7908+
7909+ if definitely_duplicate && startup_replay {
7910+ // On startup we may get redundant claims which are related to
7911+ // monitor updates still in flight. In that case, we shouldn't
7912+ // immediately free, but instead let that monitor update complete
7913+ // in the background.
7914+ (None, None)
7915+ } else if definitely_duplicate {
7916+ if let Some(other_chan) = chan_to_release {
7917+ (Some(MonitorUpdateCompletionAction::FreeOtherChannelImmediately {
7918+ downstream_counterparty_node_id: other_chan.counterparty_node_id,
7919+ downstream_funding_outpoint: other_chan.funding_txo,
7920+ downstream_channel_id: other_chan.channel_id,
7921+ blocking_action: other_chan.blocking_action,
7922+ }), None)
7923+ } else { (None, None) }
7924+ } else {
7925+ let total_fee_earned_msat = if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
7926+ if let Some(claimed_htlc_value) = htlc_claim_value_msat {
7927+ Some(claimed_htlc_value - forwarded_htlc_value)
7928+ } else { None }
7929+ } else { None };
7930+ debug_assert!(skimmed_fee_msat <= total_fee_earned_msat,
7931+ "skimmed_fee_msat must always be included in total_fee_earned_msat");
7932+ (Some(MonitorUpdateCompletionAction::EmitEventAndFreeOtherChannel {
7933+ event: events::Event::PaymentForwarded {
7934+ prev_channel_id: Some(prev_channel_id),
7935+ next_channel_id: Some(next_channel_id),
7936+ prev_user_channel_id,
7937+ next_user_channel_id,
7938+ prev_node_id,
7939+ next_node_id: Some(next_channel_counterparty_node_id),
7940+ total_fee_earned_msat,
7941+ skimmed_fee_msat,
7942+ claim_from_onchain_tx: from_onchain,
7943+ outbound_amount_forwarded_msat: forwarded_htlc_value_msat,
7944+ },
7945+ downstream_counterparty_and_funding_outpoint: chan_to_release,
7946+ }), None)
7947+ }
7948+ });
7949+ }
7950+ },
78777951 }
78787952 }
78797953
@@ -13525,18 +13599,18 @@ impl Readable for HTLCSource {
1352513599 }
1352613600 1 => Ok(HTLCSource::PreviousHopData(Readable::read(reader)?)),
1352713601 2 => {
13528- let mut previous_hop_data: crate::util::ser::RequiredWrapper<HTLCPreviousHopData> = crate::util::ser::RequiredWrapper(None);
13602+ let mut previous_hop_data = Vec::new();
1352913603 let mut incoming_trampoline_shared_secret: crate::util::ser::RequiredWrapper<[u8; 32]> = crate::util::ser::RequiredWrapper(None);
1353013604 let mut session_priv: crate::util::ser::RequiredWrapper<SecretKey> = crate::util::ser::RequiredWrapper(None);
1353113605 let mut hops = Vec::new();
1353213606 read_tlv_fields!(reader, {
13533- (0, previous_hop_data, required),
13607+ (0, previous_hop_data, required_vec),
1353413608 (2, incoming_trampoline_shared_secret, required),
1353513609 (4, session_priv, required),
1353613610 (6, hops, required_vec),
1353713611 });
1353813612 Ok(HTLCSource::TrampolineForward {
13539- previous_hop_data: previous_hop_data.0.unwrap() ,
13613+ previous_hop_data,
1354013614 incoming_trampoline_shared_secret: incoming_trampoline_shared_secret.0.unwrap(),
1354113615 hops,
1354213616 session_priv: session_priv.0.unwrap(),
@@ -13567,11 +13641,12 @@ impl Writeable for HTLCSource {
1356713641 1u8.write(writer)?;
1356813642 field.write(writer)?;
1356913643 }
13570- HTLCSource::TrampolineForward { ref previous_hop_data, ref incoming_trampoline_shared_secret, ref session_priv, hops: hops_ref } => {
13644+ HTLCSource::TrampolineForward { previous_hop_data: previous_hop_data_ref, ref incoming_trampoline_shared_secret, ref session_priv, hops: hops_ref } => {
1357113645 2u8.write(writer)?;
13646+ let previous_hop_data = previous_hop_data_ref.clone();
1357213647 let hops = hops_ref.clone();
1357313648 write_tlv_fields!(writer, {
13574- (0, previous_hop_data, required),
13649+ (0, previous_hop_data, required_vec),
1357513650 (2, incoming_trampoline_shared_secret, required),
1357613651 (4, session_priv, required),
1357713652 (6, hops, required_vec),
@@ -14710,7 +14785,7 @@ where
1471014785 for (htlc_source, (htlc, preimage_opt)) in monitor.get_all_current_outbound_htlcs() {
1471114786 let logger = WithChannelMonitor::from(&args.logger, monitor, Some(htlc.payment_hash));
1471214787 match htlc_source {
14713- HTLCSource::PreviousHopData(prev_hop_data) | HTLCSource::TrampolineForward { previous_hop_data: prev_hop_data, .. } => {
14788+ HTLCSource::PreviousHopData(prev_hop_data) => {
1471414789 let pending_forward_matches_htlc = |info: &PendingAddHTLCInfo| {
1471514790 info.prev_funding_outpoint == prev_hop_data.outpoint &&
1471614791 info.prev_htlc_id == prev_hop_data.htlc_id
@@ -14757,6 +14832,55 @@ where
1475714832 } else { true }
1475814833 });
1475914834 },
14835+ HTLCSource::TrampolineForward { previous_hop_data, .. } => {
14836+ for current_previous_hop_data in previous_hop_data {
14837+ let pending_forward_matches_htlc = |info: &PendingAddHTLCInfo| {
14838+ info.prev_funding_outpoint == current_previous_hop_data.outpoint &&
14839+ info.prev_htlc_id == current_previous_hop_data.htlc_id
14840+ };
14841+ // The ChannelMonitor is now responsible for this HTLC's
14842+ // failure/success and will let us know what its outcome is. If we
14843+ // still have an entry for this HTLC in `forward_htlcs` or
14844+ // `pending_intercepted_htlcs`, we were apparently not persisted after
14845+ // the monitor was when forwarding the payment.
14846+ decode_update_add_htlcs.retain(|scid, update_add_htlcs| {
14847+ update_add_htlcs.retain(|update_add_htlc| {
14848+ let matches = *scid == current_previous_hop_data.short_channel_id &&
14849+ update_add_htlc.htlc_id == current_previous_hop_data.htlc_id;
14850+ if matches {
14851+ log_info!(logger, "Removing pending to-decode HTLC with hash {} as it was forwarded to the closed channel {}",
14852+ &htlc.payment_hash, &monitor.channel_id());
14853+ }
14854+ !matches
14855+ });
14856+ !update_add_htlcs.is_empty()
14857+ });
14858+ forward_htlcs.retain(|_, forwards| {
14859+ forwards.retain(|forward| {
14860+ if let HTLCForwardInfo::AddHTLC(htlc_info) = forward {
14861+ if pending_forward_matches_htlc(&htlc_info) {
14862+ log_info!(logger, "Removing pending to-forward HTLC with hash {} as it was forwarded to the closed channel {}",
14863+ &htlc.payment_hash, &monitor.channel_id());
14864+ false
14865+ } else { true }
14866+ } else { true }
14867+ });
14868+ !forwards.is_empty()
14869+ });
14870+ pending_intercepted_htlcs.as_mut().unwrap().retain(|intercepted_id, htlc_info| {
14871+ if pending_forward_matches_htlc(&htlc_info) {
14872+ log_info!(logger, "Removing pending intercepted HTLC with hash {} as it was forwarded to the closed channel {}",
14873+ &htlc.payment_hash, &monitor.channel_id());
14874+ pending_events_read.retain(|(event, _)| {
14875+ if let Event::HTLCIntercepted { intercept_id: ev_id, .. } = event {
14876+ intercepted_id != ev_id
14877+ } else { true }
14878+ });
14879+ false
14880+ } else { true }
14881+ });
14882+ }
14883+ }
1476014884 HTLCSource::OutboundRoute { payment_id, session_priv, path, .. } => {
1476114885 if let Some(preimage) = preimage_opt {
1476214886 let pending_events = Mutex::new(pending_events_read);
0 commit comments