@@ -177,7 +177,7 @@ pub(super) enum HTLCForwardInfo {
 }
 
 /// Tracks the inbound corresponding to an outbound HTLC
-#[derive(Clone, Hash, PartialEq, Eq)]
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
 pub(crate) struct HTLCPreviousHopData {
     // Note that this may be an outbound SCID alias for the associated channel.
     short_channel_id: u64,
@@ -283,7 +283,7 @@ impl Readable for InterceptId {
     }
 }
 
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 /// Uniquely describes an HTLC by its source. Just the guaranteed-unique subset of [`HTLCSource`].
 pub(crate) enum SentHTLCId {
     PreviousHopData { short_channel_id: u64, htlc_id: u64 },
@@ -314,7 +314,7 @@ impl_writeable_tlv_based_enum!(SentHTLCId,
 
 /// Tracks the inbound corresponding to an outbound HTLC
 #[allow(clippy::derive_hash_xor_eq)] // Our Hash is faithful to the data, we just don't have SecretKey::hash
-#[derive(Clone, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub(crate) enum HTLCSource {
     PreviousHopData(HTLCPreviousHopData),
     OutboundRoute {
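Since the allowance above tends to raise eyebrows in review: clippy's `derive_hash_xor_eq` guards the invariant that equal values must hash equally. A self-contained toy sketch (my own `Key` type, not LDK's `HTLCSource`) of why a hand-written `Hash` next to a derived `PartialEq` is sound as long as both cover the same data:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Toy type, not LDK's: derived `PartialEq`, hand-written `Hash`.
#[derive(PartialEq, Eq)]
struct Key {
    bytes: [u8; 4],
}

// The manual impl hashes exactly the fields `PartialEq` compares, so the
// "a == b implies hash(a) == hash(b)" invariant the lint protects still holds.
impl Hash for Key {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.bytes.hash(state);
    }
}

fn hash_of(k: &Key) -> u64 {
    let mut h = DefaultHasher::new();
    k.hash(&mut h);
    h.finish()
}

fn main() {
    let (a, b) = (Key { bytes: [1, 2, 3, 4] }, Key { bytes: [1, 2, 3, 4] });
    assert!(a == b && hash_of(&a) == hash_of(&b));
}
```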
@@ -656,7 +656,6 @@ pub(crate) enum RAAMonitorUpdateBlockingAction {
 }
 
 impl RAAMonitorUpdateBlockingAction {
-    #[allow(unused)]
     fn from_prev_hop_data(prev_hop: &HTLCPreviousHopData) -> Self {
         Self::ForwardedPaymentInboundClaim {
             channel_id: prev_hop.outpoint.to_channel_id(),
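Dropping `#[allow(unused)]` is right, since `from_prev_hop_data` gains call sites later in this patch (in `claim_funds_internal` and `internal_update_fulfill_htlc`). A standalone toy model (stand-in types, not LDK's) of the property the helper relies on: it is a pure function of the hop data, so the blocker pushed when the claim starts compares equal to the `completed_blocker` built later from the same hop, which is presumably how the completion path finds and drops it:

```rust
// Minimal stand-ins for `HTLCPreviousHopData` and
// `RAAMonitorUpdateBlockingAction`; only the equality property matters here.
#[derive(Clone, Debug, PartialEq, Eq)]
struct PrevHopData {
    channel_id: [u8; 32], // stands in for `prev_hop.outpoint.to_channel_id()`
    htlc_id: u64,
}

#[derive(Clone, Debug, PartialEq, Eq)]
enum BlockingAction {
    ForwardedPaymentInboundClaim { channel_id: [u8; 32], htlc_id: u64 },
}

impl BlockingAction {
    fn from_prev_hop_data(prev_hop: &PrevHopData) -> Self {
        Self::ForwardedPaymentInboundClaim {
            channel_id: prev_hop.channel_id,
            htlc_id: prev_hop.htlc_id,
        }
    }
}

fn main() {
    let hop = PrevHopData { channel_id: [7; 32], htlc_id: 42 };
    let pushed = BlockingAction::from_prev_hop_data(&hop);
    let completed = BlockingAction::from_prev_hop_data(&hop);
    // Equality is what lets the completion path match and remove the blocker.
    assert_eq!(pushed, completed);
}
```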
@@ -5175,11 +5174,17 @@ where
         self.pending_outbound_payments.finalize_claims(sources, &self.pending_events);
     }
 
-    fn claim_funds_internal(&self, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool, next_channel_outpoint: OutPoint) {
+    fn claim_funds_internal(&self, source: HTLCSource, payment_preimage: PaymentPreimage,
+        forwarded_htlc_value_msat: Option<u64>, from_onchain: bool,
+        next_channel_counterparty_node_id: Option<PublicKey>, next_channel_outpoint: OutPoint
+    ) {
         match source {
             HTLCSource::OutboundRoute { session_priv, payment_id, path, .. } => {
                 debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
                     "We don't support claim_htlc claims during startup - monitors may not be available yet");
+                if let Some(pubkey) = next_channel_counterparty_node_id {
+                    debug_assert_eq!(pubkey, path.hops[0].pubkey);
+                }
                 let ev_completion_action = EventCompletionAction::ReleaseRAAChannelMonitorUpdate {
                     channel_funding_outpoint: next_channel_outpoint,
                     counterparty_node_id: path.hops[0].pubkey,
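The new argument is a pure consistency check on this path: if the caller knows which peer delivered the fulfill, it must be the route's first hop, and `None` is only expected for `ChannelMonitor`-originated claims. A minimal sketch of the pattern, with a byte array standing in for `secp256k1::PublicKey`:

```rust
/// Stand-in for a 33-byte compressed public key (LDK uses `secp256k1::PublicKey`).
type NodeId = [u8; 33];

// Callers that know which peer delivered the fulfill pass `Some(node_id)`;
// monitor-originated claims may pass `None`. When present, it must match the
// route's first hop - a programmer-error check, hence `debug_assert_eq!`
// rather than a runtime failure path.
fn check_first_hop(first_hop: NodeId, claimed_from: Option<NodeId>) {
    if let Some(pubkey) = claimed_from {
        debug_assert_eq!(pubkey, first_hop);
    }
}

fn main() {
    let hop: NodeId = [2; 33];
    check_first_hop(hop, Some(hop)); // fine
    check_first_hop(hop, None);      // monitor-originated: nothing to check
}
```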
@@ -5190,6 +5195,7 @@ where
             },
             HTLCSource::PreviousHopData(hop_data) => {
                 let prev_outpoint = hop_data.outpoint;
+                let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
                 let res = self.claim_funds_from_hop(hop_data, payment_preimage,
                     |htlc_claim_value_msat| {
                         if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
@@ -5205,7 +5211,17 @@ where
                             next_channel_id: Some(next_channel_outpoint.to_channel_id()),
                             outbound_amount_forwarded_msat: forwarded_htlc_value_msat,
                         },
-                        downstream_counterparty_and_funding_outpoint: None,
+                        downstream_counterparty_and_funding_outpoint:
+                            if let Some(node_id) = next_channel_counterparty_node_id {
+                                Some((node_id, next_channel_outpoint, completed_blocker))
+                            } else {
+                                // We can only get `None` here if we are processing a
+                                // `ChannelMonitor`-originated event, in which case we
+                                // don't care about ensuring we wake the downstream
+                                // channel's monitor updating - the channel is already
+                                // closed.
+                                None
+                            },
                     })
                 } else { None }
             });
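The `if let`/`else` above is morally an `Option::map`; the explicit form was presumably kept to carry the comment. A standalone sketch (stand-in types, not LDK's) of the equivalence and of the triple's shape, which rides along so the downstream channel's held RAA monitor update can be released once the upstream claim's monitor update completes:

```rust
// Stand-in types; the real triple is
// (PublicKey, OutPoint, RAAMonitorUpdateBlockingAction).
#[derive(Clone, Copy, Debug, PartialEq, Eq)] struct NodeId(u8);
#[derive(Clone, Copy, Debug, PartialEq, Eq)] struct OutPoint(u32);
#[derive(Clone, Copy, Debug, PartialEq, Eq)] struct Blocker(u64);

fn downstream_triple(
    counterparty: Option<NodeId>, funding: OutPoint, blocker: Blocker,
) -> Option<(NodeId, OutPoint, Blocker)> {
    // Equivalent to the explicit `if let ... { Some(...) } else { None }` in
    // the patch; `None` only occurs for monitor-originated events, where the
    // downstream channel is already closed.
    counterparty.map(|node_id| (node_id, funding, blocker))
}

fn main() {
    assert_eq!(downstream_triple(Some(NodeId(1)), OutPoint(9), Blocker(3)),
               Some((NodeId(1), OutPoint(9), Blocker(3))));
    assert_eq!(downstream_triple(None, OutPoint(9), Blocker(3)), None);
}
```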
@@ -6044,6 +6060,17 @@ where
             hash_map::Entry::Occupied(mut chan_phase_entry) => {
                 if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
                     let res = try_chan_phase_entry!(self, chan.update_fulfill_htlc(&msg), chan_phase_entry);
+                    if let HTLCSource::PreviousHopData(prev_hop) = &res.0 {
+                        peer_state.actions_blocking_raa_monitor_updates.entry(msg.channel_id)
+                            .or_insert_with(Vec::new)
+                            .push(RAAMonitorUpdateBlockingAction::from_prev_hop_data(&prev_hop));
+                    }
+                    // Note that in the `OutboundRoute` case we do not need to push an
+                    // `actions_blocking_raa_monitor_updates` entry here, even though we
+                    // *do* need to block the next RAA monitor update. We do this instead
+                    // in `claim_funds_internal` by attaching a
+                    // `ReleaseRAAChannelMonitorUpdate` action to the event generated when
+                    // the outbound HTLC is claimed. This is guaranteed to all complete
+                    // before we process the RAA, as messages from a single peer are
+                    // processed serially.
                     funding_txo = chan.context.get_funding_txo().expect("We won't accept a fulfill until funded");
                     res
                 } else {
@@ -6054,7 +6081,7 @@ where
             hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close(format!("Got a message for a channel from the wrong node! No such channel for the passed counterparty_node_id {}", counterparty_node_id), msg.channel_id))
         }
     };
-    self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false, funding_txo);
+    self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false, Some(*counterparty_node_id), funding_txo);
     Ok(())
 }
 
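Worth spelling out the ordering this hunk depends on: the blocker is recorded while the peer-state lock is held, and `claim_funds_internal` runs only after that block ends; since messages from a single peer are processed serially, the peer's next RAA cannot be handled before this returns. A minimal standalone model of that shape (a bare `u64` standing in for the real action type):

```rust
use std::collections::HashMap;
use std::sync::Mutex;

type ChannelId = u64;

struct PeerState {
    actions_blocking_raa_monitor_updates: HashMap<ChannelId, Vec<u64>>,
}

// Models internal_update_fulfill_htlc's shape: record the blocker under the
// lock, drop the lock, then run the upstream claim. By the time this peer's
// next RAA is processed, the blocker is already visible.
fn on_update_fulfill_htlc(peer: &Mutex<PeerState>, channel_id: ChannelId, blocker: u64) {
    {
        let mut state = peer.lock().unwrap();
        state.actions_blocking_raa_monitor_updates
            .entry(channel_id).or_default().push(blocker);
    } // lock released here, mirroring the end of the per-peer block above
    // ...the equivalent of claim_funds_internal(...) would run here,
    // outside the lock.
}

fn main() {
    let peer = Mutex::new(PeerState { actions_blocking_raa_monitor_updates: HashMap::new() });
    on_update_fulfill_htlc(&peer, 1, 42);
    assert_eq!(peer.lock().unwrap().actions_blocking_raa_monitor_updates[&1], vec![42]);
}
```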
@@ -6256,6 +6283,23 @@ where
         })
     }
 
+    #[cfg(any(test, feature = "_test_utils"))]
+    pub(crate) fn test_raa_monitor_updates_held(&self,
+        counterparty_node_id: PublicKey, channel_id: ChannelId
+    ) -> bool {
+        let per_peer_state = self.per_peer_state.read().unwrap();
+        if let Some(peer_state_mtx) = per_peer_state.get(&counterparty_node_id) {
+            let mut peer_state_lck = peer_state_mtx.lock().unwrap();
+            let peer_state = &mut *peer_state_lck;
+
+            if let Some(chan) = peer_state.channel_by_id.get(&channel_id) {
+                return self.raa_monitor_updates_held(&peer_state.actions_blocking_raa_monitor_updates,
+                    chan.context().get_funding_txo().unwrap(), counterparty_node_id);
+            }
+        }
+        false
+    }
+
     fn internal_revoke_and_ack(&self, counterparty_node_id: &PublicKey, msg: &msgs::RevokeAndACK) -> Result<(), MsgHandleErrInternal> {
         let (htlcs_to_fail, res) = {
             let per_peer_state = self.per_peer_state.read().unwrap();
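For readers skimming the new test hook: it answers "is an RAA-triggered monitor update currently held for this channel?". A runnable approximation of the underlying lookup with the locking and funding-outpoint plumbing stripped away (stand-in types; the real `raa_monitor_updates_held` may consult more state than this):

```rust
use std::collections::HashMap;

type ChannelId = [u8; 32];

#[derive(Clone, Debug, PartialEq, Eq)]
enum Blocker {
    ForwardedPaymentInboundClaim { channel_id: ChannelId, htlc_id: u64 },
}

// Approximates what the helper reports: an RAA-triggered monitor update is
// considered held while any blocking action remains queued for the channel.
fn raa_monitor_updates_held(
    actions: &HashMap<ChannelId, Vec<Blocker>>, channel_id: &ChannelId,
) -> bool {
    actions.get(channel_id).map_or(false, |blockers| !blockers.is_empty())
}

fn main() {
    let mut actions: HashMap<ChannelId, Vec<Blocker>> = HashMap::new();
    let chan = [1; 32];
    assert!(!raa_monitor_updates_held(&actions, &chan));
    actions.entry(chan).or_default().push(Blocker::ForwardedPaymentInboundClaim {
        channel_id: [2; 32], htlc_id: 0,
    });
    assert!(raa_monitor_updates_held(&actions, &chan));
}
```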
@@ -6477,8 +6521,8 @@ where
             match monitor_event {
                 MonitorEvent::HTLCEvent(htlc_update) => {
                     if let Some(preimage) = htlc_update.payment_preimage {
-                        log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", &preimage);
-                        self.claim_funds_internal(htlc_update.source, preimage, htlc_update.htlc_value_satoshis.map(|v| v * 1000), true, funding_outpoint);
+                        log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", preimage);
+                        self.claim_funds_internal(htlc_update.source, preimage, htlc_update.htlc_value_satoshis.map(|v| v * 1000), true, counterparty_node_id, funding_outpoint);
                     } else {
                         log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
                         let receiver = HTLCDestination::NextHopChannel { node_id: counterparty_node_id, channel_id: funding_outpoint.to_channel_id() };
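One small thing that trips up readers here: monitors track HTLC values in whole satoshis while `claim_funds_internal` takes millisatoshis, hence the `map(|v| v * 1000)` lifted over the `Option`:

```rust
fn main() {
    // sats -> msats, applied only when the monitor actually knew the value.
    let htlc_value_satoshis: Option<u64> = Some(1_000);
    assert_eq!(htlc_value_satoshis.map(|v| v * 1000), Some(1_000_000));
    assert_eq!(None::<u64>.map(|v| v * 1000), None);
}
```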
@@ -9298,6 +9342,7 @@ where
                                 // downstream chan is closed (because we don't have a
                                 // channel_id -> peer map entry).
                                 counterparty_opt.is_none(),
+                                counterparty_opt.cloned().or(monitor.get_counterparty_node_id()),
                                 monitor.get_funding_txo().0))
                         } else { None }
                     } else {
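`counterparty_opt.cloned().or(monitor.get_counterparty_node_id())` prefers the live peer-map entry and falls back to whatever the monitor recorded; the fallback is itself an `Option`, presumably because monitors written by older versions may not have stored the counterparty id. The `or` semantics in isolation:

```rust
fn main() {
    // `or` keeps the first `Some`, consulting the fallback only on `None`.
    let from_peer_map: Option<u32> = None;   // downstream channel closed
    let from_monitor: Option<u32> = Some(7); // monitor remembered the peer
    assert_eq!(from_peer_map.or(from_monitor), Some(7));
    assert_eq!(Some(3).or(from_monitor), Some(3));
}
```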
@@ -9576,12 +9621,12 @@ where
             channel_manager.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
         }
 
-        for (source, preimage, downstream_value, downstream_closed, downstream_funding) in pending_claims_to_replay {
+        for (source, preimage, downstream_value, downstream_closed, downstream_node_id, downstream_funding) in pending_claims_to_replay {
             // We use `downstream_closed` in place of `from_onchain` here just as a guess - we
             // don't remember in the `ChannelMonitor` where we got a preimage from, but if the
             // channel is closed we just assume that it probably came from an on-chain claim.
             channel_manager.claim_funds_internal(source, preimage, Some(downstream_value),
-                downstream_closed, downstream_funding);
+                downstream_closed, downstream_node_id, downstream_funding);
         }
 
         //TODO: Broadcast channel update for closed channels, but only after we've made a
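For reference, the replay tuple built during deserialization now carries six elements; a stub-typed sketch of its shape (stand-ins for LDK's `HTLCSource`, `PaymentPreimage`, `PublicKey`, and `OutPoint`):

```rust
#![allow(dead_code)]

// Stub types; only the tuple's shape is the point here.
struct HtlcSource;
struct Preimage;
struct NodeId;
struct OutPoint;

type PendingClaim = (
    HtlcSource,     // source
    Preimage,       // preimage
    u64,            // downstream_value, in msats
    bool,           // downstream_closed, reused as a best-guess `from_onchain`
    Option<NodeId>, // downstream_node_id - the slot added by this patch
    OutPoint,       // downstream_funding
);

fn main() {
    let _claims: Vec<PendingClaim> = Vec::new(); // shape-checks the alias
}
```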