
Commit b7de07c
Extract claiming funds from previous hop to helper method
Move the `HTLCSource::PreviousHopData` claiming logic into a new `claim_funds_from_previous_hop_internal` helper method so the trampoline routing path can reuse it.
1 parent 98eb104
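To illustrate the shape of the refactoring the message describes, here is a small self-contained toy model (all types and names below are invented for illustration; this is not LDK code): once the claiming logic lives in a shared helper, a future `TrampolineForward` arm only needs to hand over its previous-hop data instead of duplicating the `PreviousHopData` arm's logic.

    // Toy model of the extraction (illustrative only; these are not LDK's
    // types). Both match arms funnel into one shared helper, mirroring how
    // `claim_funds_from_previous_hop_internal` could let a future
    // `HTLCSource::TrampolineForward` arm reuse the `PreviousHopData` path.
    struct HopData {
        channel_id: u64,
    }

    enum HtlcSource {
        PreviousHopData(HopData),
        TrampolineForward { prev_hop: HopData },
    }

    struct Manager;

    impl Manager {
        // Stand-in for `claim_funds_from_previous_hop_internal`.
        fn claim_from_previous_hop(&self, hop: HopData) {
            println!("claiming inbound HTLC on channel {}", hop.channel_id);
        }

        fn claim_funds(&self, source: HtlcSource) {
            match source {
                HtlcSource::PreviousHopData(hop) => self.claim_from_previous_hop(hop),
                // With the helper extracted, this arm no longer needs its own
                // copy of the claiming logic. (The real commit still leaves
                // the trampoline arm as `todo!()`.)
                HtlcSource::TrampolineForward { prev_hop } => {
                    self.claim_from_previous_hop(prev_hop)
                },
            }
        }
    }

    fn main() {
        let mgr = Manager;
        mgr.claim_funds(HtlcSource::PreviousHopData(HopData { channel_id: 42 }));
        mgr.claim_funds(HtlcSource::TrampolineForward { prev_hop: HopData { channel_id: 7 } });
    }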

File tree: 1 file changed

lightning/src/ln/channelmanager.rs (+148 -123 lines changed)
@@ -8582,141 +8582,166 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 					&self.logger,
 				);
 			},
-			HTLCSource::PreviousHopData(hop_data) => {
-				let prev_channel_id = hop_data.channel_id;
-				let prev_user_channel_id = hop_data.user_channel_id;
-				let prev_node_id = hop_data.counterparty_node_id;
-				let completed_blocker =
-					RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
-
-				// Obtain hold time, if available.
-				let hold_time = hold_time_since(send_timestamp).unwrap_or(0);
-
-				// If attribution data was received from downstream, we shift it and get it ready for adding our hold
-				// time. Note that fulfilled HTLCs take a fast path to the incoming side. We don't need to wait for RAA
-				// to record the hold time like we do for failed HTLCs.
-				let attribution_data = process_fulfill_attribution_data(
-					attribution_data,
-					&hop_data.incoming_packet_shared_secret,
-					hold_time,
-				);
+			HTLCSource::PreviousHopData(hop_data) => self.claim_funds_from_previous_hop_internal(
+				payment_preimage,
+				forwarded_htlc_value_msat,
+				skimmed_fee_msat,
+				from_onchain,
+				startup_replay,
+				next_channel_counterparty_node_id,
+				next_channel_outpoint,
+				next_channel_id,
+				next_user_channel_id,
+				hop_data,
+				attribution_data,
+				send_timestamp,
+			),
+			HTLCSource::TrampolineForward { .. } => todo!(),
+		}
+	}
 
-				#[cfg(test)]
-				let claiming_chan_funding_outpoint = hop_data.outpoint;
-				self.claim_funds_from_hop(
-					hop_data,
-					payment_preimage,
-					None,
-					Some(attribution_data),
-					|htlc_claim_value_msat, definitely_duplicate| {
-						let chan_to_release = Some(EventUnblockedChannel {
-							counterparty_node_id: next_channel_counterparty_node_id,
-							funding_txo: next_channel_outpoint,
-							channel_id: next_channel_id,
-							blocking_action: completed_blocker,
-						});
+	fn claim_funds_from_previous_hop_internal(
+		&self, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>,
+		skimmed_fee_msat: Option<u64>, from_onchain: bool, startup_replay: bool,
+		next_channel_counterparty_node_id: PublicKey, next_channel_outpoint: OutPoint,
+		next_channel_id: ChannelId, next_user_channel_id: Option<u128>,
+		hop_data: HTLCPreviousHopData, attribution_data: Option<AttributionData>,
+		send_timestamp: Option<Duration>,
+	) {
+		let prev_channel_id = hop_data.channel_id;
+		let prev_user_channel_id = hop_data.user_channel_id;
+		let prev_node_id = hop_data.counterparty_node_id;
+		let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
+
+		// Obtain hold time, if available.
+		let hold_time = hold_time_since(send_timestamp).unwrap_or(0);
+
+		// If attribution data was received from downstream, we shift it and get it ready for adding our hold
+		// time. Note that fulfilled HTLCs take a fast path to the incoming side. We don't need to wait for RAA
+		// to record the hold time like we do for failed HTLCs.
+		let attribution_data = process_fulfill_attribution_data(
+			attribution_data,
+			&hop_data.incoming_packet_shared_secret,
+			hold_time,
+		);
 
-						if definitely_duplicate && startup_replay {
-							// On startup we may get redundant claims which are related to
-							// monitor updates still in flight. In that case, we shouldn't
-							// immediately free, but instead let that monitor update complete
-							// in the background.
-							#[cfg(test)]
-							{
-								let per_peer_state = self.per_peer_state.deadlocking_read();
-								// The channel we'd unblock should already be closed, or...
-								let channel_closed = per_peer_state
-									.get(&next_channel_counterparty_node_id)
-									.map(|lck| lck.deadlocking_lock())
-									.map(|peer| !peer.channel_by_id.contains_key(&next_channel_id))
-									.unwrap_or(true);
-								let background_events =
-									self.pending_background_events.lock().unwrap();
-								// there should be a `BackgroundEvent` pending...
-								let matching_bg_event =
-									background_events.iter().any(|ev| {
-										match ev {
-											// to apply a monitor update that blocked the claiming channel,
-											BackgroundEvent::MonitorUpdateRegeneratedOnStartup {
-												funding_txo, update, ..
-											} => {
-												if *funding_txo == claiming_chan_funding_outpoint {
-													assert!(update.updates.iter().any(|upd|
-														if let ChannelMonitorUpdateStep::PaymentPreimage {
+		#[cfg(test)]
+		let claiming_chan_funding_outpoint = hop_data.outpoint;
+		self.claim_funds_from_hop(
+			hop_data,
+			payment_preimage,
+			None,
+			Some(attribution_data),
+			|htlc_claim_value_msat, definitely_duplicate| {
+				let chan_to_release = Some(EventUnblockedChannel {
+					counterparty_node_id: next_channel_counterparty_node_id,
+					funding_txo: next_channel_outpoint,
+					channel_id: next_channel_id,
+					blocking_action: completed_blocker,
+				});
+
+				if definitely_duplicate && startup_replay {
+					// On startup we may get redundant claims which are related to
+					// monitor updates still in flight. In that case, we shouldn't
+					// immediately free, but instead let that monitor update complete
+					// in the background.
+					#[cfg(test)]
+					{
+						let per_peer_state = self.per_peer_state.deadlocking_read();
+						// The channel we'd unblock should already be closed, or...
+						let channel_closed = per_peer_state
+							.get(&next_channel_counterparty_node_id)
+							.map(|lck| lck.deadlocking_lock())
+							.map(|peer| !peer.channel_by_id.contains_key(&next_channel_id))
+							.unwrap_or(true);
+						let background_events = self.pending_background_events.lock().unwrap();
+						// there should be a `BackgroundEvent` pending...
+						let matching_bg_event =
+							background_events.iter().any(|ev| {
+								match ev {
+									// to apply a monitor update that blocked the claiming channel,
+									BackgroundEvent::MonitorUpdateRegeneratedOnStartup {
+										funding_txo,
+										update,
+										..
+									} => {
+										if *funding_txo == claiming_chan_funding_outpoint {
+											assert!(
+												update.updates.iter().any(|upd| {
+													if let ChannelMonitorUpdateStep::PaymentPreimage {
 														payment_preimage: update_preimage, ..
 													} = upd {
 														payment_preimage == *update_preimage
 													} else { false }
-													), "{:?}", update);
-													true
-												} else { false }
-											},
-											// or the monitor update has completed and will unblock
-											// immediately once we get going.
-											BackgroundEvent::MonitorUpdatesComplete {
-												channel_id, ..
-											} =>
-												*channel_id == prev_channel_id,
+												}),
+												"{:?}",
+												update
+											);
+											true
+										} else {
+											false
 										}
-									});
-								assert!(
-									channel_closed || matching_bg_event,
-									"{:?}",
-									*background_events
-								);
-							}
-							(None, None)
-						} else if definitely_duplicate {
-							if let Some(other_chan) = chan_to_release {
-								(Some(MonitorUpdateCompletionAction::FreeOtherChannelImmediately {
-									downstream_counterparty_node_id: other_chan.counterparty_node_id,
-									downstream_channel_id: other_chan.channel_id,
-									blocking_action: other_chan.blocking_action,
-								}), None)
+									},
+									// or the monitor update has completed and will unblock
+									// immediately once we get going.
+									BackgroundEvent::MonitorUpdatesComplete {
+										channel_id, ..
+									} => *channel_id == prev_channel_id,
+								}
+							});
+						assert!(channel_closed || matching_bg_event, "{:?}", *background_events);
+					}
+					(None, None)
+				} else if definitely_duplicate {
+					if let Some(other_chan) = chan_to_release {
+						(
+							Some(MonitorUpdateCompletionAction::FreeOtherChannelImmediately {
+								downstream_counterparty_node_id: other_chan.counterparty_node_id,
+								downstream_channel_id: other_chan.channel_id,
+								blocking_action: other_chan.blocking_action,
+							}),
+							None,
+						)
+					} else {
+						(None, None)
+					}
+				} else {
+					let total_fee_earned_msat =
+						if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
+							if let Some(claimed_htlc_value) = htlc_claim_value_msat {
+								Some(claimed_htlc_value - forwarded_htlc_value)
 							} else {
-								(None, None)
+								None
 							}
 						} else {
-							let total_fee_earned_msat =
-								if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
-									if let Some(claimed_htlc_value) = htlc_claim_value_msat {
-										Some(claimed_htlc_value - forwarded_htlc_value)
-									} else {
-										None
-									}
-								} else {
-									None
-								};
-							debug_assert!(
-								skimmed_fee_msat <= total_fee_earned_msat,
-								"skimmed_fee_msat must always be included in total_fee_earned_msat"
-							);
-							(
-								Some(MonitorUpdateCompletionAction::EmitEventAndFreeOtherChannel {
-									event: events::Event::PaymentForwarded {
-										prev_channel_id: Some(prev_channel_id),
-										next_channel_id: Some(next_channel_id),
-										prev_user_channel_id,
-										next_user_channel_id,
-										prev_node_id,
-										next_node_id: Some(next_channel_counterparty_node_id),
-										total_fee_earned_msat,
-										skimmed_fee_msat,
-										claim_from_onchain_tx: from_onchain,
-										outbound_amount_forwarded_msat: forwarded_htlc_value_msat,
-									},
-									downstream_counterparty_and_funding_outpoint: chan_to_release,
-								}),
-								None,
-							)
-						}
+							None
+						};
+					debug_assert!(
+						skimmed_fee_msat <= total_fee_earned_msat,
+						"skimmed_fee_msat must always be included in total_fee_earned_msat"
+					);
+					(
+						Some(MonitorUpdateCompletionAction::EmitEventAndFreeOtherChannel {
+							event: events::Event::PaymentForwarded {
+								prev_channel_id: Some(prev_channel_id),
+								next_channel_id: Some(next_channel_id),
+								prev_user_channel_id,
+								next_user_channel_id,
+								prev_node_id,
+								next_node_id: Some(next_channel_counterparty_node_id),
+								total_fee_earned_msat,
+								skimmed_fee_msat,
+								claim_from_onchain_tx: from_onchain,
+								outbound_amount_forwarded_msat: forwarded_htlc_value_msat,
+							},
+							downstream_counterparty_and_funding_outpoint: chan_to_release,
+						}),
+						None,
+					)
+				}
 			},
-			HTLCSource::TrampolineForward { .. } => todo!(),
-		}
+		)
 	}
-
 	/// Gets the node_id held by this ChannelManager
 	pub fn get_our_node_id(&self) -> PublicKey {
 		self.our_network_pubkey
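The fee arithmetic in the closure's non-duplicate branch is worth spelling out: the forwarding node's total fee is what it claimed on the inbound edge minus what it forwarded on the outbound edge, and any skimmed fee must be contained in that total (hence the `debug_assert!`). A minimal standalone sketch with illustrative values (not LDK code):

    // Mirrors the `total_fee_earned_msat` computation from the diff above;
    // a standalone sketch with made-up amounts.
    fn total_fee_earned_msat(
        forwarded_htlc_value_msat: Option<u64>, htlc_claim_value_msat: Option<u64>,
    ) -> Option<u64> {
        match (forwarded_htlc_value_msat, htlc_claim_value_msat) {
            (Some(forwarded), Some(claimed)) => Some(claimed - forwarded),
            _ => None,
        }
    }

    fn main() {
        // Claimed 1_000_500 msat on the inbound edge, forwarded 1_000_000 msat
        // on the outbound edge: 500 msat earned in fees.
        let fee = total_fee_earned_msat(Some(1_000_000), Some(1_000_500));
        assert_eq!(fee, Some(500));

        // Mirrors the `debug_assert!` in the diff. `Option<u64>` orders `None`
        // below any `Some(_)`, so the comparison also holds when no fee total
        // could be computed and nothing was skimmed.
        let skimmed_fee_msat: Option<u64> = Some(200);
        assert!(skimmed_fee_msat <= fee);
    }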
