Commit 4b35038

Avoid startup PeerState entries for peers with unfunded channels
If a peer creates a channel with us which never reaches the funding stage (or never gets any commitment updates after creation), we skip inserting the `update_id` into `closed_channel_monitor_update_ids` at runtime, so that we don't keep a `PeerState` entry around for no reason.

However, on startup we still create a `ChannelMonitorUpdate` with a `ChannelForceClosed` update step to ensure the `ChannelMonitor` is locked and shut down. This is pretty redundant, and results in a bunch of on-startup `ChannelMonitorUpdate`s for any old but non-archived `ChannelMonitor`s.

Instead, here, we check whether a `ChannelMonitor` already saw a `ChannelForceClosed` update step before we generate the on-startup `ChannelMonitorUpdate`. This also allows us to skip the `closed_channel_monitor_update_ids` insertion, since we can be confident we'll never have a `ChannelMonitorUpdate` for this channel at all.
1 parent 94a50a0 commit 4b35038
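
As a concrete illustration of the change described above, here is a minimal, self-contained sketch of the new startup decision. Everything in it (`MockMonitor`, `startup_force_close_updates`, the `u64` outpoint stand-in) is hypothetical scaffolding for illustration, not LDK's actual types; the real change is in the `ChannelManager` read path shown in the channelmanager.rs diff below.

use std::collections::HashSet;

// Stand-in for a ChannelMonitor: `lockdown_from_offchain` latches to true
// once a `ChannelForceClosed` update step is applied, and is persisted with
// the rest of the monitor state.
struct MockMonitor {
    funding_txo: u64, // stand-in for the real funding OutPoint
    lockdown_from_offchain: bool,
}

impl MockMonitor {
    fn offchain_closed(&self) -> bool {
        self.lockdown_from_offchain
    }
}

// Returns how many `ChannelForceClosed` startup updates would be regenerated.
fn startup_force_close_updates(
    monitors: &[MockMonitor], live_funding_txos: &HashSet<u64>,
) -> usize {
    let mut regenerated = 0;
    for monitor in monitors {
        if monitor.offchain_closed() {
            // New in this commit: the monitor already saw a ChannelForceClosed
            // update, so we skip the redundant update (and the associated
            // closed_channel_monitor_update_ids bookkeeping).
            continue;
        }
        if !live_funding_txos.contains(&monitor.funding_txo) {
            // As before: no live channel owns this monitor, so a one-time
            // ChannelForceClosed update is queued to lock it down.
            regenerated += 1;
        }
    }
    regenerated
}

fn main() {
    let monitors = vec![
        MockMonitor { funding_txo: 1, lockdown_from_offchain: true },  // already locked down
        MockMonitor { funding_txo: 2, lockdown_from_offchain: false }, // still needs lockdown
    ];
    // No live channels remain; previously both monitors would get a startup
    // update, now only the second one does.
    assert_eq!(startup_force_close_updates(&monitors, &HashSet::new()), 1);
}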

File tree

4 files changed: +10 -65 lines changed
lightning/src/chain/channelmonitor.rs

Lines changed: 6 additions & 0 deletions
@@ -1682,6 +1682,12 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitor<Signer> {
         self.inner.lock().unwrap().get_cur_holder_commitment_number()
     }
 
+    /// Gets whether we've been notified that this channel is closed by the `ChannelManager` (i.e.
+    /// via a [`ChannelMonitorUpdateStep::ChannelForceClosed`]).
+    pub(crate) fn offchain_closed(&self) -> bool {
+        self.inner.lock().unwrap().lockdown_from_offchain
+    }
+
     /// Gets the `node_id` of the counterparty for this channel.
     ///
     /// Will be `None` for channels constructed on LDK versions prior to 0.0.110 and always `Some`
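
The accessor added above is just a lock-then-read of state the monitor already tracked: `lockdown_from_offchain` is set when the monitor applies a `ChannelForceClosed` update step from the `ChannelManager`, and it is serialized with the rest of the monitor. A rough sketch of the pattern, using mock types rather than LDK's real `ChannelMonitor`:

use std::sync::Mutex;

// Mock of the state behind the ChannelMonitor's internal mutex.
struct MonitorInner {
    lockdown_from_offchain: bool,
}

struct Monitor {
    inner: Mutex<MonitorInner>,
}

impl Monitor {
    // Simplified: applying a ChannelForceClosed update step latches the flag
    // (the real handling also stops signing further off-chain updates and may
    // broadcast the latest holder commitment transaction).
    fn apply_channel_force_closed(&self) {
        self.inner.lock().unwrap().lockdown_from_offchain = true;
    }

    // Shape of the accessor added above: take the lock, read the latched flag.
    fn offchain_closed(&self) -> bool {
        self.inner.lock().unwrap().lockdown_from_offchain
    }
}

fn main() {
    let monitor = Monitor { inner: Mutex::new(MonitorInner { lockdown_from_offchain: false }) };
    monitor.apply_channel_force_closed();
    assert!(monitor.offchain_closed()); // persisted with the real monitor's state
}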

lightning/src/ln/channelmanager.rs

Lines changed: 4 additions & 57 deletions
@@ -7123,8 +7123,6 @@ where
         let prev_channel_id = hop_data.channel_id;
         let prev_user_channel_id = hop_data.user_channel_id;
         let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
-        #[cfg(debug_assertions)]
-        let claiming_chan_funding_outpoint = hop_data.outpoint;
         self.claim_funds_from_hop(hop_data, payment_preimage,
             |htlc_claim_value_msat, definitely_duplicate| {
                 let chan_to_release =
@@ -7149,61 +7147,6 @@ where
                     // monitor updates still in flight. In that case, we shouldn't
                     // immediately free, but instead let that monitor update complete
                     // in the background.
-                    #[cfg(debug_assertions)] {
-                        let background_events = self.pending_background_events.lock().unwrap();
-                        // There should be a `BackgroundEvent` pending...
-                        assert!(background_events.iter().any(|ev| {
-                            match ev {
-                                BackgroundEvent::MonitorUpdateRegeneratedOnStartup {
-                                    funding_txo, update, ..
-                                } => {
-                                    if *funding_txo == claiming_chan_funding_outpoint {
-                                        // to apply a monitor update that blocked the claiming channel,
-                                        assert!(update.updates.iter().any(|upd|
-                                            if let ChannelMonitorUpdateStep::PaymentPreimage {
-                                                payment_preimage: update_preimage
-                                            } = upd {
-                                                payment_preimage == *update_preimage
-                                            } else {
-                                                false
-                                            }
-                                        ), "{:?}", update);
-                                        true
-                                    } else if *funding_txo == next_channel_outpoint {
-                                        // or the channel we'd unblock is already closed,
-                                        assert!(update.updates.iter().any(|upd|
-                                            if let ChannelMonitorUpdateStep::ChannelForceClosed { .. } = upd {
-                                                true
-                                            } else {
-                                                false
-                                            }
-                                        ), "{:?}", update);
-                                        true
-                                    } else { false }
-                                },
-                                // or the channel we'd unblock is already closed (for an
-                                // old channel),
-                                BackgroundEvent::ClosedMonitorUpdateRegeneratedOnStartup(
-                                    (funding_txo, _channel_id, monitor_update)
-                                ) => {
-                                    if *funding_txo == next_channel_outpoint {
-                                        assert_eq!(monitor_update.updates.len(), 1);
-                                        assert!(matches!(
-                                            monitor_update.updates[0],
-                                            ChannelMonitorUpdateStep::ChannelForceClosed { .. }
-                                        ));
-                                        true
-                                    } else { false }
-                                },
-                                // or the monitor update has completed and will unblock
-                                // immediately once we get going.
-                                BackgroundEvent::MonitorUpdatesComplete {
-                                    channel_id, ..
-                                } =>
-                                    *channel_id == prev_channel_id,
-                            }
-                        }), "{:?}", *background_events);
-                    }
                     (None, None)
                 } else if definitely_duplicate {
                     if let Some(other_chan) = chan_to_release {
@@ -12483,6 +12426,10 @@ where
         }
 
         for (funding_txo, monitor) in args.channel_monitors.iter() {
+            if monitor.offchain_closed() {
+                // We already applied a ChannelForceClosed update.
+                continue;
+            }
             if !funding_txo_set.contains(funding_txo) {
                 let logger = WithChannelMonitor::from(&args.logger, monitor, None);
                 let channel_id = monitor.channel_id();
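
For context on the large `#[cfg(debug_assertions)]` block deleted above: it asserted that whenever a startup claim was deferred, some matching `BackgroundEvent` was pending, including an arm for "the channel we'd unblock is already closed". With this commit, a monitor that was already locked down produces no startup `ChannelMonitorUpdate` at all, so that arm is no longer an invariant. A hypothetical miniature of the now-invalid check (the `Event` enum and `u64` outpoints are stand-ins, not LDK's `BackgroundEvent`):

#[derive(Debug)]
enum Event {
    MonitorUpdateRegeneratedOnStartup { funding_txo: u64 },
    ClosedMonitorUpdateRegeneratedOnStartup { funding_txo: u64 },
    MonitorUpdatesComplete { channel_id: u64 },
}

fn old_invariant(events: &[Event], claiming_txo: u64, next_txo: u64, prev_chan_id: u64) -> bool {
    events.iter().any(|ev| match ev {
        // a monitor update that blocked the claiming channel is pending, or
        Event::MonitorUpdateRegeneratedOnStartup { funding_txo } =>
            *funding_txo == claiming_txo || *funding_txo == next_txo,
        // the channel we'd unblock was closed before startup, or
        Event::ClosedMonitorUpdateRegeneratedOnStartup { funding_txo } =>
            *funding_txo == next_txo,
        // the monitor update completed and will unblock immediately.
        Event::MonitorUpdatesComplete { channel_id } => *channel_id == prev_chan_id,
    })
}

fn main() {
    // Post-commit, a deferred claim with no pending events is a valid state,
    // which is why the assertion had to be removed outright.
    assert!(!old_invariant(&[], 1, 2, 3));
}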

lightning/src/ln/monitor_tests.rs

Lines changed: 0 additions & 4 deletions
@@ -2300,9 +2300,6 @@ fn do_test_restored_packages_retry(check_old_monitor_retries_after_upgrade: bool
 
     // Connecting more blocks should result in the HTLC transactions being rebroadcast.
     connect_blocks(&nodes[0], 6);
-    if check_old_monitor_retries_after_upgrade {
-        check_added_monitors(&nodes[0], 1);
-    }
     {
         let txn = nodes[0].tx_broadcaster.txn_broadcast();
         if !nodes[0].connect_style.borrow().skips_blocks() {
@@ -3023,7 +3020,6 @@ fn do_test_anchors_monitor_fixes_counterparty_payment_script_on_reload(confirm_c
     // If we saw the commitment before our `counterparty_payment_script` was fixed, we'll never
     // get the spendable output event for the `to_remote` output, so we'll need to get it
    // manually via `get_spendable_outputs`.
-    check_added_monitors(&nodes[1], 1);
     let outputs = get_monitor!(nodes[1], chan_id).get_spendable_outputs(&commitment_tx, commitment_tx_conf_height);
     assert_eq!(outputs.len(), 1);
     let spend_tx = nodes[1].keys_manager.backing.spend_spendable_outputs(

lightning/src/ln/payment_tests.rs

Lines changed: 0 additions & 4 deletions
@@ -992,7 +992,6 @@ fn do_test_completed_payment_not_retryable_on_reload(use_dust: bool) {
     nodes[1].node.peer_disconnected(nodes[0].node.get_our_node_id());
 
     nodes[0].node.test_process_background_events();
-    check_added_monitors(&nodes[0], 1);
 
     let mut reconnect_args = ReconnectArgs::new(&nodes[0], &nodes[1]);
     reconnect_args.send_channel_ready = (true, true);
@@ -1022,7 +1021,6 @@ fn do_test_completed_payment_not_retryable_on_reload(use_dust: bool) {
     nodes[1].node.peer_disconnected(nodes[0].node.get_our_node_id());
 
     nodes[0].node.test_process_background_events();
-    check_added_monitors(&nodes[0], 1);
 
     reconnect_nodes(ReconnectArgs::new(&nodes[0], &nodes[1]));
 
@@ -1161,7 +1159,6 @@ fn do_test_dup_htlc_onchain_doesnt_fail_on_reload(persist_manager_post_event: bo
     let height = nodes[0].blocks.lock().unwrap().len() as u32 - 1;
     nodes[0].chain_monitor.chain_monitor.block_connected(&claim_block, height);
     assert!(nodes[0].node.get_and_clear_pending_events().is_empty());
-    check_added_monitors(&nodes[0], 1);
 }
 
 #[test]
@@ -3521,7 +3518,6 @@ fn do_no_missing_sent_on_reload(persist_manager_with_payment: bool, at_midpoint:
     reload_node!(nodes[0], test_default_channel_config(), &nodes[0].node.encode(), &[&chan_0_monitor_serialized], persister_c, chain_monitor_c, nodes_0_deserialized_c);
     let events = nodes[0].node.get_and_clear_pending_events();
     assert!(events.is_empty());
-    check_added_monitors(&nodes[0], 1);
 }
 
 #[test]
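
All of the `check_added_monitors(&nodes[...], 1)` deletions in the two test files follow from the same cause: these tests reload a node whose `ChannelMonitor` had already been force-closed, and the helper previously observed exactly the one redundant startup update this commit eliminates. A simplified stand-in for the helper's contract (the real helper lives in LDK's test utilities and counts updates recorded by the test `ChainMonitor`):

// Assert how many monitor updates were added since the last check, then reset.
fn check_added_monitors(added_since_last_check: &mut usize, expected: usize) {
    assert_eq!(*added_since_last_check, expected);
    *added_since_last_check = 0;
}

fn main() {
    // Post-commit, reloading with an already-closed monitor queues no update,
    // so the old `expected = 1` call sites would panic; the calls were dropped.
    let mut added = 0;
    check_added_monitors(&mut added, 0);
}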
