Skip to content

Commit 2fb7bba

Browse files
committed
Add missing pending channel maps checks in ChannelManager
One of a series of follow-up commits to address some issues found in PR 2077, in which we split channels into different maps and structs depending on the phase of their lifecycle.
1 parent 86fd9e7 commit 2fb7bba

File tree

1 file changed

+45
-32
lines changed

1 file changed

+45
-32
lines changed

lightning/src/ln/channelmanager.rs

Lines changed: 45 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -691,6 +691,18 @@ impl <Signer: ChannelSigner> PeerState<Signer> {
691691
self.outbound_v1_channel_by_id.contains_key(channel_id) ||
692692
self.inbound_v1_channel_by_id.contains_key(channel_id)
693693
}
694+
695+
/// Returns a bool indicating if the given `channel_id` matches a channel we have with this peer
696+
/// that is in one of our pending (unfunded) channel maps.
697+
///
698+
/// NOTE: Although V1 established channels will always have a `temporary_channel_id` if they're
699+
/// in `(outbound/inbound)_v1_channel_by_id`, we use the more general `channel_id` as V2
700+
/// established channels will have a fixed `channel_id` already after the `accept_channel2`
701+
/// message is sent/received.
702+
fn has_pending_channel(&self, channel_id: &[u8; 32]) -> bool {
703+
self.outbound_v1_channel_by_id.contains_key(channel_id) ||
704+
self.inbound_v1_channel_by_id.contains_key(channel_id)
705+
}
694706
}
695707

696708
/// Stores a PaymentSecret and any other data we may need to validate an inbound payment is
@@ -2196,6 +2208,7 @@ where
21962208
for (_cp_id, peer_state_mutex) in per_peer_state.iter() {
21972209
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
21982210
let peer_state = &mut *peer_state_lock;
2211+
// Only `Channels` in the channel_by_id map can be considered funded.
21992212
for (_channel_id, channel) in peer_state.channel_by_id.iter().filter(f) {
22002213
let details = ChannelDetails::from_channel_context(&channel.context, best_block_height,
22012214
peer_state.latest_features.clone());
@@ -2222,20 +2235,18 @@ where
22222235
for (_cp_id, peer_state_mutex) in per_peer_state.iter() {
22232236
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
22242237
let peer_state = &mut *peer_state_lock;
2225-
for (_channel_id, channel) in peer_state.channel_by_id.iter() {
2226-
let details = ChannelDetails::from_channel_context(&channel.context, best_block_height,
2227-
peer_state.latest_features.clone());
2228-
res.push(details);
2238+
let chan_context_to_details = |context| {
2239+
ChannelDetails::from_channel_context(context, best_block_height,
2240+
peer_state.latest_features.clone())
2241+
};
2242+
for (_, channel) in peer_state.channel_by_id.iter() {
2243+
res.push(chan_context_to_details(&channel.context));
22292244
}
2230-
for (_channel_id, channel) in peer_state.inbound_v1_channel_by_id.iter() {
2231-
let details = ChannelDetails::from_channel_context(&channel.context, best_block_height,
2232-
peer_state.latest_features.clone());
2233-
res.push(details);
2245+
for (_, channel) in peer_state.inbound_v1_channel_by_id.iter() {
2246+
res.push(chan_context_to_details(&channel.context));
22342247
}
2235-
for (_channel_id, channel) in peer_state.outbound_v1_channel_by_id.iter() {
2236-
let details = ChannelDetails::from_channel_context(&channel.context, best_block_height,
2237-
peer_state.latest_features.clone());
2238-
res.push(details);
2248+
for (_, channel) in peer_state.outbound_v1_channel_by_id.iter() {
2249+
res.push(chan_context_to_details(&channel.context));
22392250
}
22402251
}
22412252
}
@@ -2264,10 +2275,14 @@ where
22642275
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
22652276
let peer_state = &mut *peer_state_lock;
22662277
let features = &peer_state.latest_features;
2278+
let chan_context_to_details = |context| {
2279+
ChannelDetails::from_channel_context(context, best_block_height, features.clone())
2280+
};
22672281
return peer_state.channel_by_id
22682282
.iter()
2269-
.map(|(_, channel)|
2270-
ChannelDetails::from_channel_context(&channel.context, best_block_height, features.clone()))
2283+
.map(|(_, channel)| chan_context_to_details(&channel.context))
2284+
.chain(peer_state.outbound_v1_channel_by_id.iter().map(|(_, channel)| chan_context_to_details(&channel.context)))
2285+
.chain(peer_state.inbound_v1_channel_by_id.iter().map(|(_, channel)| chan_context_to_details(&channel.context)))
22712286
.collect();
22722287
}
22732288
vec![]
@@ -7103,23 +7118,22 @@ where
71037118
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
71047119
let peer_state = &mut *peer_state_lock;
71057120
let pending_msg_events = &mut peer_state.pending_msg_events;
7106-
peer_state.channel_by_id.retain(|_, chan| {
7107-
let retain = if chan.context.get_counterparty_node_id() == *counterparty_node_id {
7108-
if !chan.context.have_received_message() {
7109-
// If we created this (outbound) channel while we were disconnected from the
7110-
// peer we probably failed to send the open_channel message, which is now
7111-
// lost. We can't have had anything pending related to this channel, so we just
7112-
// drop it.
7113-
false
7114-
} else {
7115-
pending_msg_events.push(events::MessageSendEvent::SendChannelReestablish {
7116-
node_id: chan.context.get_counterparty_node_id(),
7117-
msg: chan.get_channel_reestablish(&self.logger),
7118-
});
7119-
true
7120-
}
7121-
} else { true };
7122-
if retain && chan.context.get_counterparty_node_id() != *counterparty_node_id {
7121+
peer_state.outbound_v1_channel_by_id.retain(|_, chan| {
7122+
if chan.context.get_counterparty_node_id() == *counterparty_node_id {
7123+
// If we created this (outbound) channel while we were disconnected from the
7124+
// peer we probably failed to send the open_channel message, which is now
7125+
// lost. We can't have had anything pending related to this channel, so we just
7126+
// drop it.
7127+
chan.context.have_received_message()
7128+
} else { true }
7129+
});
7130+
peer_state.channel_by_id.iter_mut().for_each(|(_, chan)| {
7131+
if chan.context.get_counterparty_node_id() == *counterparty_node_id {
7132+
pending_msg_events.push(events::MessageSendEvent::SendChannelReestablish {
7133+
node_id: chan.context.get_counterparty_node_id(),
7134+
msg: chan.get_channel_reestablish(&self.logger),
7135+
});
7136+
} else {
71237137
if let Some(msg) = chan.get_signed_channel_announcement(&self.node_signer, self.genesis_hash.clone(), self.best_block.read().unwrap().height(), &self.default_configuration) {
71247138
if let Ok(update_msg) = self.get_channel_update_for_broadcast(chan) {
71257139
pending_msg_events.push(events::MessageSendEvent::SendChannelAnnouncement {
@@ -7129,7 +7143,6 @@ where
71297143
}
71307144
}
71317145
}
7132-
retain
71337146
});
71347147
}
71357148
//TODO: Also re-broadcast announcement_signatures

0 commit comments

Comments
 (0)