Skip to content

Commit ce2ccac

Browse files
Rebuild manager forwarded htlcs maps from Channels
Work in progress: rebuild the `forward_htlcs`, `decode_update_add_htlcs`, and `pending_intercepted_htlcs` maps from `Channel{Monitor}` data on deserialization, falling back to the legacy serialized maps only when reading a `ChannelManager` last written by LDK 0.2 or earlier. This is a step towards removing `ChannelManager` persistence.
1 parent 3f80b42 commit ce2ccac

File tree

3 files changed

+126
-13
lines changed

3 files changed

+126
-13
lines changed

lightning/src/ln/channelmanager.rs

Lines changed: 36 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -11402,6 +11402,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1140211402

1140311403
if !new_intercept_events.is_empty() {
1140411404
let mut events = self.pending_events.lock().unwrap();
11405+
new_intercept_events.retain(|new_ev| !events.contains(new_ev));
1140511406
events.append(&mut new_intercept_events);
1140611407
}
1140711408
}
@@ -17035,7 +17036,11 @@ where
1703517036

1703617037
const MAX_ALLOC_SIZE: usize = 1024 * 64;
1703717038
let forward_htlcs_count: u64 = Readable::read(reader)?;
17038-
let mut forward_htlcs = hash_map_with_capacity(cmp::min(forward_htlcs_count as usize, 128));
17039+
// This map is read but may no longer be used because we'll attempt to rebuild `forward_htlcs`
17040+
// from the `Channel{Monitor}`s instead, as a step towards getting rid of `ChannelManager`
17041+
// persistence.
17042+
let mut forward_htlcs_legacy: HashMap<u64, Vec<HTLCForwardInfo>> =
17043+
hash_map_with_capacity(cmp::min(forward_htlcs_count as usize, 128));
1703917044
for _ in 0..forward_htlcs_count {
1704017045
let short_channel_id = Readable::read(reader)?;
1704117046
let pending_forwards_count: u64 = Readable::read(reader)?;
@@ -17046,7 +17051,7 @@ where
1704617051
for _ in 0..pending_forwards_count {
1704717052
pending_forwards.push(Readable::read(reader)?);
1704817053
}
17049-
forward_htlcs.insert(short_channel_id, pending_forwards);
17054+
forward_htlcs_legacy.insert(short_channel_id, pending_forwards);
1705017055
}
1705117056

1705217057
let claimable_htlcs_count: u64 = Readable::read(reader)?;
@@ -17134,12 +17139,18 @@ where
1713417139
};
1713517140
}
1713617141

17142+
// Some maps are read but may no longer be used because we attempt to rebuild pending HTLC
17143+
// forwards from the `Channel{Monitor}`s instead, as a step towards getting rid of
17144+
// `ChannelManager` persistence.
17145+
let mut pending_intercepted_htlcs_legacy: Option<HashMap<InterceptId, PendingAddHTLCInfo>> =
17146+
Some(new_hash_map());
17147+
let mut decode_update_add_htlcs_legacy: Option<HashMap<u64, Vec<msgs::UpdateAddHTLC>>> =
17148+
None;
17149+
1713717150
// pending_outbound_payments_no_retry is for compatibility with 0.0.101 clients.
1713817151
let mut pending_outbound_payments_no_retry: Option<HashMap<PaymentId, HashSet<[u8; 32]>>> =
1713917152
None;
1714017153
let mut pending_outbound_payments = None;
17141-
let mut pending_intercepted_htlcs: Option<HashMap<InterceptId, PendingAddHTLCInfo>> =
17142-
Some(new_hash_map());
1714317154
let mut received_network_pubkey: Option<PublicKey> = None;
1714417155
let mut fake_scid_rand_bytes: Option<[u8; 32]> = None;
1714517156
let mut probing_cookie_secret: Option<[u8; 32]> = None;
@@ -17157,14 +17168,12 @@ where
1715717168
let mut in_flight_monitor_updates: Option<
1715817169
HashMap<(PublicKey, ChannelId), Vec<ChannelMonitorUpdate>>,
1715917170
> = None;
17160-
let mut decode_update_add_htlcs_legacy: Option<HashMap<u64, Vec<msgs::UpdateAddHTLC>>> =
17161-
None;
1716217171
let mut inbound_payment_id_secret = None;
1716317172
let mut peer_storage_dir: Option<Vec<(PublicKey, Vec<u8>)>> = None;
1716417173
let mut async_receive_offer_cache: AsyncReceiveOfferCache = AsyncReceiveOfferCache::new();
1716517174
read_tlv_fields!(reader, {
1716617175
(1, pending_outbound_payments_no_retry, option),
17167-
(2, pending_intercepted_htlcs, option),
17176+
(2, pending_intercepted_htlcs_legacy, option),
1716817177
(3, pending_outbound_payments, option),
1716917178
(4, pending_claiming_payments, option),
1717017179
(5, received_network_pubkey, option),
@@ -17586,7 +17595,7 @@ where
1758617595
"HTLC was forwarded to the closed channel",
1758717596
&args.logger,
1758817597
);
17589-
forward_htlcs.retain(|_, forwards| {
17598+
forward_htlcs_legacy.retain(|_, forwards| {
1759017599
forwards.retain(|forward| {
1759117600
if let HTLCForwardInfo::AddHTLC(htlc_info) = forward {
1759217601
if pending_forward_matches_htlc(&htlc_info) {
@@ -17598,7 +17607,7 @@ where
1759817607
});
1759917608
!forwards.is_empty()
1760017609
});
17601-
pending_intercepted_htlcs.as_mut().unwrap().retain(|intercepted_id, htlc_info| {
17610+
pending_intercepted_htlcs_legacy.as_mut().unwrap().retain(|intercepted_id, htlc_info| {
1760217611
if pending_forward_matches_htlc(&htlc_info) {
1760317612
log_info!(logger, "Removing pending intercepted HTLC with hash {} as it was forwarded to the closed channel {}",
1760417613
&htlc.payment_hash, &monitor.channel_id());
@@ -18106,6 +18115,22 @@ where
1810618115
)
1810718116
.with_async_payments_offers_cache(async_receive_offer_cache);
1810818117

18118+
// If we are reading from a `ChannelManager` that was last serialized on LDK 0.2 or earlier, we
18119+
// won't have been able to rebuild `decode_update_add_htlcs` from `Channel`s and should use
18120+
// the legacy serialized maps instead.
18121+
// TODO: if we read an upgraded channel but there just happened to be no committed update_adds
18122+
// present, we'll use the old maps here. Maybe that's fine but we might want to add a flag in
18123+
// the `Channel` that indicates it is upgraded and will serialize committed update_adds.
18124+
let (forward_htlcs, decode_update_add_htlcs, pending_intercepted_htlcs) =
18125+
if decode_update_add_htlcs.is_empty() {
18126+
(
18127+
forward_htlcs_legacy,
18128+
decode_update_add_htlcs_legacy,
18129+
pending_intercepted_htlcs_legacy.unwrap(),
18130+
)
18131+
} else {
18132+
(new_hash_map(), decode_update_add_htlcs, new_hash_map())
18133+
};
1810918134
let channel_manager = ChannelManager {
1811018135
chain_hash,
1811118136
fee_estimator: bounded_fee_estimator,
@@ -18118,10 +18143,10 @@ where
1811818143

1811918144
inbound_payment_key: expanded_inbound_key,
1812018145
pending_outbound_payments: pending_outbounds,
18121-
pending_intercepted_htlcs: Mutex::new(pending_intercepted_htlcs.unwrap()),
18146+
pending_intercepted_htlcs: Mutex::new(pending_intercepted_htlcs),
1812218147

1812318148
forward_htlcs: Mutex::new(forward_htlcs),
18124-
decode_update_add_htlcs: Mutex::new(decode_update_add_htlcs_legacy),
18149+
decode_update_add_htlcs: Mutex::new(decode_update_add_htlcs),
1812518150
claimable_payments: Mutex::new(ClaimablePayments {
1812618151
claimable_payments,
1812718152
pending_claiming_payments: pending_claiming_payments.unwrap(),

lightning/src/ln/functional_test_utils.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1382,9 +1382,10 @@ macro_rules! reload_node {
13821382
$node.onion_messenger.set_async_payments_handler(&$new_channelmanager);
13831383
};
13841384
($node: expr, $chanman_encoded: expr, $monitors_encoded: expr, $persister: ident, $new_chain_monitor: ident, $new_channelmanager: ident) => {
1385+
let config = $node.node.get_current_config();
13851386
reload_node!(
13861387
$node,
1387-
test_default_channel_config(),
1388+
config,
13881389
$chanman_encoded,
13891390
$monitors_encoded,
13901391
$persister,

lightning/src/ln/reload_tests.rs

Lines changed: 88 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ use crate::chain::transaction::OutPoint;
2020
use crate::events::{ClosureReason, Event, HTLCHandlingFailureType};
2121
use crate::ln::channelmanager::{ChannelManager, ChannelManagerReadArgs, PaymentId, RecipientOnionFields, RAACommitmentOrder};
2222
use crate::ln::msgs;
23+
use crate::ln::outbound_payment::Retry;
2324
use crate::ln::types::ChannelId;
2425
use crate::ln::msgs::{BaseMessageHandler, ChannelMessageHandler, RoutingMessageHandler, ErrorAction, MessageSendEvent};
2526
use crate::util::test_channel_signer::TestChannelSigner;
@@ -508,7 +509,6 @@ fn test_manager_serialize_deserialize_inconsistent_monitor() {
508509

509510
#[cfg(feature = "std")]
510511
fn do_test_data_loss_protect(reconnect_panicing: bool, substantially_old: bool, not_stale: bool) {
511-
use crate::ln::channelmanager::Retry;
512512
use crate::types::string::UntrustedString;
513513
// When we get a data_loss_protect proving we're behind, we immediately panic as the
514514
// chain::Watch API requirements have been violated (e.g. the user restored from a backup). The
@@ -1173,6 +1173,93 @@ fn removed_payment_no_manager_persistence() {
11731173
expect_payment_failed!(nodes[0], payment_hash, false);
11741174
}
11751175

1176+
#[test]
1177+
fn manager_persisted_pre_htlc_forward_on_outbound_edge() {
1178+
do_manager_persisted_pre_htlc_forward_on_outbound_edge(false);
1179+
}
1180+
1181+
#[test]
1182+
fn manager_persisted_pre_intercept_forward_on_outbound_edge() {
1183+
do_manager_persisted_pre_htlc_forward_on_outbound_edge(true);
1184+
}
1185+
1186+
fn do_manager_persisted_pre_htlc_forward_on_outbound_edge(intercept_htlc: bool) {
1187+
let chanmon_cfgs = create_chanmon_cfgs(3);
1188+
let node_cfgs = create_node_cfgs(3, &chanmon_cfgs);
1189+
let persister;
1190+
let new_chain_monitor;
1191+
let mut intercept_forwards_config = test_default_channel_config();
1192+
intercept_forwards_config.accept_intercept_htlcs = true;
1193+
let node_chanmgrs = create_node_chanmgrs(3, &node_cfgs, &[None, Some(intercept_forwards_config), None]);
1194+
let nodes_1_deserialized;
1195+
let mut nodes = create_network(3, &node_cfgs, &node_chanmgrs);
1196+
1197+
let chan_id_1 = create_announced_chan_between_nodes(&nodes, 0, 1).2;
1198+
let chan_id_2 = create_announced_chan_between_nodes(&nodes, 1, 2).2;
1199+
1200+
let intercept_scid = nodes[1].node.get_intercept_scid();
1201+
1202+
// Lock in the HTLC from node_a <> node_b.
1203+
let amt_msat = 5000;
1204+
let (mut route, payment_hash, payment_preimage, payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[2], amt_msat);
1205+
if intercept_htlc {
1206+
route.paths[0].hops[1].short_channel_id = intercept_scid;
1207+
}
1208+
nodes[0].node.send_payment_with_route(route, payment_hash, RecipientOnionFields::secret_only(payment_secret), PaymentId(payment_hash.0)).unwrap();
1209+
check_added_monitors(&nodes[0], 1);
1210+
let updates = get_htlc_update_msgs(&nodes[0], &nodes[1].node.get_our_node_id());
1211+
nodes[1].node.handle_update_add_htlc(nodes[0].node.get_our_node_id(), &updates.update_add_htlcs[0]);
1212+
do_commitment_signed_dance(&nodes[1], &nodes[0], &updates.commitment_signed, false, false);
1213+
1214+
// Decode the HTLC onion but don't forward it to the next hop, such that the HTLC ends up in
1215+
// `ChannelManager::forward_htlcs` or `ChannelManager::pending_intercepted_htlcs`.
1216+
nodes[1].node.process_pending_update_add_htlcs();
1217+
1218+
// Disconnect peers and reload the forwarding node_b.
1219+
nodes[0].node.peer_disconnected(nodes[1].node.get_our_node_id());
1220+
nodes[2].node.peer_disconnected(nodes[1].node.get_our_node_id());
1221+
1222+
let node_b_encoded = nodes[1].node.encode();
1223+
1224+
let chan_0_monitor_serialized = get_monitor!(nodes[1], chan_id_1).encode();
1225+
let chan_1_monitor_serialized = get_monitor!(nodes[1], chan_id_2).encode();
1226+
reload_node!(nodes[1], node_b_encoded, &[&chan_0_monitor_serialized, &chan_1_monitor_serialized], persister, new_chain_monitor, nodes_1_deserialized);
1227+
1228+
reconnect_nodes(ReconnectArgs::new(&nodes[1], &nodes[0]));
1229+
let mut args_b_c = ReconnectArgs::new(&nodes[1], &nodes[2]);
1230+
args_b_c.send_channel_ready = (true, true);
1231+
args_b_c.send_announcement_sigs = (true, true);
1232+
reconnect_nodes(args_b_c);
1233+
1234+
// Forward the HTLC and ensure we can claim it post-reload.
1235+
nodes[1].node.process_pending_htlc_forwards();
1236+
1237+
if intercept_htlc {
1238+
let events = nodes[1].node.get_and_clear_pending_events();
1239+
assert_eq!(events.len(), 1);
1240+
let (intercept_id, expected_outbound_amt_msat) = match events[0] {
1241+
Event::HTLCIntercepted { intercept_id, expected_outbound_amount_msat, .. } => {
1242+
(intercept_id, expected_outbound_amount_msat)
1243+
},
1244+
_ => panic!()
1245+
};
1246+
nodes[1].node.forward_intercepted_htlc(intercept_id, &chan_id_2,
1247+
nodes[2].node.get_our_node_id(), expected_outbound_amt_msat).unwrap();
1248+
nodes[1].node.process_pending_htlc_forwards();
1249+
}
1250+
check_added_monitors(&nodes[1], 1);
1251+
1252+
let updates = get_htlc_update_msgs(&nodes[1], &nodes[2].node.get_our_node_id());
1253+
nodes[2].node.handle_update_add_htlc(nodes[1].node.get_our_node_id(), &updates.update_add_htlcs[0]);
1254+
do_commitment_signed_dance(&nodes[2], &nodes[1], &updates.commitment_signed, false, false);
1255+
expect_and_process_pending_htlcs(&nodes[2], false);
1256+
1257+
expect_payment_claimable!(nodes[2], payment_hash, payment_secret, amt_msat, None, nodes[2].node.get_our_node_id());
1258+
let path: &[&[_]] = &[&[&nodes[1], &nodes[2]]];
1259+
do_claim_payment_along_route(ClaimAlongRouteArgs::new(&nodes[0], path, payment_preimage));
1260+
expect_payment_sent(&nodes[0], payment_preimage, None, true, true);
1261+
}
1262+
11761263
#[test]
11771264
fn test_reload_partial_funding_batch() {
11781265
let chanmon_cfgs = create_chanmon_cfgs(3);

Comments (0)