Commit b55761a
ln+events+liquidity/refactor: NextHopChannel renamed to ForwardFailed
Standardize naming within the HTLCHandlingType enum to present more consistent API terminology.
1 parent 6d985f8

14 files changed: 76 additions & 74 deletions
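
For downstream code that matches on the old variant, the update is mechanical: only the variant name changes, while the fields (`node_id`, `channel_id`) and their meaning stay the same. A minimal sketch of an updated consumer, assuming `HTLCHandlingType` is exported from `lightning::events` as in this tree:

```rust
use lightning::events::HTLCHandlingType;

// Hypothetical downstream helper, shown only to illustrate the rename;
// the field names match the enum definition in the events/mod.rs diff below.
fn describe_failure(handling_type: &HTLCHandlingType) -> String {
	match handling_type {
		// Was `HTLCHandlingType::NextHopChannel { .. }` before this commit.
		HTLCHandlingType::ForwardFailed { node_id, channel_id } => {
			format!("failed to forward to {:?} over channel {:?}", node_id, channel_id)
		},
		_ => "failed for another reason".to_owned(),
	}
}
```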

lightning-liquidity/src/lsps2/service.rs

Lines changed: 4 additions & 2 deletions
```diff
@@ -879,8 +879,10 @@ where
     /// or if the payment queue is empty
     ///
     /// [`Event::HTLCHandlingFailed`]: lightning::events::Event::HTLCHandlingFailed
-    pub fn htlc_handling_failed(&self, handling_type: HTLCHandlingType) -> Result<(), APIError> {
-        if let HTLCHandlingType::NextHopChannel { channel_id, .. } = handling_type {
+    pub fn htlc_handling_failed(
+        &self, handling_type: HTLCHandlingType,
+    ) -> Result<(), APIError> {
+        if let HTLCHandlingType::ForwardFailed { channel_id, .. } = handling_type {
             let peer_by_channel_id = self.peer_by_channel_id.read().unwrap();
             if let Some(counterparty_node_id) = peer_by_channel_id.get(&channel_id) {
                 let outer_state_lock = self.per_peer_state.read().unwrap();
```
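
The handler only acts on forward failures, so callers may pass any handling type through; other variants simply fall out of the `if let` above. A sketch of a call site, where `lsps2_service_handler`, `counterparty_node_id`, and `channel_id` are assumed bindings rather than anything defined in this diff:

```rust
use lightning::events::HTLCHandlingType;

// Assumed setup: `lsps2_service_handler` is the LSPS2ServiceHandler whose
// method appears above; the two ids identify the failed outbound forward.
let handling_type = HTLCHandlingType::ForwardFailed {
	node_id: Some(counterparty_node_id),
	channel_id,
};
// Errors surface as APIError; a real caller would handle rather than discard.
let _ = lsps2_service_handler.htlc_handling_failed(handling_type);
```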

lightning/src/events/mod.rs

Lines changed: 2 additions & 2 deletions
```diff
@@ -470,7 +470,7 @@ impl_writeable_tlv_based_enum_upgradable!(ClosureReason,
 pub enum HTLCHandlingType {
     /// We tried forwarding to a channel but failed to do so. An example of such an instance is when
     /// there is insufficient capacity in our outbound channel.
-    NextHopChannel {
+    ForwardFailed {
         /// The `node_id` of the next node. For backwards compatibility, this field is
         /// marked as optional, versions prior to 0.0.110 may not always be able to provide
         /// counterparty node information.
@@ -508,7 +508,7 @@ pub enum HTLCHandlingType {
 }
 
 impl_writeable_tlv_based_enum_upgradable!(HTLCHandlingType,
-    (0, NextHopChannel) => {
+    (0, ForwardFailed) => {
         (0, node_id, required),
         (2, channel_id, required),
     },
```
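
Note that `impl_writeable_tlv_based_enum_upgradable!` keys each variant by its assigned type number rather than its Rust name: the renamed variant keeps type 0 and the same field TLVs (0 for `node_id`, 2 for `channel_id`), so data serialized before this commit should deserialize unchanged. The rename breaks the Rust API but should leave the wire and storage format untouched.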

lightning/src/ln/blinded_payment_tests.rs

Lines changed: 4 additions & 4 deletions
```diff
@@ -428,7 +428,7 @@ fn do_forward_checks_failure(check: ForwardCheckFail, intro_fails: bool) {
         ForwardCheckFail::InboundOnionCheck => HTLCHandlingType::InvalidOnion,
         ForwardCheckFail::ForwardPayloadEncodedAsReceive => HTLCHandlingType::InvalidOnion,
         ForwardCheckFail::OutboundChannelCheck =>
-            HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_1_2.2 },
+            HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_1_2.2 },
     };
     expect_htlc_handling_failed_destinations!(
         nodes[1].node.get_and_clear_pending_events(), &[failed_destination.clone()]
@@ -459,7 +459,7 @@ fn do_forward_checks_failure(check: ForwardCheckFail, intro_fails: bool) {
     let failed_destination = match check {
         ForwardCheckFail::InboundOnionCheck|ForwardCheckFail::ForwardPayloadEncodedAsReceive => HTLCHandlingType::InvalidOnion,
         ForwardCheckFail::OutboundChannelCheck =>
-            HTLCHandlingType::NextHopChannel { node_id: Some(nodes[3].node.get_our_node_id()), channel_id: chan_2_3.2 },
+            HTLCHandlingType::ForwardFailed { node_id: Some(nodes[3].node.get_our_node_id()), channel_id: chan_2_3.2 },
     };
     expect_htlc_handling_failed_destinations!(
         nodes[2].node.get_and_clear_pending_events(), &[failed_destination.clone()]
@@ -606,7 +606,7 @@ fn do_forward_fail_in_process_pending_htlc_fwds(check: ProcessPendingHTLCsCheck,
             $curr_node.node.peer_disconnected($next_node.node.get_our_node_id());
             expect_pending_htlcs_forwardable!($curr_node);
             expect_htlc_handling_failed_destinations!($curr_node.node.get_and_clear_pending_events(),
-                vec![HTLCHandlingType::NextHopChannel { node_id: Some($next_node.node.get_our_node_id()), channel_id: $failed_chan_id }]);
+                vec![HTLCHandlingType::ForwardFailed { node_id: Some($next_node.node.get_our_node_id()), channel_id: $failed_chan_id }]);
         },
         ProcessPendingHTLCsCheck::FwdChannelClosed => {
             // Force close the next-hop channel so when we go to forward in process_pending_htlc_forwards,
@@ -1243,7 +1243,7 @@ fn min_htlc() {
     expect_pending_htlcs_forwardable!(nodes[1]);
     expect_htlc_handling_failed_destinations!(
         nodes[1].node.get_and_clear_pending_events(),
-        &[HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_1_2.2 }]
+        &[HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_1_2.2 }]
     );
     check_added_monitors(&nodes[1], 1);
     let mut updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
```

lightning/src/ln/chanmon_update_fail_tests.rs

Lines changed: 4 additions & 4 deletions
```diff
@@ -905,7 +905,7 @@ fn do_test_monitor_update_fail_raa(test_ignore_second_cs: bool) {
     let (latest_update, _) = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap().get(&chan_2.2).unwrap().clone();
     nodes[1].chain_monitor.chain_monitor.force_channel_monitor_updated(chan_2.2, latest_update);
     check_added_monitors!(nodes[1], 0);
-    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_2.2 }]);
+    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_2.2 }]);
     check_added_monitors!(nodes[1], 1);
 
     let mut events_3 = nodes[1].node.get_and_clear_pending_msg_events();
@@ -1752,7 +1752,7 @@ fn test_monitor_update_on_pending_forwards() {
     commitment_signed_dance!(nodes[1], nodes[2], payment_event.commitment_msg, false);
 
     chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
-    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_2.2 }]);
+    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_2.2 }]);
     check_added_monitors!(nodes[1], 1);
 
     chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::Completed);
@@ -2159,7 +2159,7 @@ fn test_fail_htlc_on_broadcast_after_claim() {
     check_closed_broadcast!(nodes[1], true);
     connect_blocks(&nodes[1], ANTI_REORG_DELAY - 1);
     check_added_monitors!(nodes[1], 1);
-    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_id_2 }]);
+    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_id_2 }]);
 
     nodes[0].node.handle_update_fulfill_htlc(nodes[1].node.get_our_node_id(), &bs_updates.update_fulfill_htlcs[0]);
     expect_payment_sent(&nodes[0], payment_preimage, None, false, false);
@@ -2549,7 +2549,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
         let mut reconnect_args = ReconnectArgs::new(&nodes[1], &nodes[2]);
         reconnect_args.pending_htlc_fails.0 = 1;
         reconnect_nodes(reconnect_args);
-        expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_id_2 }]);
+        expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_id_2 }]);
     } else {
         let mut reconnect_args = ReconnectArgs::new(&nodes[1], &nodes[2]);
         reconnect_args.pending_htlc_claims.0 = 1;
```

lightning/src/ln/channelmanager.rs

Lines changed: 10 additions & 10 deletions
```diff
@@ -3297,7 +3297,7 @@ macro_rules! handle_monitor_update_completion {
         }
         $self.finalize_claims(updates.finalized_claimed_htlcs);
         for failure in updates.failed_htlcs.drain(..) {
-            let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
+            let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id), channel_id };
             $self.fail_htlc_backwards_internal(&failure.0, &failure.1, &failure.2, receiver);
         }
     } }
@@ -3917,7 +3917,7 @@ where
     for htlc_source in failed_htlcs.drain(..) {
         let failure_reason = LocalHTLCFailureReason::DroppedPending;
         let reason = HTLCFailReason::from_failure_code(failure_reason);
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(*counterparty_node_id), channel_id: *channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(*counterparty_node_id), channel_id: *channel_id };
         self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
     }
 
@@ -4041,7 +4041,7 @@ where
         let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
         let failure_reason = LocalHTLCFailureReason::DroppedPending;
         let reason = HTLCFailReason::from_failure_code(failure_reason);
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id), channel_id };
         self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
     }
     if let Some((_, funding_txo, _channel_id, monitor_update)) = shutdown_res.monitor_update {
@@ -5737,7 +5737,7 @@ where
     if let Some(outgoing_scid) = outgoing_scid_opt {
         match self.short_to_chan_info.read().unwrap().get(&outgoing_scid) {
             Some((outgoing_counterparty_node_id, outgoing_channel_id)) =>
-                HTLCHandlingType::NextHopChannel {
+                HTLCHandlingType::ForwardFailed {
                     node_id: Some(*outgoing_counterparty_node_id),
                     channel_id: *outgoing_channel_id,
                 },
@@ -6104,7 +6104,7 @@ where
                 let data = self.get_htlc_inbound_temp_fail_data(reason);
                 failed_forwards.push((htlc_source, payment_hash,
                     HTLCFailReason::reason(reason, data),
-                    HTLCHandlingType::NextHopChannel { node_id: Some(chan.context.get_counterparty_node_id()), channel_id: forward_chan_id }
+                    HTLCHandlingType::ForwardFailed { node_id: Some(chan.context.get_counterparty_node_id()), channel_id: forward_chan_id }
                 ));
             } else {
                 forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));
@@ -6960,7 +6960,7 @@ where
 
     for (htlc_src, payment_hash) in htlcs_to_fail.drain(..) {
         let reason = HTLCFailReason::reason(failure_reason, onion_failure_data.clone());
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id.clone()), channel_id };
         self.fail_htlc_backwards_internal(&htlc_src, &payment_hash, &reason, receiver);
     }
 }
@@ -8745,7 +8745,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
         }
     }
     for htlc_source in dropped_htlcs.drain(..) {
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id: msg.channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id.clone()), channel_id: msg.channel_id };
         let reason = HTLCFailReason::from_failure_code(LocalHTLCFailureReason::DroppedPending);
         self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
     }
@@ -9571,7 +9571,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
     } else {
         log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
         let failure_reason = LocalHTLCFailureReason::ChannelClosed;
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id), channel_id };
         let reason = HTLCFailReason::from_failure_code(failure_reason);
         self.fail_htlc_backwards_internal(&htlc_update.source, &htlc_update.payment_hash, &reason, receiver);
     }
@@ -11649,7 +11649,7 @@ where
     let reason = LocalHTLCFailureReason::CLTVExpiryTooSoon;
     let data = self.get_htlc_inbound_temp_fail_data(reason);
     timed_out_htlcs.push((source, payment_hash, HTLCFailReason::reason(reason, data),
-        HTLCHandlingType::NextHopChannel { node_id: Some(funded_channel.context.get_counterparty_node_id()), channel_id: funded_channel.context.channel_id() }));
+        HTLCHandlingType::ForwardFailed { node_id: Some(funded_channel.context.get_counterparty_node_id()), channel_id: funded_channel.context.channel_id() }));
     }
     let logger = WithChannelContext::from(&self.logger, &funded_channel.context, None);
     if let Some(channel_ready) = channel_ready_opt {
@@ -14875,7 +14875,7 @@ where
     for htlc_source in failed_htlcs.drain(..) {
         let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
         let failure_reason = LocalHTLCFailureReason::DroppedPending;
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id), channel_id };
         let reason = HTLCFailReason::from_failure_code(failure_reason);
         channel_manager.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
     }
```
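
All ten `channelmanager.rs` call sites follow one shape: the `receiver` handed to `fail_htlc_backwards_internal` names the counterparty and channel of the failed next hop. As an illustration only (LDK inlines this construction at each call site, and the import paths are an assumption that may differ at this commit), the repeated pattern is:

```rust
use bitcoin::secp256k1::PublicKey;
use lightning::events::HTLCHandlingType;
use lightning::ln::types::ChannelId; // path varies across LDK versions

// Hypothetical helper mirroring the pattern repeated throughout this diff.
fn forward_failed_receiver(
	counterparty_node_id: PublicKey, channel_id: ChannelId,
) -> HTLCHandlingType {
	HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id), channel_id }
}
```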

lightning/src/ln/functional_test_utils.rs

Lines changed: 2 additions & 2 deletions
```diff
@@ -2150,7 +2150,7 @@ pub fn do_commitment_signed_dance(node_a: &Node<'_, '_, '_>, node_b: &Node<'_, '
 
     if fail_backwards {
         expect_pending_htlcs_forwardable_and_htlc_handling_failed!(node_a,
-            vec![crate::events::HTLCHandlingType::NextHopChannel{ node_id: Some(node_b.node.get_our_node_id()), channel_id: commitment_signed.channel_id }]);
+            vec![crate::events::HTLCHandlingType::ForwardFailed{ node_id: Some(node_b.node.get_our_node_id()), channel_id: commitment_signed.channel_id }]);
         check_added_monitors!(node_a, 1);
 
         let node_a_per_peer_state = node_a.node.per_peer_state.read().unwrap();
@@ -3214,7 +3214,7 @@ pub fn pass_failed_payment_back<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>, expe
         node.node.handle_update_fail_htlc(prev_node.node.get_our_node_id(), &next_msgs.as_ref().unwrap().0);
         commitment_signed_dance!(node, prev_node, next_msgs.as_ref().unwrap().1, update_next_node);
         if !update_next_node {
-            expect_pending_htlcs_forwardable_and_htlc_handling_failed!(node, vec![HTLCHandlingType::NextHopChannel { node_id: Some(prev_node.node.get_our_node_id()), channel_id: next_msgs.as_ref().unwrap().0.channel_id }]);
+            expect_pending_htlcs_forwardable_and_htlc_handling_failed!(node, vec![HTLCHandlingType::ForwardFailed { node_id: Some(prev_node.node.get_our_node_id()), channel_id: next_msgs.as_ref().unwrap().0.channel_id }]);
         }
     }
     let events = node.node.get_and_clear_pending_msg_events();
```
