Commit 310416a

Add hold times to update_fulfill_htlc
1 parent 374427a commit 310416a

6 files changed: +136 −27 lines changed

lightning/src/ln/chanmon_update_fail_tests.rs

Lines changed: 6 additions & 2 deletions
@@ -2511,6 +2511,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
 channel_id: chan_id_2,
 htlc_id: 0,
 payment_preimage,
+attribution_data: None,
 };
 if second_fails {
 nodes[2].node.fail_htlc_backwards(&payment_hash);
@@ -2524,8 +2525,11 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
 
 let cs_updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
 assert_eq!(cs_updates.update_fulfill_htlcs.len(), 1);
-// Check that the message we're about to deliver matches the one generated:
-assert_eq!(fulfill_msg, cs_updates.update_fulfill_htlcs[0]);
+
+// Check that the message we're about to deliver matches the one generated. Ignore attribution data.
+assert_eq!(fulfill_msg.channel_id, cs_updates.update_fulfill_htlcs[0].channel_id);
+assert_eq!(fulfill_msg.htlc_id, cs_updates.update_fulfill_htlcs[0].htlc_id);
+assert_eq!(fulfill_msg.payment_preimage, cs_updates.update_fulfill_htlcs[0].payment_preimage);
 }
 nodes[1].node.handle_update_fulfill_htlc(nodes[2].node.get_our_node_id(), &fulfill_msg);
 expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);

lightning/src/ln/channel.rs

Lines changed: 15 additions & 14 deletions
@@ -121,7 +121,7 @@ enum FeeUpdateState {
 enum InboundHTLCRemovalReason {
 FailRelay(msgs::OnionErrorPacket),
 FailMalformed(([u8; 32], u16)),
-Fulfill(PaymentPreimage),
+Fulfill(PaymentPreimage, Option<AttributionData>),
 }
 
 /// Represents the resolution status of an inbound HTLC.
@@ -220,7 +220,7 @@ impl From<&InboundHTLCState> for Option<InboundHTLCStateDetails> {
 Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFail),
 InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::FailMalformed(_)) =>
 Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFail),
-InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(_)) =>
+InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(_, _)) =>
 Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFulfill),
 }
 }
@@ -251,7 +251,7 @@ impl InboundHTLCState {
 
 fn preimage(&self) -> Option<PaymentPreimage> {
 match self {
-InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(preimage)) => Some(*preimage),
+InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(preimage, _)) => Some(*preimage),
 _ => None,
 }
 }
@@ -5347,7 +5347,7 @@ impl<SP: Deref> FundedChannel<SP> where
 // (see equivalent if condition there).
 assert!(!self.context.channel_state.can_generate_new_commitment());
 let mon_update_id = self.context.latest_monitor_update_id; // Forget the ChannelMonitor update
-let fulfill_resp = self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, logger);
+let fulfill_resp = self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, AttributionData::new(), logger);
 self.context.latest_monitor_update_id = mon_update_id;
 if let UpdateFulfillFetch::NewClaim { msg, .. } = fulfill_resp {
 assert!(!msg); // The HTLC must have ended up in the holding cell.
@@ -5356,7 +5356,7 @@ impl<SP: Deref> FundedChannel<SP> where
 
 fn get_update_fulfill_htlc<L: Deref>(
 &mut self, htlc_id_arg: u64, payment_preimage_arg: PaymentPreimage,
-payment_info: Option<PaymentClaimDetails>, logger: &L,
+payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, logger: &L,
 ) -> UpdateFulfillFetch where L::Target: Logger {
 // Either ChannelReady got set (which means it won't be unset) or there is no way any
 // caller thought we could have something claimed (cause we wouldn't have accepted in an
@@ -5380,7 +5380,7 @@ impl<SP: Deref> FundedChannel<SP> where
 match htlc.state {
 InboundHTLCState::Committed => {},
 InboundHTLCState::LocalRemoved(ref reason) => {
-if let &InboundHTLCRemovalReason::Fulfill(_) = reason {
+if let &InboundHTLCRemovalReason::Fulfill(_, _) = reason {
 } else {
 log_warn!(logger, "Have preimage and want to fulfill HTLC with payment hash {} we already failed against channel {}", &htlc.payment_hash, &self.context.channel_id());
 debug_assert!(false, "Tried to fulfill an HTLC that was already failed");
@@ -5458,7 +5458,7 @@ impl<SP: Deref> FundedChannel<SP> where
 return UpdateFulfillFetch::NewClaim { monitor_update, htlc_value_msat, msg: false };
 }
 log_trace!(logger, "Upgrading HTLC {} to LocalRemoved with a Fulfill in channel {}!", &htlc.payment_hash, &self.context.channel_id);
-htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(payment_preimage_arg.clone()));
+htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(payment_preimage_arg.clone(), Some(attribution_data)));
 }
 
 UpdateFulfillFetch::NewClaim {
@@ -5470,10 +5470,10 @@ impl<SP: Deref> FundedChannel<SP> where
 
 pub fn get_update_fulfill_htlc_and_commit<L: Deref>(
 &mut self, htlc_id: u64, payment_preimage: PaymentPreimage,
-payment_info: Option<PaymentClaimDetails>, logger: &L,
+payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, logger: &L,
 ) -> UpdateFulfillCommitFetch where L::Target: Logger {
 let release_cs_monitor = self.context.blocked_monitor_updates.is_empty();
-match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, logger) {
+match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, attribution_data, logger) {
 UpdateFulfillFetch::NewClaim { mut monitor_update, htlc_value_msat, msg } => {
 // Even if we aren't supposed to let new monitor updates with commitment state
 // updates run, we still need to push the preimage ChannelMonitorUpdateStep no
@@ -6201,7 +6201,7 @@ impl<SP: Deref> FundedChannel<SP> where
 // We do not bother to track and include `payment_info` here, however.
 let mut additional_monitor_update =
 if let UpdateFulfillFetch::NewClaim { monitor_update, .. } =
-self.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, logger)
+self.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, AttributionData::new(), logger) // TODO: Attr data from holding cell
 { monitor_update } else { unreachable!() };
 update_fulfill_count += 1;
 monitor_update.updates.append(&mut additional_monitor_update.updates);
@@ -6366,7 +6366,7 @@ impl<SP: Deref> FundedChannel<SP> where
 pending_inbound_htlcs.retain(|htlc| {
 if let &InboundHTLCState::LocalRemoved(ref reason) = &htlc.state {
 log_trace!(logger, " ...removing inbound LocalRemoved {}", &htlc.payment_hash);
-if let &InboundHTLCRemovalReason::Fulfill(_) = reason {
+if let &InboundHTLCRemovalReason::Fulfill(_, _) = reason {
 value_to_self_msat_diff += htlc.amount_msat as i64;
 }
 *expecting_peer_commitment_signed = true;
@@ -7139,11 +7139,12 @@ impl<SP: Deref> FundedChannel<SP> where
 failure_code: failure_code.clone(),
 });
 },
-&InboundHTLCRemovalReason::Fulfill(ref payment_preimage) => {
+&InboundHTLCRemovalReason::Fulfill(ref payment_preimage, ref attribution_data) => {
 update_fulfill_htlcs.push(msgs::UpdateFulfillHTLC {
 channel_id: self.context.channel_id(),
 htlc_id: htlc.htlc_id,
 payment_preimage: payment_preimage.clone(),
+attribution_data: attribution_data.clone(),
 });
 },
 }
@@ -10642,7 +10643,7 @@ impl<SP: Deref> Writeable for FundedChannel<SP> where SP::Target: SignerProvider
 1u8.write(writer)?;
 (hash, code).write(writer)?;
 },
-InboundHTLCRemovalReason::Fulfill(preimage) => {
+InboundHTLCRemovalReason::Fulfill(preimage, _) => { // TODO: Persistence
 2u8.write(writer)?;
 preimage.write(writer)?;
 },
@@ -11003,7 +11004,7 @@ impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, &'c Channel
 attribution_data: None,
 }),
 1 => InboundHTLCRemovalReason::FailMalformed(Readable::read(reader)?),
-2 => InboundHTLCRemovalReason::Fulfill(Readable::read(reader)?),
+2 => InboundHTLCRemovalReason::Fulfill(Readable::read(reader)?, None), // TODO: Persistence
 _ => return Err(DecodeError::InvalidValue),
 };
 InboundHTLCState::LocalRemoved(reason)
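
Note on the two `// TODO: Persistence` markers above: the `Writeable`/`ReadableArgs` paths still serialize only the preimage for the `Fulfill` variant, so attribution data attached to an inbound HTLC does not survive a (de)serialization round trip and always comes back as `None`. A minimal standalone sketch of that behaviour follows; `Preimage`, `AttributionData` and `RemovalReason` here are stand-ins for illustration, not the LDK types.

#[derive(Clone, Debug, PartialEq)]
struct Preimage([u8; 32]);

#[derive(Clone, Debug, PartialEq)]
struct AttributionData(Vec<u8>);

#[derive(Debug, PartialEq)]
enum RemovalReason {
    Fulfill(Preimage, Option<AttributionData>),
}

fn write(reason: &RemovalReason, out: &mut Vec<u8>) {
    match reason {
        // Only the variant tag and the preimage are persisted; attribution data is dropped,
        // mirroring the commit's TODO.
        RemovalReason::Fulfill(preimage, _attribution_data) => {
            out.push(2u8);
            out.extend_from_slice(&preimage.0);
        }
    }
}

fn read(buf: &[u8]) -> RemovalReason {
    assert_eq!(buf[0], 2);
    let mut preimage = [0u8; 32];
    preimage.copy_from_slice(&buf[1..33]);
    // On deserialization the attribution data is always restored as `None`.
    RemovalReason::Fulfill(Preimage(preimage), None)
}

fn main() {
    let reason = RemovalReason::Fulfill(Preimage([1; 32]), Some(AttributionData(vec![0; 4])));
    let mut buf = Vec::new();
    write(&reason, &mut buf);
    assert_eq!(read(&buf), RemovalReason::Fulfill(Preimage([1; 32]), None));
}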

lightning/src/ln/channelmanager.rs

Lines changed: 50 additions & 9 deletions
@@ -59,7 +59,7 @@ use crate::types::features::Bolt11InvoiceFeatures;
 use crate::routing::router::{BlindedTail, InFlightHtlcs, Path, Payee, PaymentParameters, RouteParameters, RouteParametersConfig, Router, FixedRouter, Route};
 use crate::ln::onion_payment::{check_incoming_htlc_cltv, create_recv_pending_htlc_info, create_fwd_pending_htlc_info, decode_incoming_update_add_htlc_onion, HopConnector, InboundHTLCErr, NextPacketDetails, invalid_payment_err_data};
 use crate::ln::msgs;
-use crate::ln::onion_utils::{self};
+use crate::ln::onion_utils::{self, process_onion_success, HOLD_TIME_LEN};
 use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
 use crate::ln::msgs::{BaseMessageHandler, ChannelMessageHandler, CommitmentUpdate, DecodeError, LightningError, MessageSendEvent};
 #[cfg(test)]
@@ -88,6 +88,8 @@ use crate::util::ser::{BigSize, FixedLengthReader, LengthReadable, Readable, Rea
 use crate::util::logger::{Level, Logger, WithContext};
 use crate::util::errors::APIError;
 
+use crate::ln::onion_utils::AttributionData;
+
 #[cfg(async_payments)] use {
 crate::offers::offer::Amount,
 crate::offers::static_invoice::{DEFAULT_RELATIVE_EXPIRY as STATIC_INVOICE_DEFAULT_RELATIVE_EXPIRY, StaticInvoice, StaticInvoiceBuilder},
@@ -7239,8 +7241,18 @@
 pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
 }
 });
+
+log_info!(self.logger, "ONLY ONCE");
+
+let mut attribution_data = AttributionData::new();
+let hold_time_bytes: [u8; 4] = (100 as u32).to_be_bytes();
+attribution_data.hold_times[..HOLD_TIME_LEN].copy_from_slice(&hold_time_bytes);
+attribution_data.add_hmacs(&htlc.prev_hop.incoming_packet_shared_secret, &[]);
+attribution_data.crypt(&htlc.prev_hop.incoming_packet_shared_secret);
+
 self.claim_funds_from_hop(
 htlc.prev_hop, payment_preimage, payment_info.clone(),
+attribution_data,
 |_, definitely_duplicate| {
 debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
 (Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim: this_mpp_claim }), raa_blocker)
@@ -7269,7 +7281,7 @@
 ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 >(
 &self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
-payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, completion_action: ComplFunc,
 ) {
 let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
 let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7282,15 +7294,17 @@
 channel_id: prev_hop.channel_id,
 htlc_id: prev_hop.htlc_id,
 };
-self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
+self.claim_mpp_part(htlc_source, payment_preimage, payment_info, attribution_data, completion_action)
 }
 
 fn claim_mpp_part<
 ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 >(
 &self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
-payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, completion_action: ComplFunc,
 ) {
+log_info!(self.logger, "claim_mpp_part called");
+
 //TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
 // If we haven't yet run background events assume we're still deserializing and shouldn't
@@ -7322,7 +7336,7 @@
 if let hash_map::Entry::Occupied(mut chan_entry) = peer_state.channel_by_id.entry(chan_id) {
 if let Some(chan) = chan_entry.get_mut().as_funded_mut() {
 let logger = WithChannelContext::from(&self.logger, &chan.context, None);
-let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, &&logger);
+let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, attribution_data, &&logger);
 
 match fulfill_res {
 UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } => {
@@ -7474,9 +7488,16 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
 startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
 next_channel_outpoint: OutPoint, next_channel_id: ChannelId, next_user_channel_id: Option<u128>,
+attribution_data: Option<&AttributionData>,
 ) {
+log_info!(self.logger, "claim_funds_internal - ONLY NON FINAL");
 match source {
 HTLCSource::OutboundRoute { session_priv, payment_id, path, bolt12_invoice, .. } => {
+if let Some(attribution_data) = attribution_data {
+process_onion_success(&self.secp_ctx, &self.logger, &path,
+&session_priv, attribution_data.clone());
+}
+
 debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
 "We don't support claim_htlc claims during startup - monitors may not be available yet");
 debug_assert_eq!(next_channel_counterparty_node_id, path.hops[0].pubkey);
@@ -7493,7 +7514,25 @@
 let prev_user_channel_id = hop_data.user_channel_id;
 let prev_node_id = hop_data.counterparty_node_id;
 let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
-self.claim_funds_from_hop(hop_data, payment_preimage, None,
+
+let mut attribution_data = attribution_data
+.map_or(AttributionData::new(), |attribution_data| {
+let mut attribution_data = attribution_data.clone();
+
+attribution_data.shift_right();
+
+attribution_data
+});
+
+// Fake hold time here.
+let hold_time = hop_data.incoming_packet_shared_secret[0];
+
+let hold_time_bytes: [u8; 4] = (10 + hold_time as u32).to_be_bytes();
+attribution_data.hold_times[..HOLD_TIME_LEN].copy_from_slice(&hold_time_bytes);
+attribution_data.add_hmacs(&hop_data.incoming_packet_shared_secret, &[]);
+attribution_data.crypt(&hop_data.incoming_packet_shared_secret);
+
+self.claim_funds_from_hop(hop_data, payment_preimage, None, attribution_data,
 |htlc_claim_value_msat, definitely_duplicate| {
 let chan_to_release = Some(EventUnblockedChannel {
 counterparty_node_id: next_channel_counterparty_node_id,
@@ -8937,7 +8976,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 };
 self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(),
 Some(forwarded_htlc_value), skimmed_fee_msat, false, false, *counterparty_node_id,
-funding_txo, msg.channel_id, Some(next_user_channel_id),
+funding_txo, msg.channel_id, Some(next_user_channel_id), msg.attribution_data.as_ref(),
 );
 
 Ok(())
@@ -9638,6 +9677,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 htlc_update.source, preimage,
 htlc_update.htlc_value_satoshis.map(|v| v * 1000), None, true,
 false, counterparty_node_id, funding_outpoint, channel_id, None,
+None,
 );
 } else {
 log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
@@ -12154,6 +12194,7 @@ where
 }
 
 fn handle_update_fulfill_htlc(&self, counterparty_node_id: PublicKey, msg: &msgs::UpdateFulfillHTLC) {
+log_info!(self.logger, "Received update_fulfill_htlc: {:?}", msg);
 let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
 let _ = handle_error!(self, self.internal_update_fulfill_htlc(&counterparty_node_id, msg), counterparty_node_id);
 }
@@ -14905,7 +14946,7 @@ where
 // already (clearly) durably on disk in the `ChannelMonitor` so there's
 // no need to worry about getting it into others.
 channel_manager.claim_mpp_part(
-part.into(), payment_preimage, None,
+part.into(), payment_preimage, None, AttributionData::new(),
 |_, _|
 (Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim }), pending_claim_ptr)
 );
@@ -15011,7 +15052,7 @@ where
 // channel is closed we just assume that it probably came from an on-chain claim.
 channel_manager.claim_funds_internal(source, preimage, Some(downstream_value), None,
 downstream_closed, true, downstream_node_id, downstream_funding,
-downstream_channel_id, None
+downstream_channel_id, None, None,
 );
 }
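
The claim paths above share one pattern before handing data to `claim_funds_from_hop`: write this node's hold time as a big-endian u32 into the first `HOLD_TIME_LEN` bytes of `hold_times` (after `shift_right` has made room when attribution data arrived from downstream), then add the HMACs and encrypt with the incoming packet's shared secret. The hold times themselves are still placeholders here, as the `// Fake hold time here.` comment flags. A minimal standalone sketch of just the hold-time packing follows, assuming a 4-byte hold time per hop and a 20-hop buffer; the `AttributionData` type, its field layout, and the omission of the HMAC/encryption steps are simplifications, not LDK's implementation.

const HOLD_TIME_LEN: usize = 4;
const MAX_HOPS: usize = 20;

struct AttributionData {
    hold_times: [u8; HOLD_TIME_LEN * MAX_HOPS],
}

impl AttributionData {
    fn new() -> Self {
        Self { hold_times: [0u8; HOLD_TIME_LEN * MAX_HOPS] }
    }

    // Shift existing hold times one slot to the right so this node's own value
    // can occupy the first slot (stand-in for `shift_right` above).
    fn shift_right(&mut self) {
        self.hold_times.copy_within(..HOLD_TIME_LEN * (MAX_HOPS - 1), HOLD_TIME_LEN);
        self.hold_times[..HOLD_TIME_LEN].fill(0);
    }

    // Record this node's hold time as big-endian bytes in the first slot.
    fn set_own_hold_time(&mut self, hold_time: u32) {
        self.hold_times[..HOLD_TIME_LEN].copy_from_slice(&hold_time.to_be_bytes());
    }
}

fn main() {
    let mut attribution_data = AttributionData::new();
    attribution_data.set_own_hold_time(100);
    attribution_data.shift_right();
    attribution_data.set_own_hold_time(110);
    assert_eq!(&attribution_data.hold_times[..2 * HOLD_TIME_LEN], &[0, 0, 0, 110, 0, 0, 0, 100]);
}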

lightning/src/ln/htlc_reserve_unit_tests.rs

Lines changed: 1 addition & 0 deletions
@@ -1785,6 +1785,7 @@ pub fn test_update_fulfill_htlc_bolt2_update_fulfill_htlc_before_commitment() {
 channel_id: chan.2,
 htlc_id: 0,
 payment_preimage: our_payment_preimage,
+attribution_data: None,
 };
 
 nodes[0].node.handle_update_fulfill_htlc(node_b_id, &update_msg);

lightning/src/ln/msgs.rs

Lines changed: 8 additions & 1 deletion
@@ -735,6 +735,9 @@ pub struct UpdateFulfillHTLC {
 pub htlc_id: u64,
 /// The pre-image of the payment hash, allowing HTLC redemption
 pub payment_preimage: PaymentPreimage,
+
+/// Optional field for the attribution data that allows the sender to pinpoint the failing node under all conditions
+pub attribution_data: Option<AttributionData>,
 }
 
 /// A [`peer_storage`] message that can be sent to or received from a peer.
@@ -3076,7 +3079,10 @@ impl_writeable_msg!(UpdateFulfillHTLC, {
 channel_id,
 htlc_id,
 payment_preimage
-}, {});
+}, {
+// Specified TLV key 1 plus 100 during testing phase.
+(101, attribution_data, option)
+});
 
 impl_writeable_msg!(PeerStorage, { data }, {});
 
@@ -5552,6 +5558,7 @@ mod tests {
 channel_id: ChannelId::from_bytes([2; 32]),
 htlc_id: 2316138423780173,
 payment_preimage: PaymentPreimage([1; 32]),
+attribution_data: None,
 };
 let encoded_value = update_fulfill_htlc.encode();
 let target_value = <Vec<u8>>::from_hex("020202020202020202020202020202020202020202020202020202020202020200083a840000034d0101010101010101010101010101010101010101010101010101010101010101").unwrap();
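
For context on the `(101, attribution_data, option)` line above: the new field rides in the message's optional TLV stream, so a message without attribution data serializes exactly as before, which is why the existing test vector only needed `attribution_data: None` added to the struct literal. Type 101 is the spec's type 1 plus 100 while this is in the testing phase, and being odd it may be ignored by peers that do not understand it. Below is a minimal standalone sketch of that framing; `encode_update_fulfill_htlc` is a hypothetical helper, and the single-byte type and length fields are a simplification of LDK's BigSize-based TLV encoding.

fn encode_update_fulfill_htlc(
    channel_id: &[u8; 32],
    htlc_id: u64,
    payment_preimage: &[u8; 32],
    attribution_data: Option<&[u8]>,
) -> Vec<u8> {
    // Fixed fields, unchanged from the pre-existing wire format.
    let mut buf = Vec::new();
    buf.extend_from_slice(channel_id);
    buf.extend_from_slice(&htlc_id.to_be_bytes());
    buf.extend_from_slice(payment_preimage);
    // Optional trailing TLV record; odd type, so peers may safely ignore it.
    if let Some(data) = attribution_data {
        buf.push(101);
        buf.push(data.len() as u8);
        buf.extend_from_slice(data);
    }
    buf
}

fn main() {
    let without = encode_update_fulfill_htlc(&[2; 32], 2316138423780173, &[1; 32], None);
    let with = encode_update_fulfill_htlc(&[2; 32], 2316138423780173, &[1; 32], Some(&[0; 8]));
    assert_eq!(without.len(), 72); // same length as the unchanged test vector above
    assert_eq!(with.len(), without.len() + 2 + 8);
}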
