Skip to content

Commit 6157c89

Browse files
committed
Add hold times to update_fulfill_htlc
1 parent 374427a commit 6157c89

File tree

6 files changed

+148
-25
lines changed

6 files changed

+148
-25
lines changed

lightning/src/ln/chanmon_update_fail_tests.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2511,6 +2511,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
25112511
channel_id: chan_id_2,
25122512
htlc_id: 0,
25132513
payment_preimage,
2514+
attribution_data: None,
25142515
};
25152516
if second_fails {
25162517
nodes[2].node.fail_htlc_backwards(&payment_hash);

lightning/src/ln/channel.rs

Lines changed: 16 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,7 @@ use crate::sync::Mutex;
7878
use crate::sign::type_resolver::ChannelSignerType;
7979

8080
use super::channel_keys::{DelayedPaymentBasepoint, HtlcBasepoint, RevocationBasepoint};
81+
use super::onion_utils::{HMAC_COUNT, HOLD_TIME_LEN, MAX_HOPS, HMAC_LEN};
8182

8283
#[cfg(any(test, feature = "_test_utils"))]
8384
#[allow(unused)]
@@ -121,7 +122,7 @@ enum FeeUpdateState {
121122
enum InboundHTLCRemovalReason {
122123
FailRelay(msgs::OnionErrorPacket),
123124
FailMalformed(([u8; 32], u16)),
124-
Fulfill(PaymentPreimage),
125+
Fulfill(PaymentPreimage, Option<AttributionData>),
125126
}
126127

127128
/// Represents the resolution status of an inbound HTLC.
@@ -220,7 +221,7 @@ impl From<&InboundHTLCState> for Option<InboundHTLCStateDetails> {
220221
Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFail),
221222
InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::FailMalformed(_)) =>
222223
Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFail),
223-
InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(_)) =>
224+
InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(_, _)) =>
224225
Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFulfill),
225226
}
226227
}
@@ -251,7 +252,7 @@ impl InboundHTLCState {
251252

252253
fn preimage(&self) -> Option<PaymentPreimage> {
253254
match self {
254-
InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(preimage)) => Some(*preimage),
255+
InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(preimage, _)) => Some(*preimage),
255256
_ => None,
256257
}
257258
}
@@ -5347,7 +5348,7 @@ impl<SP: Deref> FundedChannel<SP> where
53475348
// (see equivalent if condition there).
53485349
assert!(!self.context.channel_state.can_generate_new_commitment());
53495350
let mon_update_id = self.context.latest_monitor_update_id; // Forget the ChannelMonitor update
5350-
let fulfill_resp = self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, logger);
5351+
let fulfill_resp = self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, None, logger);
53515352
self.context.latest_monitor_update_id = mon_update_id;
53525353
if let UpdateFulfillFetch::NewClaim { msg, .. } = fulfill_resp {
53535354
assert!(!msg); // The HTLC must have ended up in the holding cell.
@@ -5356,7 +5357,7 @@ impl<SP: Deref> FundedChannel<SP> where
53565357

53575358
fn get_update_fulfill_htlc<L: Deref>(
53585359
&mut self, htlc_id_arg: u64, payment_preimage_arg: PaymentPreimage,
5359-
payment_info: Option<PaymentClaimDetails>, logger: &L,
5360+
payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>, logger: &L,
53605361
) -> UpdateFulfillFetch where L::Target: Logger {
53615362
// Either ChannelReady got set (which means it won't be unset) or there is no way any
53625363
// caller thought we could have something claimed (cause we wouldn't have accepted in an
@@ -5380,7 +5381,7 @@ impl<SP: Deref> FundedChannel<SP> where
53805381
match htlc.state {
53815382
InboundHTLCState::Committed => {},
53825383
InboundHTLCState::LocalRemoved(ref reason) => {
5383-
if let &InboundHTLCRemovalReason::Fulfill(_) = reason {
5384+
if let &InboundHTLCRemovalReason::Fulfill(_, _) = reason {
53845385
} else {
53855386
log_warn!(logger, "Have preimage and want to fulfill HTLC with payment hash {} we already failed against channel {}", &htlc.payment_hash, &self.context.channel_id());
53865387
debug_assert!(false, "Tried to fulfill an HTLC that was already failed");
@@ -5458,7 +5459,7 @@ impl<SP: Deref> FundedChannel<SP> where
54585459
return UpdateFulfillFetch::NewClaim { monitor_update, htlc_value_msat, msg: false };
54595460
}
54605461
log_trace!(logger, "Upgrading HTLC {} to LocalRemoved with a Fulfill in channel {}!", &htlc.payment_hash, &self.context.channel_id);
5461-
htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(payment_preimage_arg.clone()));
5462+
htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(payment_preimage_arg.clone(), attribution_data));
54625463
}
54635464

54645465
UpdateFulfillFetch::NewClaim {
@@ -5470,10 +5471,10 @@ impl<SP: Deref> FundedChannel<SP> where
54705471

54715472
pub fn get_update_fulfill_htlc_and_commit<L: Deref>(
54725473
&mut self, htlc_id: u64, payment_preimage: PaymentPreimage,
5473-
payment_info: Option<PaymentClaimDetails>, logger: &L,
5474+
payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>, logger: &L,
54745475
) -> UpdateFulfillCommitFetch where L::Target: Logger {
54755476
let release_cs_monitor = self.context.blocked_monitor_updates.is_empty();
5476-
match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, logger) {
5477+
match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, attribution_data, logger) {
54775478
UpdateFulfillFetch::NewClaim { mut monitor_update, htlc_value_msat, msg } => {
54785479
// Even if we aren't supposed to let new monitor updates with commitment state
54795480
// updates run, we still need to push the preimage ChannelMonitorUpdateStep no
@@ -6201,7 +6202,7 @@ impl<SP: Deref> FundedChannel<SP> where
62016202
// We do not bother to track and include `payment_info` here, however.
62026203
let mut additional_monitor_update =
62036204
if let UpdateFulfillFetch::NewClaim { monitor_update, .. } =
6204-
self.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, logger)
6205+
self.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, None, logger) // TODO: Attr data from holding cell
62056206
{ monitor_update } else { unreachable!() };
62066207
update_fulfill_count += 1;
62076208
monitor_update.updates.append(&mut additional_monitor_update.updates);
@@ -6366,7 +6367,7 @@ impl<SP: Deref> FundedChannel<SP> where
63666367
pending_inbound_htlcs.retain(|htlc| {
63676368
if let &InboundHTLCState::LocalRemoved(ref reason) = &htlc.state {
63686369
log_trace!(logger, " ...removing inbound LocalRemoved {}", &htlc.payment_hash);
6369-
if let &InboundHTLCRemovalReason::Fulfill(_) = reason {
6370+
if let &InboundHTLCRemovalReason::Fulfill(_, _) = reason {
63706371
value_to_self_msat_diff += htlc.amount_msat as i64;
63716372
}
63726373
*expecting_peer_commitment_signed = true;
@@ -7139,11 +7140,12 @@ impl<SP: Deref> FundedChannel<SP> where
71397140
failure_code: failure_code.clone(),
71407141
});
71417142
},
7142-
&InboundHTLCRemovalReason::Fulfill(ref payment_preimage) => {
7143+
&InboundHTLCRemovalReason::Fulfill(ref payment_preimage, ref attribution_data) => {
71437144
update_fulfill_htlcs.push(msgs::UpdateFulfillHTLC {
71447145
channel_id: self.context.channel_id(),
71457146
htlc_id: htlc.htlc_id,
71467147
payment_preimage: payment_preimage.clone(),
7148+
attribution_data: attribution_data.clone(),
71477149
});
71487150
},
71497151
}
@@ -10642,7 +10644,7 @@ impl<SP: Deref> Writeable for FundedChannel<SP> where SP::Target: SignerProvider
1064210644
1u8.write(writer)?;
1064310645
(hash, code).write(writer)?;
1064410646
},
10645-
InboundHTLCRemovalReason::Fulfill(preimage) => {
10647+
InboundHTLCRemovalReason::Fulfill(preimage, _) => { // TODO: Persistence
1064610648
2u8.write(writer)?;
1064710649
preimage.write(writer)?;
1064810650
},
@@ -11003,7 +11005,7 @@ impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, &'c Channel
1100311005
attribution_data: None,
1100411006
}),
1100511007
1 => InboundHTLCRemovalReason::FailMalformed(Readable::read(reader)?),
11006-
2 => InboundHTLCRemovalReason::Fulfill(Readable::read(reader)?),
11008+
2 => InboundHTLCRemovalReason::Fulfill(Readable::read(reader)?, None), // TODO: Persistence
1100711009
_ => return Err(DecodeError::InvalidValue),
1100811010
};
1100911011
InboundHTLCState::LocalRemoved(reason)

lightning/src/ln/channelmanager.rs

Lines changed: 52 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ use crate::types::features::Bolt11InvoiceFeatures;
5959
use crate::routing::router::{BlindedTail, InFlightHtlcs, Path, Payee, PaymentParameters, RouteParameters, RouteParametersConfig, Router, FixedRouter, Route};
6060
use crate::ln::onion_payment::{check_incoming_htlc_cltv, create_recv_pending_htlc_info, create_fwd_pending_htlc_info, decode_incoming_update_add_htlc_onion, HopConnector, InboundHTLCErr, NextPacketDetails, invalid_payment_err_data};
6161
use crate::ln::msgs;
62-
use crate::ln::onion_utils::{self};
62+
use crate::ln::onion_utils::{self, build_failure_packet, process_onion_success, HMAC_COUNT, HMAC_LEN, HOLD_TIME_LEN, MAX_HOPS};
6363
use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
6464
use crate::ln::msgs::{BaseMessageHandler, ChannelMessageHandler, CommitmentUpdate, DecodeError, LightningError, MessageSendEvent};
6565
#[cfg(test)]
@@ -88,6 +88,8 @@ use crate::util::ser::{BigSize, FixedLengthReader, LengthReadable, Readable, Rea
8888
use crate::util::logger::{Level, Logger, WithContext};
8989
use crate::util::errors::APIError;
9090

91+
use crate::ln::onion_utils::AttributionData;
92+
9193
#[cfg(async_payments)] use {
9294
crate::offers::offer::Amount,
9395
crate::offers::static_invoice::{DEFAULT_RELATIVE_EXPIRY as STATIC_INVOICE_DEFAULT_RELATIVE_EXPIRY, StaticInvoice, StaticInvoiceBuilder},
@@ -7239,8 +7241,19 @@ where
72397241
pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
72407242
}
72417243
});
7244+
7245+
log_info!(self.logger, "ONLY ONCE");
7246+
7247+
let mut attribution_data = AttributionData::new();
7248+
let hold_time_bytes: [u8; 4] = (100 as u32).to_be_bytes();
7249+
attribution_data.hold_times[..HOLD_TIME_LEN].copy_from_slice(&hold_time_bytes);
7250+
attribution_data.add_hmacs(&htlc.prev_hop.incoming_packet_shared_secret, &[]);
7251+
attribution_data.crypt(&htlc.prev_hop.incoming_packet_shared_secret);
7252+
72427253
self.claim_funds_from_hop(
72437254
htlc.prev_hop, payment_preimage, payment_info.clone(),
7255+
Some(attribution_data),
7256+
72447257
|_, definitely_duplicate| {
72457258
debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
72467259
(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim: this_mpp_claim }), raa_blocker)
@@ -7269,7 +7282,7 @@ where
72697282
ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
72707283
>(
72717284
&self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
7272-
payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
7285+
payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>, completion_action: ComplFunc,
72737286
) {
72747287
let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
72757288
let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7282,15 +7295,17 @@ where
72827295
channel_id: prev_hop.channel_id,
72837296
htlc_id: prev_hop.htlc_id,
72847297
};
7285-
self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
7298+
self.claim_mpp_part(htlc_source, payment_preimage, payment_info, attribution_data, completion_action)
72867299
}
72877300

72887301
fn claim_mpp_part<
72897302
ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
72907303
>(
72917304
&self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
7292-
payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
7305+
payment_info: Option<PaymentClaimDetails>, attribution_data: Option<AttributionData>, completion_action: ComplFunc,
72937306
) {
7307+
log_info!(self.logger, "claim_mpp_part called");
7308+
72947309
//TODO: Delay the claimed_funds relaying just like we do outbound relay!
72957310

72967311
// If we haven't yet run background events assume we're still deserializing and shouldn't
@@ -7322,7 +7337,7 @@ where
73227337
if let hash_map::Entry::Occupied(mut chan_entry) = peer_state.channel_by_id.entry(chan_id) {
73237338
if let Some(chan) = chan_entry.get_mut().as_funded_mut() {
73247339
let logger = WithChannelContext::from(&self.logger, &chan.context, None);
7325-
let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, &&logger);
7340+
let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, attribution_data, &&logger);
73267341

73277342
match fulfill_res {
73287343
UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } => {
@@ -7474,9 +7489,17 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
74747489
forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
74757490
startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
74767491
next_channel_outpoint: OutPoint, next_channel_id: ChannelId, next_user_channel_id: Option<u128>,
7492+
attribution_data: Option<&AttributionData>,
74777493
) {
7494+
log_info!(self.logger, "claim_funds_internal - ONLY NON FINAL");
74787495
match source {
74797496
HTLCSource::OutboundRoute { session_priv, payment_id, path, bolt12_invoice, .. } => {
7497+
// Log attribution data.
7498+
log_info!(self.logger, "SENDER: Attribution data: {:?}", attribution_data);
7499+
process_onion_success(&self.secp_ctx, &self.logger, &path,
7500+
&session_priv, None,
7501+
attribution_data.unwrap().clone());
7502+
74807503
debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
74817504
"We don't support claim_htlc claims during startup - monitors may not be available yet");
74827505
debug_assert_eq!(next_channel_counterparty_node_id, path.hops[0].pubkey);
@@ -7493,7 +7516,25 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
74937516
let prev_user_channel_id = hop_data.user_channel_id;
74947517
let prev_node_id = hop_data.counterparty_node_id;
74957518
let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
7496-
self.claim_funds_from_hop(hop_data, payment_preimage, None,
7519+
7520+
let attribution_data = attribution_data
7521+
.map(|attribution_data| {
7522+
let mut attribution_data = attribution_data.clone();
7523+
7524+
attribution_data.shift_right();
7525+
7526+
// Fake hold time here.
7527+
let hold_time = hop_data.incoming_packet_shared_secret[0];
7528+
7529+
let hold_time_bytes: [u8; 4] = (10 + hold_time as u32).to_be_bytes();
7530+
attribution_data.hold_times[..HOLD_TIME_LEN].copy_from_slice(&hold_time_bytes);
7531+
attribution_data.add_hmacs(&hop_data.incoming_packet_shared_secret, &[]);
7532+
attribution_data.crypt(&hop_data.incoming_packet_shared_secret);
7533+
7534+
attribution_data
7535+
});
7536+
7537+
self.claim_funds_from_hop(hop_data, payment_preimage, None, attribution_data,
74977538
|htlc_claim_value_msat, definitely_duplicate| {
74987539
let chan_to_release = Some(EventUnblockedChannel {
74997540
counterparty_node_id: next_channel_counterparty_node_id,
@@ -8937,7 +8978,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
89378978
};
89388979
self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(),
89398980
Some(forwarded_htlc_value), skimmed_fee_msat, false, false, *counterparty_node_id,
8940-
funding_txo, msg.channel_id, Some(next_user_channel_id),
8981+
funding_txo, msg.channel_id, Some(next_user_channel_id), msg.attribution_data.as_ref(),
89418982
);
89428983

89438984
Ok(())
@@ -9638,6 +9679,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
96389679
htlc_update.source, preimage,
96399680
htlc_update.htlc_value_satoshis.map(|v| v * 1000), None, true,
96409681
false, counterparty_node_id, funding_outpoint, channel_id, None,
9682+
None,
96419683
);
96429684
} else {
96439685
log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
@@ -12154,6 +12196,7 @@ where
1215412196
}
1215512197

1215612198
fn handle_update_fulfill_htlc(&self, counterparty_node_id: PublicKey, msg: &msgs::UpdateFulfillHTLC) {
12199+
log_info!(self.logger, "Received update_fulfill_htlc: {:?}", msg);
1215712200
let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
1215812201
let _ = handle_error!(self, self.internal_update_fulfill_htlc(&counterparty_node_id, msg), counterparty_node_id);
1215912202
}
@@ -14905,7 +14948,7 @@ where
1490514948
// already (clearly) durably on disk in the `ChannelMonitor` so there's
1490614949
// no need to worry about getting it into others.
1490714950
channel_manager.claim_mpp_part(
14908-
part.into(), payment_preimage, None,
14951+
part.into(), payment_preimage, None, None,
1490914952
|_, _|
1491014953
(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim }), pending_claim_ptr)
1491114954
);
@@ -15011,7 +15054,7 @@ where
1501115054
// channel is closed we just assume that it probably came from an on-chain claim.
1501215055
channel_manager.claim_funds_internal(source, preimage, Some(downstream_value), None,
1501315056
downstream_closed, true, downstream_node_id, downstream_funding,
15014-
downstream_channel_id, None
15057+
downstream_channel_id, None, None,
1501515058
);
1501615059
}
1501715060

lightning/src/ln/htlc_reserve_unit_tests.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1785,6 +1785,7 @@ pub fn test_update_fulfill_htlc_bolt2_update_fulfill_htlc_before_commitment() {
17851785
channel_id: chan.2,
17861786
htlc_id: 0,
17871787
payment_preimage: our_payment_preimage,
1788+
attribution_data: None,
17881789
};
17891790

17901791
nodes[0].node.handle_update_fulfill_htlc(node_b_id, &update_msg);

lightning/src/ln/msgs.rs

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -735,6 +735,9 @@ pub struct UpdateFulfillHTLC {
735735
pub htlc_id: u64,
736736
/// The pre-image of the payment hash, allowing HTLC redemption
737737
pub payment_preimage: PaymentPreimage,
738+
739+
/// Optional attribution data carrying per-hop hold times for a successful payment, allowing the sender to measure where delays occurred along the path.
740+
pub attribution_data: Option<AttributionData>,
738741
}
739742

740743
/// A [`peer_storage`] message that can be sent to or received from a peer.
@@ -3076,7 +3079,10 @@ impl_writeable_msg!(UpdateFulfillHTLC, {
30763079
channel_id,
30773080
htlc_id,
30783081
payment_preimage
3079-
}, {});
3082+
}, {
3083+
// While this feature is experimental, use TLV type 101 (the spec-assigned type 1, offset by 100) to avoid clashing with finalized deployments.
3084+
(101, attribution_data, option)
3085+
});
30803086

30813087
impl_writeable_msg!(PeerStorage, { data }, {});
30823088

@@ -5552,6 +5558,7 @@ mod tests {
55525558
channel_id: ChannelId::from_bytes([2; 32]),
55535559
htlc_id: 2316138423780173,
55545560
payment_preimage: PaymentPreimage([1; 32]),
5561+
attribution_data: None,
55555562
};
55565563
let encoded_value = update_fulfill_htlc.encode();
55575564
let target_value = <Vec<u8>>::from_hex("020202020202020202020202020202020202020202020202020202020202020200083a840000034d0101010101010101010101010101010101010101010101010101010101010101").unwrap();

0 commit comments

Comments
 (0)