Skip to content

Commit f80155e

Browse files
committed
Handle claim result event generation in claim_funds_from_hop
Currently `claim_funds` and `claim_funds_internal` call `claim_funds_from_hop` and then surface an `Event` to the user informing them of the forwarded/claimed payment based on its result. In both places we assume that a claim "completed" even if a monitor update is being done async. Instead, here we push that event generation through a `MonitorUpdateCompletionAction` and a call to `handle_monitor_update_completion_action`. This will allow us to hold the event(s) until async monitor updates complete in the future.
1 parent 76ccac1 commit f80155e

File tree

1 file changed

+43
-50
lines changed

1 file changed

+43
-50
lines changed

lightning/src/ln/channelmanager.rs

Lines changed: 43 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -4303,7 +4303,6 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
43034303
let mut expected_amt_msat = None;
43044304
let mut valid_mpp = true;
43054305
let mut errs = Vec::new();
4306-
let mut claimed_any_htlcs = false;
43074306
let mut channel_state = Some(self.channel_state.lock().unwrap());
43084307
for htlc in sources.iter() {
43094308
let chan_id = match self.short_to_chan_info.read().unwrap().get(&htlc.prev_hop.short_channel_id) {
@@ -4353,13 +4352,14 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
43534352
if valid_mpp {
43544353
for htlc in sources.drain(..) {
43554354
if channel_state.is_none() { channel_state = Some(self.channel_state.lock().unwrap()); }
4356-
match self.claim_funds_from_hop(channel_state.take().unwrap(), htlc.prev_hop, payment_preimage) {
4355+
match self.claim_funds_from_hop(channel_state.take().unwrap(), htlc.prev_hop, payment_preimage,
4356+
|_| Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash }))
4357+
{
43574358
ClaimFundsFromHop::MonitorUpdateFail(pk, err, _) => {
43584359
if let msgs::ErrorAction::IgnoreError = err.err.action {
43594360
// We got a temporary failure updating monitor, but will claim the
43604361
// HTLC when the monitor updating is restored (or on chain).
43614362
log_error!(self.logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
4362-
claimed_any_htlcs = true;
43634363
} else { errs.push((pk, err)); }
43644364
},
43654365
ClaimFundsFromHop::PrevHopForceClosed => {
@@ -4374,7 +4374,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
43744374
// available to be claimed. Thus, it does not make sense to set
43754375
// `claimed_any_htlcs`.
43764376
},
4377-
ClaimFundsFromHop::Success(_) => claimed_any_htlcs = true,
4377+
ClaimFundsFromHop::Success(_) => {},
43784378
}
43794379
}
43804380
}
@@ -4388,14 +4388,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
43884388
let receiver = HTLCDestination::FailedPayment { payment_hash };
43894389
self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
43904390
}
4391-
}
4392-
4393-
let ClaimingPayment { amount_msat, payment_purpose: purpose, receiver_node_id } =
4394-
self.claimable_payments.lock().unwrap().pending_claimed_payments.remove(&payment_hash).unwrap();
4395-
if claimed_any_htlcs {
4396-
self.pending_events.lock().unwrap().push(events::Event::PaymentClaimed {
4397-
payment_hash, purpose, amount_msat, receiver_node_id: Some(receiver_node_id),
4398-
});
4391+
self.claimable_payments.lock().unwrap().pending_claimed_payments.remove(&payment_hash);
43994392
}
44004393

44014394
// Now we can handle any errors which were generated.
@@ -4405,12 +4398,16 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44054398
}
44064399
}
44074400

4408-
fn claim_funds_from_hop(&self, mut channel_state_lock: MutexGuard<ChannelHolder<<K::Target as KeysInterface>::Signer>>, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage) -> ClaimFundsFromHop {
4401+
fn claim_funds_from_hop<ComplFunc: FnOnce(Option<u64>) -> Option<MonitorUpdateCompletionAction>>(&self,
4402+
mut channel_state_lock: MutexGuard<ChannelHolder<<K::Target as KeysInterface>::Signer>>,
4403+
prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage, completion_action: ComplFunc)
4404+
-> ClaimFundsFromHop {
44094405
//TODO: Delay the claimed_funds relaying just like we do outbound relay!
44104406

44114407
let chan_id = prev_hop.outpoint.to_channel_id();
44124408
let channel_state = &mut *channel_state_lock;
44134409
if let hash_map::Entry::Occupied(mut chan) = channel_state.by_id.entry(chan_id) {
4410+
let counterparty_node_id = chan.get().get_counterparty_node_id();
44144411
match chan.get_mut().get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, &self.logger) {
44154412
Ok(msgs_monitor_option) => {
44164413
if let UpdateFulfillCommitFetch::NewClaim { msgs, htlc_value_msat, monitor_update } = msgs_monitor_option {
@@ -4420,10 +4417,11 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44204417
log_given_level!(self.logger, if e == ChannelMonitorUpdateStatus::PermanentFailure { Level::Error } else { Level::Debug },
44214418
"Failed to update channel monitor with preimage {:?}: {:?}",
44224419
payment_preimage, e);
4420+
let err = handle_monitor_update_res!(self, e, chan, RAACommitmentOrder::CommitmentFirst, false, msgs.is_some()).unwrap_err();
4421+
mem::drop(channel_state_lock);
4422+
self.handle_monitor_update_completion_actions(completion_action(Some(htlc_value_msat)));
44234423
return ClaimFundsFromHop::MonitorUpdateFail(
4424-
chan.get().get_counterparty_node_id(),
4425-
handle_monitor_update_res!(self, e, chan, RAACommitmentOrder::CommitmentFirst, false, msgs.is_some()).unwrap_err(),
4426-
Some(htlc_value_msat)
4424+
counterparty_node_id, err, Some(htlc_value_msat)
44274425
);
44284426
}
44294427
}
@@ -4442,6 +4440,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44424440
}
44434441
});
44444442
}
4443+
mem::drop(channel_state_lock);
4444+
self.handle_monitor_update_completion_actions(completion_action(Some(htlc_value_msat)));
44454445
return ClaimFundsFromHop::Success(htlc_value_msat);
44464446
} else {
44474447
return ClaimFundsFromHop::DuplicateClaim;
@@ -4456,11 +4456,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44564456
payment_preimage, e);
44574457
},
44584458
}
4459-
let counterparty_node_id = chan.get().get_counterparty_node_id();
44604459
let (drop, res) = convert_chan_err!(self, e, chan.get_mut(), &chan_id);
44614460
if drop {
44624461
chan.remove_entry();
44634462
}
4463+
mem::drop(channel_state_lock);
4464+
self.handle_monitor_update_completion_actions(completion_action(None));
44644465
return ClaimFundsFromHop::MonitorUpdateFail(counterparty_node_id, res, None);
44654466
},
44664467
}
@@ -4482,6 +4483,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44824483
log_error!(self.logger, "Critical error: failed to update channel monitor with preimage {:?}: {:?}",
44834484
payment_preimage, update_res);
44844485
}
4486+
mem::drop(channel_state_lock);
4487+
// Note that we do process the completion action here. This totally could be a
4488+
// duplicate claim, but we have no way of knowing without interrogating the
4489+
// `ChannelMonitor` we've provided the above update to. Instead, note that `Event`s are
4490+
// generally always allowed to be duplicative (and it's specifically noted in
4491+
// `PaymentForwarded`).
4492+
self.handle_monitor_update_completion_actions(completion_action(None));
44854493
return ClaimFundsFromHop::PrevHopForceClosed
44864494
}
44874495
}
@@ -4556,43 +4564,28 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
45564564
},
45574565
HTLCSource::PreviousHopData(hop_data) => {
45584566
let prev_outpoint = hop_data.outpoint;
4559-
let res = self.claim_funds_from_hop(channel_state_lock, hop_data, payment_preimage);
4560-
let claimed_htlc = if let ClaimFundsFromHop::DuplicateClaim = res { false } else { true };
4561-
let htlc_claim_value_msat = match res {
4562-
ClaimFundsFromHop::MonitorUpdateFail(_, _, amt_opt) => amt_opt,
4563-
ClaimFundsFromHop::Success(amt) => Some(amt),
4564-
_ => None,
4565-
};
4566-
if let ClaimFundsFromHop::PrevHopForceClosed = res {
4567-
// Note that we do *not* set `claimed_htlc` to false here. In fact, this
4568-
// totally could be a duplicate claim, but we have no way of knowing
4569-
// without interrogating the `ChannelMonitor` we've provided the above
4570-
// update to. Instead, we simply document in `PaymentForwarded` that this
4571-
// can happen.
4572-
}
4567+
let res = self.claim_funds_from_hop(channel_state_lock, hop_data, payment_preimage,
4568+
|htlc_claim_value_msat| {
4569+
if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
4570+
let fee_earned_msat = if let Some(claimed_htlc_value) = htlc_claim_value_msat {
4571+
Some(claimed_htlc_value - forwarded_htlc_value)
4572+
} else { None };
4573+
4574+
let prev_channel_id = Some(prev_outpoint.to_channel_id());
4575+
let next_channel_id = Some(next_channel_id);
4576+
4577+
Some(MonitorUpdateCompletionAction::EmitEvent { event: events::Event::PaymentForwarded {
4578+
fee_earned_msat,
4579+
claim_from_onchain_tx: from_onchain,
4580+
prev_channel_id,
4581+
next_channel_id,
4582+
}})
4583+
} else { None }
4584+
});
45734585
if let ClaimFundsFromHop::MonitorUpdateFail(pk, err, _) = res {
45744586
let result: Result<(), _> = Err(err);
45754587
let _ = handle_error!(self, result, pk);
45764588
}
4577-
4578-
if claimed_htlc {
4579-
if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
4580-
let fee_earned_msat = if let Some(claimed_htlc_value) = htlc_claim_value_msat {
4581-
Some(claimed_htlc_value - forwarded_htlc_value)
4582-
} else { None };
4583-
4584-
let mut pending_events = self.pending_events.lock().unwrap();
4585-
let prev_channel_id = Some(prev_outpoint.to_channel_id());
4586-
let next_channel_id = Some(next_channel_id);
4587-
4588-
pending_events.push(events::Event::PaymentForwarded {
4589-
fee_earned_msat,
4590-
claim_from_onchain_tx: from_onchain,
4591-
prev_channel_id,
4592-
next_channel_id,
4593-
});
4594-
}
4595-
}
45964589
},
45974590
}
45984591
}

0 commit comments

Comments
 (0)