Don't double-claim MPP payments that are pending on multiple chans

On startup, we walk the preimages and payment HTLC sets on all our
`ChannelMonitor`s, re-claiming all payments which we recently
claimed. This ensures all HTLCs in any claimed payments are claimed
across all channels.

In doing so, we expect to see the same payment multiple times; after
all, it may have been received as multiple HTLCs across multiple
channels. In such cases, there's no reason to redundantly claim the
same set of HTLCs again and again. In the current code, doing so may
lead to redundant `PaymentClaimed` events, and in an upcoming commit
it would instead cause an assertion failure.
Author: Matt Corallo
Date:   2024-11-15 22:32:47 +00:00
parent  e938ed74bb
commit  260f8759b0

2 changed files with 11 additions and 2 deletions
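
For illustration, the following is a minimal, self-contained sketch of the
dedup pattern this commit applies during claim replay. The names below
(`MppPart`, `replay_claims`) are simplified stand-ins rather than the actual
LDK types; the real code keys the set on `Vec<MPPClaimHTLCSource>`.

use std::collections::HashSet;

// Simplified stand-in for one HTLC part of an MPP payment as recorded in a
// ChannelMonitor (the real type is MPPClaimHTLCSource).
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
struct MppPart {
	channel_id: [u8; 32],
	htlc_id: u64,
}

// Replays claims found across all monitors, skipping any MPP HTLC set that
// was already replayed via another channel's monitor.
fn replay_claims(claims_per_monitor: Vec<Vec<Vec<MppPart>>>) {
	let mut processed_claims: HashSet<Vec<MppPart>> = HashSet::new();
	for monitor_claims in claims_per_monitor {
		for mpp_parts in monitor_claims {
			if processed_claims.contains(&mpp_parts) {
				// Same payment already seen via another channel; nothing new to do.
				continue;
			}
			// ... replay the claim for every part of `mpp_parts` here ...
			processed_claims.insert(mpp_parts);
		}
	}
}

With this in place, replaying the same payment from a second monitor becomes a
no-op instead of producing a duplicate `PaymentClaimed` event.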


@@ -1185,7 +1185,7 @@ impl From<&MPPClaimHTLCSource> for HTLCClaimSource {
 	}
 }
 
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
 /// The source of an HTLC which is being claimed as a part of an incoming payment. Each part is
 /// tracked in [`PendingMPPClaim`] as well as in [`ChannelMonitor`]s, so that it can be converted
 /// to an [`HTLCClaimSource`] for claim replays on startup.
@@ -14159,10 +14159,18 @@ where
 		testing_dnssec_proof_offer_resolution_override: Mutex::new(new_hash_map()),
 	};
 
+	let mut processed_claims: HashSet<Vec<MPPClaimHTLCSource>> = new_hash_set();
 	for (_, monitor) in args.channel_monitors.iter() {
 		for (payment_hash, (payment_preimage, payment_claims)) in monitor.get_stored_preimages() {
 			if !payment_claims.is_empty() {
 				for payment_claim in payment_claims {
+					if processed_claims.contains(&payment_claim.mpp_parts) {
+						// We might get the same payment a few times from different channels
+						// that the MPP payment was received using. There's no point in trying
+						// to claim the same payment again and again, so we check if the HTLCs
+						// are the same and skip the payment here.
+						continue;
+					}
 					if payment_claim.mpp_parts.is_empty() {
 						return Err(DecodeError::InvalidValue);
 					}
@@ -14217,6 +14225,7 @@ where
 							(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim }), pending_claim_ptr)
 						);
 					}
+					processed_claims.insert(payment_claim.mpp_parts);
 				}
 			} else {
 				let per_peer_state = channel_manager.per_peer_state.read().unwrap();
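
For context, the `Hash` derive added above is what makes the
`HashSet<Vec<MPPClaimHTLCSource>>` possible: `HashSet` keys must implement
`Hash + Eq`, and `Vec<T>` only does so when `T` does. A minimal sketch of the
same constraint, using a hypothetical simplified struct rather than the real
claim-source type:

use std::collections::HashSet;

// Hypothetical simplified claim-source type; the real MPPClaimHTLCSource has
// different fields, but the constraint is the same: HashSet keys must be
// Hash + Eq, and Vec<T> is only Hash/Eq when T is.
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
struct ClaimSource {
	counterparty_node_id: [u8; 33],
	htlc_id: u64,
}

fn main() {
	let mut processed: HashSet<Vec<ClaimSource>> = HashSet::new();
	let parts = vec![ClaimSource { counterparty_node_id: [2; 33], htlc_id: 0 }];
	assert!(processed.insert(parts.clone()));
	// Inserting the identical part set again reports it as a duplicate.
	assert!(!processed.insert(parts));
}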


@@ -900,7 +900,7 @@ fn do_test_partial_claim_before_restart(persist_both_monitors: bool) {
 	if persist_both_monitors {
 		if let Event::ChannelClosed { reason: ClosureReason::OutdatedChannelManager, .. } = events[2] { } else { panic!(); }
 		if let Event::PaymentClaimed { amount_msat: 15_000_000, .. } = events[3] { } else { panic!(); }
-		check_added_monitors(&nodes[3], 6);
+		check_added_monitors(&nodes[3], 4);
 	} else {
 		if let Event::PaymentClaimed { amount_msat: 15_000_000, .. } = events[2] { } else { panic!(); }
 		check_added_monitors(&nodes[3], 3);