Integrate PackageTemplate

This commit replaces InputMaterial with PackageSolvingData and ClaimRequest with
PackageTemplate in both ChannelMonitor and OnchainTxHandler.

This doesn't change behavior.
Antoine Riard 2021-05-20 12:44:41 -04:00
parent 2d451b8622
commit 5ccb07554b
4 changed files with 159 additions and 628 deletions
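As a rough before/after sketch of the refactor (assembled from the call sites in the diff below rather than taken from it verbatim, with the self.counterparty_tx_cache fields abbreviated to locals), claiming a revoked to_local output used to pair an InputMaterial with a ClaimRequest, and now builds a PackageSolvingData variant wrapped in a PackageTemplate:

    // Before: solving material and claim metadata lived in two loosely coupled structs.
    let witness_data = InputMaterial::Revoked {
        per_commitment_point, counterparty_delayed_payment_base_key, counterparty_htlc_base_key,
        per_commitment_key, input_descriptor: InputDescriptors::RevokedOutput,
        amount: outp.value, htlc: None, on_counterparty_tx_csv,
    };
    claimable_outpoints.push(ClaimRequest {
        absolute_timelock: height + on_counterparty_tx_csv as u32,
        aggregable: true,
        outpoint: BitcoinOutPoint { txid: commitment_txid, vout: idx as u32 },
        witness_data,
    });

    // After: the per-output data becomes a PackageSolvingData variant, and the claim
    // metadata (timelock, aggregability, height) is carried by the PackageTemplate.
    let revk_outp = RevokedOutput::build(per_commitment_point, counterparty_delayed_payment_base_key,
        counterparty_htlc_base_key, per_commitment_key, outp.value, on_counterparty_tx_csv);
    let justice_package = PackageTemplate::build_package(commitment_txid, idx as u32,
        PackageSolvingData::RevokedOutput(revk_outp),
        height + on_counterparty_tx_csv as u32, true, height);
    claimable_outpoints.push(justice_package);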

@ -22,7 +22,6 @@
use bitcoin::blockdata::block::{Block, BlockHeader};
use bitcoin::blockdata::transaction::{TxOut,Transaction};
use bitcoin::blockdata::transaction::OutPoint as BitcoinOutPoint;
use bitcoin::blockdata::script::{Script, Builder};
use bitcoin::blockdata::opcodes;
@ -40,7 +39,7 @@ use ln::chan_utils;
use ln::chan_utils::{CounterpartyCommitmentSecrets, HTLCOutputInCommitment, HTLCType, ChannelTransactionParameters, HolderCommitmentTransaction};
use ln::channelmanager::{BestBlock, HTLCSource};
use ln::onchaintx::OnchainTxHandler;
use ln::package::InputDescriptors;
use ln::package::{CounterpartyOfferedHTLCOutput, CounterpartyReceivedHTLCOutput, HolderFundingOutput, HolderHTLCOutput, PackageSolvingData, PackageTemplate, RevokedOutput, RevokedHTLCOutput};
use chain;
use chain::WatchedOutput;
use chain::chaininterface::{BroadcasterInterface, FeeEstimator};
@ -322,151 +321,6 @@ impl Readable for CounterpartyCommitmentTransaction {
}
}
/// When ChannelMonitor discovers an onchain outpoint being a step of a channel and that it needs
/// to generate a tx to push channel state forward, we cache outpoint-solving tx material to build
/// a new bumped one in case of lengthy confirmation delay
#[derive(Clone, PartialEq)]
pub(crate) enum InputMaterial {
Revoked {
per_commitment_point: PublicKey,
counterparty_delayed_payment_base_key: PublicKey,
counterparty_htlc_base_key: PublicKey,
per_commitment_key: SecretKey,
input_descriptor: InputDescriptors,
amount: u64,
htlc: Option<HTLCOutputInCommitment>,
on_counterparty_tx_csv: u16,
},
CounterpartyHTLC {
per_commitment_point: PublicKey,
counterparty_delayed_payment_base_key: PublicKey,
counterparty_htlc_base_key: PublicKey,
preimage: Option<PaymentPreimage>,
htlc: HTLCOutputInCommitment
},
HolderHTLC {
preimage: Option<PaymentPreimage>,
amount: u64,
},
Funding {
funding_redeemscript: Script,
}
}
impl Writeable for InputMaterial {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> {
match self {
&InputMaterial::Revoked { ref per_commitment_point, ref counterparty_delayed_payment_base_key, ref counterparty_htlc_base_key, ref per_commitment_key, ref input_descriptor, ref amount, ref htlc, ref on_counterparty_tx_csv} => {
writer.write_all(&[0; 1])?;
per_commitment_point.write(writer)?;
counterparty_delayed_payment_base_key.write(writer)?;
counterparty_htlc_base_key.write(writer)?;
writer.write_all(&per_commitment_key[..])?;
input_descriptor.write(writer)?;
writer.write_all(&byte_utils::be64_to_array(*amount))?;
htlc.write(writer)?;
on_counterparty_tx_csv.write(writer)?;
},
&InputMaterial::CounterpartyHTLC { ref per_commitment_point, ref counterparty_delayed_payment_base_key, ref counterparty_htlc_base_key, ref preimage, ref htlc} => {
writer.write_all(&[1; 1])?;
per_commitment_point.write(writer)?;
counterparty_delayed_payment_base_key.write(writer)?;
counterparty_htlc_base_key.write(writer)?;
preimage.write(writer)?;
htlc.write(writer)?;
},
&InputMaterial::HolderHTLC { ref preimage, ref amount } => {
writer.write_all(&[2; 1])?;
preimage.write(writer)?;
writer.write_all(&byte_utils::be64_to_array(*amount))?;
},
&InputMaterial::Funding { ref funding_redeemscript } => {
writer.write_all(&[3; 1])?;
funding_redeemscript.write(writer)?;
}
}
Ok(())
}
}
impl Readable for InputMaterial {
fn read<R: ::std::io::Read>(reader: &mut R) -> Result<Self, DecodeError> {
let input_material = match <u8 as Readable>::read(reader)? {
0 => {
let per_commitment_point = Readable::read(reader)?;
let counterparty_delayed_payment_base_key = Readable::read(reader)?;
let counterparty_htlc_base_key = Readable::read(reader)?;
let per_commitment_key = Readable::read(reader)?;
let input_descriptor = Readable::read(reader)?;
let amount = Readable::read(reader)?;
let htlc = Readable::read(reader)?;
let on_counterparty_tx_csv = Readable::read(reader)?;
InputMaterial::Revoked {
per_commitment_point,
counterparty_delayed_payment_base_key,
counterparty_htlc_base_key,
per_commitment_key,
input_descriptor,
amount,
htlc,
on_counterparty_tx_csv
}
},
1 => {
let per_commitment_point = Readable::read(reader)?;
let counterparty_delayed_payment_base_key = Readable::read(reader)?;
let counterparty_htlc_base_key = Readable::read(reader)?;
let preimage = Readable::read(reader)?;
let htlc = Readable::read(reader)?;
InputMaterial::CounterpartyHTLC {
per_commitment_point,
counterparty_delayed_payment_base_key,
counterparty_htlc_base_key,
preimage,
htlc
}
},
2 => {
let preimage = Readable::read(reader)?;
let amount = Readable::read(reader)?;
InputMaterial::HolderHTLC {
preimage,
amount,
}
},
3 => {
InputMaterial::Funding {
funding_redeemscript: Readable::read(reader)?,
}
}
_ => return Err(DecodeError::InvalidValue),
};
Ok(input_material)
}
}
/// ClaimRequest is a descriptor structure to communicate between detection
/// and reaction module. They are generated by ChannelMonitor while parsing
/// onchain txn leaked from a channel and handed over to OnchainTxHandler which
/// is responsible for opportunistic aggregation, selecting and enforcing
/// bumping logic, building and signing transactions.
pub(crate) struct ClaimRequest {
// Block height before which claiming is exclusive to one party,
// after reaching it, claiming may be contentious.
pub(crate) absolute_timelock: u32,
// Timeout tx must have nLocktime set which means aggregating multiple
// ones must take the higher nLocktime among them to satisfy all of them.
// Sadly it has a few pitfalls: a) it takes longer to get funds back, b) the CLTV_DELTA
// of a sooner-HTLC could be swallowed by the highest nLocktime of the HTLC set.
// To simplify, we mark them as non-aggregable.
pub(crate) aggregable: bool,
// Basic bitcoin outpoint (txid, vout)
pub(crate) outpoint: BitcoinOutPoint,
// Depending on the outpoint type, the set of data needed to generate the transaction digest
// and satisfy the witness program.
pub(crate) witness_data: InputMaterial
}
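The removed fields map directly onto the arguments of the new constructor used throughout this commit (argument order taken from the call sites; the parameter names below are descriptive guesses, not the names declared in ln/package.rs):

    // ClaimRequest { absolute_timelock, aggregable, outpoint, witness_data }
    // becomes, roughly:
    // PackageTemplate::build_package(
    //     outpoint.txid,                 // txid containing the output to claim
    //     outpoint.vout,                 // index of that output
    //     PackageSolvingData::...(...),  // replaces witness_data / InputMaterial
    //     absolute_timelock,             // same meaning as before
    //     aggregable,                    // same meaning as before
    //     height,                        // block height at generation time (0 where no height is available at the call site)
    // )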
/// An entry for an [`OnchainEvent`], stating the block height when the event was observed and the
/// transaction causing it.
///
@ -1554,10 +1408,10 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
// *we* sign a holder commitment transaction, not when e.g. a watchtower broadcasts one of our
// holder commitment transactions.
if self.broadcasted_holder_revokable_script.is_some() {
let (claim_reqs, _) = self.get_broadcasted_holder_claims(&self.current_holder_commitment_tx);
let (claim_reqs, _) = self.get_broadcasted_holder_claims(&self.current_holder_commitment_tx, 0);
self.onchain_tx_handler.update_claims_view(&Vec::new(), claim_reqs, None, broadcaster, fee_estimator, logger);
if let Some(ref tx) = self.prev_holder_signed_commitment_tx {
let (claim_reqs, _) = self.get_broadcasted_holder_claims(&tx);
let (claim_reqs, _) = self.get_broadcasted_holder_claims(&tx, 0);
self.onchain_tx_handler.update_claims_view(&Vec::new(), claim_reqs, None, broadcaster, fee_estimator, logger);
}
}
@ -1684,7 +1538,7 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
/// HTLC-Success/HTLC-Timeout transactions.
/// Return updates for HTLC pending in the channel and failed automatically by the broadcast of
/// revoked counterparty commitment tx
fn check_spend_counterparty_transaction<L: Deref>(&mut self, tx: &Transaction, height: u32, logger: &L) -> (Vec<ClaimRequest>, TransactionOutputs) where L::Target: Logger {
fn check_spend_counterparty_transaction<L: Deref>(&mut self, tx: &Transaction, height: u32, logger: &L) -> (Vec<PackageTemplate>, TransactionOutputs) where L::Target: Logger {
// Most secp and related errors trying to create keys means we have no hope of constructing
// a spend transaction...so we return no transactions to broadcast
let mut claimable_outpoints = Vec::new();
@ -1716,8 +1570,9 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
// First, process non-htlc outputs (to_holder & to_counterparty)
for (idx, outp) in tx.output.iter().enumerate() {
if outp.script_pubkey == revokeable_p2wsh {
let witness_data = InputMaterial::Revoked { per_commitment_point, counterparty_delayed_payment_base_key: self.counterparty_tx_cache.counterparty_delayed_payment_base_key, counterparty_htlc_base_key: self.counterparty_tx_cache.counterparty_htlc_base_key, per_commitment_key, input_descriptor: InputDescriptors::RevokedOutput, amount: outp.value, htlc: None, on_counterparty_tx_csv: self.counterparty_tx_cache.on_counterparty_tx_csv};
claimable_outpoints.push(ClaimRequest { absolute_timelock: height + self.counterparty_tx_cache.on_counterparty_tx_csv as u32, aggregable: true, outpoint: BitcoinOutPoint { txid: commitment_txid, vout: idx as u32 }, witness_data});
let revk_outp = RevokedOutput::build(per_commitment_point, self.counterparty_tx_cache.counterparty_delayed_payment_base_key, self.counterparty_tx_cache.counterparty_htlc_base_key, per_commitment_key, outp.value, self.counterparty_tx_cache.on_counterparty_tx_csv);
let justice_package = PackageTemplate::build_package(commitment_txid, idx as u32, PackageSolvingData::RevokedOutput(revk_outp), height + self.counterparty_tx_cache.on_counterparty_tx_csv as u32, true, height);
claimable_outpoints.push(justice_package);
}
}
@ -1729,8 +1584,9 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
tx.output[transaction_output_index as usize].value != htlc.amount_msat / 1000 {
return (claimable_outpoints, (commitment_txid, watch_outputs)); // Corrupted per_commitment_data, fuck this user
}
let witness_data = InputMaterial::Revoked { per_commitment_point, counterparty_delayed_payment_base_key: self.counterparty_tx_cache.counterparty_delayed_payment_base_key, counterparty_htlc_base_key: self.counterparty_tx_cache.counterparty_htlc_base_key, per_commitment_key, input_descriptor: if htlc.offered { InputDescriptors::RevokedOfferedHTLC } else { InputDescriptors::RevokedReceivedHTLC }, amount: tx.output[transaction_output_index as usize].value, htlc: Some(htlc.clone()), on_counterparty_tx_csv: self.counterparty_tx_cache.on_counterparty_tx_csv};
claimable_outpoints.push(ClaimRequest { absolute_timelock: htlc.cltv_expiry, aggregable: true, outpoint: BitcoinOutPoint { txid: commitment_txid, vout: transaction_output_index }, witness_data });
let revk_htlc_outp = RevokedHTLCOutput::build(per_commitment_point, self.counterparty_tx_cache.counterparty_delayed_payment_base_key, self.counterparty_tx_cache.counterparty_htlc_base_key, per_commitment_key, htlc.amount_msat / 1000, htlc.clone());
let justice_package = PackageTemplate::build_package(commitment_txid, transaction_output_index, PackageSolvingData::RevokedHTLCOutput(revk_htlc_outp), htlc.cltv_expiry, true, height);
claimable_outpoints.push(justice_package);
}
}
}
@ -1852,8 +1708,8 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
(claimable_outpoints, (commitment_txid, watch_outputs))
}
fn get_counterparty_htlc_output_claim_reqs(&self, commitment_number: u64, commitment_txid: Txid, tx: Option<&Transaction>) -> Vec<ClaimRequest> {
let mut claims = Vec::new();
fn get_counterparty_htlc_output_claim_reqs(&self, commitment_number: u64, commitment_txid: Txid, tx: Option<&Transaction>) -> Vec<PackageTemplate> {
let mut claimable_outpoints = Vec::new();
if let Some(htlc_outputs) = self.counterparty_claimable_outpoints.get(&commitment_txid) {
if let Some(revocation_points) = self.their_cur_revocation_points {
let revocation_point_option =
@ -1872,30 +1728,26 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
if let Some(transaction) = tx {
if transaction_output_index as usize >= transaction.output.len() ||
transaction.output[transaction_output_index as usize].value != htlc.amount_msat / 1000 {
return claims; // Corrupted per_commitment_data, fuck this user
return claimable_outpoints; // Corrupted per_commitment_data, fuck this user
}
}
let preimage =
if htlc.offered {
if let Some(p) = self.payment_preimages.get(&htlc.payment_hash) {
Some(*p)
} else { None }
} else { None };
let aggregable = if !htlc.offered { false } else { true };
let preimage = if htlc.offered { if let Some(p) = self.payment_preimages.get(&htlc.payment_hash) { Some(*p) } else { None } } else { None };
if preimage.is_some() || !htlc.offered {
let witness_data = InputMaterial::CounterpartyHTLC { per_commitment_point: *revocation_point, counterparty_delayed_payment_base_key: self.counterparty_tx_cache.counterparty_delayed_payment_base_key, counterparty_htlc_base_key: self.counterparty_tx_cache.counterparty_htlc_base_key, preimage, htlc: htlc.clone() };
claims.push(ClaimRequest { absolute_timelock: htlc.cltv_expiry, aggregable, outpoint: BitcoinOutPoint { txid: commitment_txid, vout: transaction_output_index }, witness_data });
let counterparty_htlc_outp = if htlc.offered { PackageSolvingData::CounterpartyOfferedHTLCOutput(CounterpartyOfferedHTLCOutput::build(*revocation_point, self.counterparty_tx_cache.counterparty_delayed_payment_base_key, self.counterparty_tx_cache.counterparty_htlc_base_key, preimage.unwrap(), htlc.clone())) } else { PackageSolvingData::CounterpartyReceivedHTLCOutput(CounterpartyReceivedHTLCOutput::build(*revocation_point, self.counterparty_tx_cache.counterparty_delayed_payment_base_key, self.counterparty_tx_cache.counterparty_htlc_base_key, htlc.clone())) };
let aggregation = if !htlc.offered { false } else { true };
let counterparty_package = PackageTemplate::build_package(commitment_txid, transaction_output_index, counterparty_htlc_outp, htlc.cltv_expiry, aggregation, 0);
claimable_outpoints.push(counterparty_package);
}
}
}
}
}
}
claims
claimable_outpoints
}
/// Attempts to claim a counterparty HTLC-Success/HTLC-Timeout's outputs using the revocation key
fn check_spend_counterparty_htlc<L: Deref>(&mut self, tx: &Transaction, commitment_number: u64, height: u32, logger: &L) -> (Vec<ClaimRequest>, Option<TransactionOutputs>) where L::Target: Logger {
fn check_spend_counterparty_htlc<L: Deref>(&mut self, tx: &Transaction, commitment_number: u64, height: u32, logger: &L) -> (Vec<PackageTemplate>, Option<TransactionOutputs>) where L::Target: Logger {
let htlc_txid = tx.txid();
if tx.input.len() != 1 || tx.output.len() != 1 || tx.input[0].witness.len() != 5 {
return (Vec::new(), None)
@ -1915,16 +1767,17 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
let per_commitment_point = PublicKey::from_secret_key(&self.secp_ctx, &per_commitment_key);
log_trace!(logger, "Counterparty HTLC broadcast {}:{}", htlc_txid, 0);
let witness_data = InputMaterial::Revoked { per_commitment_point, counterparty_delayed_payment_base_key: self.counterparty_tx_cache.counterparty_delayed_payment_base_key, counterparty_htlc_base_key: self.counterparty_tx_cache.counterparty_htlc_base_key, per_commitment_key, input_descriptor: InputDescriptors::RevokedOutput, amount: tx.output[0].value, htlc: None, on_counterparty_tx_csv: self.counterparty_tx_cache.on_counterparty_tx_csv };
let claimable_outpoints = vec!(ClaimRequest { absolute_timelock: height + self.counterparty_tx_cache.on_counterparty_tx_csv as u32, aggregable: true, outpoint: BitcoinOutPoint { txid: htlc_txid, vout: 0}, witness_data });
let revk_outp = RevokedOutput::build(per_commitment_point, self.counterparty_tx_cache.counterparty_delayed_payment_base_key, self.counterparty_tx_cache.counterparty_htlc_base_key, per_commitment_key, tx.output[0].value, self.counterparty_tx_cache.on_counterparty_tx_csv);
let justice_package = PackageTemplate::build_package(htlc_txid, 0, PackageSolvingData::RevokedOutput(revk_outp), height + self.counterparty_tx_cache.on_counterparty_tx_csv as u32, true, height);
let claimable_outpoints = vec!(justice_package);
let outputs = vec![(0, tx.output[0].clone())];
(claimable_outpoints, Some((htlc_txid, outputs)))
}
// Returns (1) `ClaimRequest`s that can be given to the OnChainTxHandler, so that the handler can
// Returns (1) `PackageTemplate`s that can be given to the OnChainTxHandler, so that the handler can
// broadcast transactions claiming holder HTLC commitment outputs and (2) a holder revokable
// script so we can detect whether a holder transaction has been seen on-chain.
fn get_broadcasted_holder_claims(&self, holder_tx: &HolderSignedTx) -> (Vec<ClaimRequest>, Option<(Script, PublicKey, PublicKey)>) {
fn get_broadcasted_holder_claims(&self, holder_tx: &HolderSignedTx, height: u32) -> (Vec<PackageTemplate>, Option<(Script, PublicKey, PublicKey)>) {
let mut claim_requests = Vec::with_capacity(holder_tx.htlc_outputs.len());
let redeemscript = chan_utils::get_revokeable_redeemscript(&holder_tx.revocation_key, self.on_holder_tx_csv, &holder_tx.delayed_payment_key);
@ -1932,18 +1785,16 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
for &(ref htlc, _, _) in holder_tx.htlc_outputs.iter() {
if let Some(transaction_output_index) = htlc.transaction_output_index {
claim_requests.push(ClaimRequest { absolute_timelock: ::core::u32::MAX, aggregable: false, outpoint: BitcoinOutPoint { txid: holder_tx.txid, vout: transaction_output_index as u32 },
witness_data: InputMaterial::HolderHTLC {
preimage: if !htlc.offered {
if let Some(preimage) = self.payment_preimages.get(&htlc.payment_hash) {
Some(preimage.clone())
} else {
// We can't build an HTLC-Success transaction without the preimage
continue;
}
} else { None },
amount: htlc.amount_msat,
}});
let htlc_output = HolderHTLCOutput::build(if !htlc.offered {
if let Some(preimage) = self.payment_preimages.get(&htlc.payment_hash) {
Some(preimage.clone())
} else {
// We can't build an HTLC-Success transaction without the preimage
continue;
}
} else { None }, htlc.amount_msat);
let htlc_package = PackageTemplate::build_package(holder_tx.txid, transaction_output_index, PackageSolvingData::HolderHTLCOutput(htlc_output), height, false, height);
claim_requests.push(htlc_package);
}
}
@ -1964,7 +1815,7 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
/// Attempts to claim any claimable HTLCs in a commitment transaction which was not (yet)
/// revoked using data in holder_claimable_outpoints.
/// Should not be used if check_spend_revoked_transaction succeeds.
fn check_spend_holder_transaction<L: Deref>(&mut self, tx: &Transaction, height: u32, logger: &L) -> (Vec<ClaimRequest>, TransactionOutputs) where L::Target: Logger {
fn check_spend_holder_transaction<L: Deref>(&mut self, tx: &Transaction, height: u32, logger: &L) -> (Vec<PackageTemplate>, TransactionOutputs) where L::Target: Logger {
let commitment_txid = tx.txid();
let mut claim_requests = Vec::new();
let mut watch_outputs = Vec::new();
@ -2004,14 +1855,14 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
if self.current_holder_commitment_tx.txid == commitment_txid {
is_holder_tx = true;
log_trace!(logger, "Got latest holder commitment tx broadcast, searching for available HTLCs to claim");
let res = self.get_broadcasted_holder_claims(&self.current_holder_commitment_tx);
let res = self.get_broadcasted_holder_claims(&self.current_holder_commitment_tx, height);
let mut to_watch = self.get_broadcasted_holder_watch_outputs(&self.current_holder_commitment_tx, tx);
append_onchain_update!(res, to_watch);
} else if let &Some(ref holder_tx) = &self.prev_holder_signed_commitment_tx {
if holder_tx.txid == commitment_txid {
is_holder_tx = true;
log_trace!(logger, "Got previous holder commitment tx broadcast, searching for available HTLCs to claim");
let res = self.get_broadcasted_holder_claims(holder_tx);
let res = self.get_broadcasted_holder_claims(holder_tx, height);
let mut to_watch = self.get_broadcasted_holder_watch_outputs(holder_tx, tx);
append_onchain_update!(res, to_watch);
}
@ -2203,7 +2054,7 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
height: u32,
txn_matched: Vec<&Transaction>,
mut watch_outputs: Vec<TransactionOutputs>,
mut claimable_outpoints: Vec<ClaimRequest>,
mut claimable_outpoints: Vec<PackageTemplate>,
broadcaster: B,
fee_estimator: F,
logger: L,
@ -2215,11 +2066,13 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
{
let should_broadcast = self.would_broadcast_at_height(height, &logger);
if should_broadcast {
claimable_outpoints.push(ClaimRequest { absolute_timelock: height, aggregable: false, outpoint: BitcoinOutPoint { txid: self.funding_info.0.txid.clone(), vout: self.funding_info.0.index as u32 }, witness_data: InputMaterial::Funding { funding_redeemscript: self.funding_redeemscript.clone() }});
let funding_outp = HolderFundingOutput::build(self.funding_redeemscript.clone());
let commitment_package = PackageTemplate::build_package(self.funding_info.0.txid.clone(), self.funding_info.0.index as u32, PackageSolvingData::HolderFundingOutput(funding_outp), height, false, height);
claimable_outpoints.push(commitment_package);
self.pending_monitor_events.push(MonitorEvent::CommitmentTxBroadcasted(self.funding_info.0));
let commitment_tx = self.onchain_tx_handler.get_fully_signed_holder_tx(&self.funding_redeemscript);
self.holder_tx_signed = true;
let (mut new_outpoints, _) = self.get_broadcasted_holder_claims(&self.current_holder_commitment_tx);
let (mut new_outpoints, _) = self.get_broadcasted_holder_claims(&self.current_holder_commitment_tx, height);
let new_outputs = self.get_broadcasted_holder_watch_outputs(&self.current_holder_commitment_tx, &commitment_tx);
if !new_outputs.is_empty() {
watch_outputs.push((self.current_holder_commitment_tx.txid.clone(), new_outputs));
@ -3047,6 +2900,7 @@ mod tests {
use bitcoin::network::constants::Network;
use hex;
use chain::channelmonitor::ChannelMonitor;
use chain::onchain_utils::{WEIGHT_OFFERED_HTLC, WEIGHT_RECEIVED_HTLC, WEIGHT_REVOKED_OFFERED_HTLC, WEIGHT_REVOKED_RECEIVED_HTLC, WEIGHT_REVOKED_OUTPUT};
use chain::transaction::OutPoint;
use ln::{PaymentPreimage, PaymentHash};
use ln::channelmanager::BestBlock;
@ -3207,25 +3061,25 @@ mod tests {
let mut sum_actual_sigs = 0;
macro_rules! sign_input {
($sighash_parts: expr, $idx: expr, $amount: expr, $input_type: expr, $sum_actual_sigs: expr) => {
($sighash_parts: expr, $idx: expr, $amount: expr, $weight: expr, $sum_actual_sigs: expr) => {
let htlc = HTLCOutputInCommitment {
offered: if *$input_type == InputDescriptors::RevokedOfferedHTLC || *$input_type == InputDescriptors::OfferedHTLC { true } else { false },
offered: if *$weight == WEIGHT_REVOKED_OFFERED_HTLC || *$weight == WEIGHT_OFFERED_HTLC { true } else { false },
amount_msat: 0,
cltv_expiry: 2 << 16,
payment_hash: PaymentHash([1; 32]),
transaction_output_index: Some($idx as u32),
};
let redeem_script = if *$input_type == InputDescriptors::RevokedOutput { chan_utils::get_revokeable_redeemscript(&pubkey, 256, &pubkey) } else { chan_utils::get_htlc_redeemscript_with_explicit_keys(&htlc, &pubkey, &pubkey, &pubkey) };
let redeem_script = if *$weight == WEIGHT_REVOKED_OUTPUT { chan_utils::get_revokeable_redeemscript(&pubkey, 256, &pubkey) } else { chan_utils::get_htlc_redeemscript_with_explicit_keys(&htlc, &pubkey, &pubkey, &pubkey) };
let sighash = hash_to_message!(&$sighash_parts.signature_hash($idx, &redeem_script, $amount, SigHashType::All)[..]);
let sig = secp_ctx.sign(&sighash, &privkey);
$sighash_parts.access_witness($idx).push(sig.serialize_der().to_vec());
$sighash_parts.access_witness($idx)[0].push(SigHashType::All as u8);
sum_actual_sigs += $sighash_parts.access_witness($idx)[0].len();
if *$input_type == InputDescriptors::RevokedOutput {
if *$weight == WEIGHT_REVOKED_OUTPUT {
$sighash_parts.access_witness($idx).push(vec!(1));
} else if *$input_type == InputDescriptors::RevokedOfferedHTLC || *$input_type == InputDescriptors::RevokedReceivedHTLC {
} else if *$weight == WEIGHT_REVOKED_OFFERED_HTLC || *$weight == WEIGHT_REVOKED_RECEIVED_HTLC {
$sighash_parts.access_witness($idx).push(pubkey.clone().serialize().to_vec());
} else if *$input_type == InputDescriptors::ReceivedHTLC {
} else if *$weight == WEIGHT_RECEIVED_HTLC {
$sighash_parts.access_witness($idx).push(vec![0]);
} else {
$sighash_parts.access_witness($idx).push(PaymentPreimage([1; 32]).0.to_vec());
@ -3258,14 +3112,16 @@ mod tests {
value: 0,
});
let base_weight = claim_tx.get_weight();
let inputs_des = vec![InputDescriptors::RevokedOutput, InputDescriptors::RevokedOfferedHTLC, InputDescriptors::RevokedOfferedHTLC, InputDescriptors::RevokedReceivedHTLC];
let inputs_weight = vec![WEIGHT_REVOKED_OUTPUT, WEIGHT_REVOKED_OFFERED_HTLC, WEIGHT_REVOKED_OFFERED_HTLC, WEIGHT_REVOKED_RECEIVED_HTLC];
let mut inputs_total_weight = 2; // count segwit flags
{
let mut sighash_parts = bip143::SigHashCache::new(&mut claim_tx);
for (idx, inp) in inputs_des.iter().enumerate() {
for (idx, inp) in inputs_weight.iter().enumerate() {
sign_input!(sighash_parts, idx, 0, inp, sum_actual_sigs);
inputs_total_weight += inp;
}
}
assert_eq!(base_weight + package::get_witnesses_weight(&inputs_des[..]), claim_tx.get_weight() + /* max_length_sig */ (73 * inputs_des.len() - sum_actual_sigs));
assert_eq!(base_weight + inputs_total_weight as usize, claim_tx.get_weight() + /* max_length_sig */ (73 * inputs_weight.len() - sum_actual_sigs));
// Claim tx with 1 offered HTLCs, 3 received HTLCs
claim_tx.input.clear();
@ -3282,14 +3138,16 @@ mod tests {
});
}
let base_weight = claim_tx.get_weight();
let inputs_des = vec![InputDescriptors::OfferedHTLC, InputDescriptors::ReceivedHTLC, InputDescriptors::ReceivedHTLC, InputDescriptors::ReceivedHTLC];
let inputs_weight = vec![WEIGHT_OFFERED_HTLC, WEIGHT_RECEIVED_HTLC, WEIGHT_RECEIVED_HTLC, WEIGHT_RECEIVED_HTLC];
let mut inputs_total_weight = 2; // count segwit flags
{
let mut sighash_parts = bip143::SigHashCache::new(&mut claim_tx);
for (idx, inp) in inputs_des.iter().enumerate() {
for (idx, inp) in inputs_weight.iter().enumerate() {
sign_input!(sighash_parts, idx, 0, inp, sum_actual_sigs);
inputs_total_weight += inp;
}
}
assert_eq!(base_weight + package::get_witnesses_weight(&inputs_des[..]), claim_tx.get_weight() + /* max_length_sig */ (73 * inputs_des.len() - sum_actual_sigs));
assert_eq!(base_weight + inputs_total_weight as usize, claim_tx.get_weight() + /* max_length_sig */ (73 * inputs_weight.len() - sum_actual_sigs));
// Justice tx with 1 revoked HTLC-Success tx output
claim_tx.input.clear();
@ -3304,14 +3162,16 @@ mod tests {
witness: Vec::new(),
});
let base_weight = claim_tx.get_weight();
let inputs_des = vec![InputDescriptors::RevokedOutput];
let inputs_weight = vec![WEIGHT_REVOKED_OUTPUT];
let mut inputs_total_weight = 2; // count segwit flags
{
let mut sighash_parts = bip143::SigHashCache::new(&mut claim_tx);
for (idx, inp) in inputs_des.iter().enumerate() {
for (idx, inp) in inputs_weight.iter().enumerate() {
sign_input!(sighash_parts, idx, 0, inp, sum_actual_sigs);
inputs_total_weight += inp;
}
}
assert_eq!(base_weight + package::get_witnesses_weight(&inputs_des[..]), claim_tx.get_weight() + /* max_length_isg */ (73 * inputs_des.len() - sum_actual_sigs));
assert_eq!(base_weight + inputs_total_weight as usize, claim_tx.get_weight() + /* max_length_isg */ (73 * inputs_weight.len() - sum_actual_sigs));
}
// Further testing is done in the ChannelManager integration tests.
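For reference, the expected per-input witness weights asserted above can be reproduced from the breakdowns that the removed get_witnesses_weight() used (shown at the end of this commit); since the commit is stated to be non-behavioral, the new WEIGHT_* constants in chain::onchain_utils are assumed to keep the same values:

    // number_of_witness_elements + sig_len + sig + item_len + item + script_len + script
    // WEIGHT_REVOKED_OFFERED_HTLC  = 1 + 1 + 73 + 1 + 33 + 1 + 133 = 243
    // WEIGHT_REVOKED_RECEIVED_HTLC = 1 + 1 + 73 + 1 + 33 + 1 + 139 = 249
    // WEIGHT_OFFERED_HTLC          = 1 + 1 + 73 + 1 + 32 + 1 + 133 = 242
    // WEIGHT_RECEIVED_HTLC         = 1 + 1 + 73 + 1 +  1 + 1 + 139 = 217
    // WEIGHT_REVOKED_OUTPUT        = 1 + 1 + 73 + 1 +  1 + 1 +  77 = 155
    //
    // e.g. the first claim tx above spends one revoked output, two revoked offered HTLCs and
    // one revoked received HTLC, so `inputs_total_weight` ends up as
    // 2 (segwit marker + flag) + 155 + 243 + 243 + 249 = 892 weight units of witness data.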

@ -27,8 +27,8 @@ pub mod msgs;
pub mod peer_handler;
pub mod chan_utils;
pub mod features;
pub(crate) mod onchaintx;
pub(crate) mod package;
pub mod onchaintx;
pub mod package;
#[cfg(feature = "fuzztarget")]
pub mod peer_channel_encryptor;

@ -12,7 +12,7 @@
//! OnchainTxHandler objects are fully-part of ChannelMonitor and encapsulates all
//! building, tracking, bumping and notifications functions.
use bitcoin::blockdata::transaction::{Transaction, TxIn, TxOut, SigHashType};
use bitcoin::blockdata::transaction::Transaction;
use bitcoin::blockdata::transaction::OutPoint as BitcoinOutPoint;
use bitcoin::blockdata::script::Script;
@ -23,12 +23,11 @@ use bitcoin::secp256k1;
use ln::msgs::DecodeError;
use ln::PaymentPreimage;
use ln::chan_utils;
use ln::chan_utils::{TxCreationKeys, ChannelTransactionParameters, HolderCommitmentTransaction};
use ln::package::InputDescriptors;
use ln::chan_utils::{ChannelTransactionParameters, HolderCommitmentTransaction};
use ln::package;
use chain::chaininterface::{FeeEstimator, BroadcasterInterface, ConfirmationTarget, MIN_RELAY_FEE_SAT_PER_1000_WEIGHT};
use chain::channelmonitor::{ANTI_REORG_DELAY, CLTV_SHARED_CLAIM_BUFFER, InputMaterial, ClaimRequest};
use ln::package::PackageTemplate;
use chain::chaininterface::{FeeEstimator, BroadcasterInterface};
use chain::channelmonitor::{ANTI_REORG_DELAY, CLTV_SHARED_CLAIM_BUFFER};
use chain::keysinterface::{Sign, KeysInterface};
use util::logger::Logger;
use util::ser::{Readable, ReadableArgs, Writer, Writeable, VecWriter};
@ -75,88 +74,7 @@ enum OnchainEvent {
/// In this case, we need to drop the outpoint and regenerate a new claim tx. For safety, we keep tracking
/// the outpoint to be sure to resurrect it back to the claim tx if reorgs happen.
ContentiousOutpoint {
outpoint: BitcoinOutPoint,
input_material: InputMaterial,
}
}
/// Higher-level cache structure needed to re-generate bumped claim txn if needed
#[derive(Clone, PartialEq)]
pub struct ClaimTxBumpMaterial {
// At every block tick, used to check if pending claiming tx is taking too
// much time for confirmation and we need to bump it.
height_timer: Option<u32>,
// Tracked in case of reorg to wipe out now-superfluous bump material
feerate_previous: u32,
// Soonest timelock among the set of outpoints claimed, used to compute
// a claim priority, not a feerate
soonest_timelock: u32,
// Cache of script, pubkey, sig or key to solve claimable outputs scriptpubkey.
per_input_material: HashMap<BitcoinOutPoint, InputMaterial>,
}
impl Writeable for ClaimTxBumpMaterial {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> {
self.height_timer.write(writer)?;
writer.write_all(&byte_utils::be32_to_array(self.feerate_previous))?;
writer.write_all(&byte_utils::be32_to_array(self.soonest_timelock))?;
writer.write_all(&byte_utils::be64_to_array(self.per_input_material.len() as u64))?;
for (outp, tx_material) in self.per_input_material.iter() {
outp.write(writer)?;
tx_material.write(writer)?;
}
Ok(())
}
}
impl Readable for ClaimTxBumpMaterial {
fn read<R: ::std::io::Read>(reader: &mut R) -> Result<Self, DecodeError> {
let height_timer = Readable::read(reader)?;
let feerate_previous = Readable::read(reader)?;
let soonest_timelock = Readable::read(reader)?;
let per_input_material_len: u64 = Readable::read(reader)?;
let mut per_input_material = HashMap::with_capacity(cmp::min(per_input_material_len as usize, MAX_ALLOC_SIZE / 128));
for _ in 0 ..per_input_material_len {
let outpoint = Readable::read(reader)?;
let input_material = Readable::read(reader)?;
per_input_material.insert(outpoint, input_material);
}
Ok(Self { height_timer, feerate_previous, soonest_timelock, per_input_material })
}
}
macro_rules! subtract_high_prio_fee {
($logger: ident, $fee_estimator: expr, $value: expr, $predicted_weight: expr, $used_feerate: expr) => {
{
$used_feerate = $fee_estimator.get_est_sat_per_1000_weight(ConfirmationTarget::HighPriority).into();
let mut fee = $used_feerate as u64 * $predicted_weight / 1000;
if $value <= fee {
$used_feerate = $fee_estimator.get_est_sat_per_1000_weight(ConfirmationTarget::Normal).into();
fee = $used_feerate as u64 * $predicted_weight / 1000;
if $value <= fee.into() {
$used_feerate = $fee_estimator.get_est_sat_per_1000_weight(ConfirmationTarget::Background).into();
fee = $used_feerate as u64 * $predicted_weight / 1000;
if $value <= fee {
log_error!($logger, "Failed to generate an on-chain punishment tx as even low priority fee ({} sat) was more than the entire claim balance ({} sat)",
fee, $value);
false
} else {
log_warn!($logger, "Used low priority fee for on-chain punishment tx as high priority fee was more than the entire claim balance ({} sat)",
$value);
$value -= fee;
true
}
} else {
log_warn!($logger, "Used medium priority fee for on-chain punishment tx as high priority fee was more than the entire claim balance ({} sat)",
$value);
$value -= fee;
true
}
} else {
$value -= fee;
true
}
}
package: PackageTemplate,
}
}
@ -232,9 +150,9 @@ pub struct OnchainTxHandler<ChannelSigner: Sign> {
// us and is immutable until all outpoints of the claimable set are post-anti-reorg-delay solved.
// Entry is a cache of elements needed to generate a bumped claiming transaction (see ClaimTxBumpMaterial)
#[cfg(test)] // Used in functional_test to verify sanitization
pub pending_claim_requests: HashMap<Txid, ClaimTxBumpMaterial>,
pub(crate) pending_claim_requests: HashMap<Txid, PackageTemplate>,
#[cfg(not(test))]
pending_claim_requests: HashMap<Txid, ClaimTxBumpMaterial>,
pending_claim_requests: HashMap<Txid, PackageTemplate>,
// Used to link outpoints claimed in a connected block to a pending claim request.
// Key is the outpoint that monitor parsing has detected we have keys/scripts to claim
@ -277,9 +195,9 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
writer.write_all(&key_data.0[..])?;
writer.write_all(&byte_utils::be64_to_array(self.pending_claim_requests.len() as u64))?;
for (ref ancestor_claim_txid, claim_tx_data) in self.pending_claim_requests.iter() {
for (ref ancestor_claim_txid, request) in self.pending_claim_requests.iter() {
ancestor_claim_txid.write(writer)?;
claim_tx_data.write(writer)?;
request.write(writer)?;
}
writer.write_all(&byte_utils::be64_to_array(self.claimable_outpoints.len() as u64))?;
@ -298,10 +216,9 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
writer.write_all(&[0; 1])?;
claim_request.write(writer)?;
},
OnchainEvent::ContentiousOutpoint { ref outpoint, ref input_material } => {
OnchainEvent::ContentiousOutpoint { ref package } => {
writer.write_all(&[1; 1])?;
outpoint.write(writer)?;
input_material.write(writer)?;
package.write(writer)?;
}
}
}
@ -363,11 +280,9 @@ impl<'a, K: KeysInterface> ReadableArgs<&'a K> for OnchainTxHandler<K::Signer> {
}
},
1 => {
let outpoint = Readable::read(reader)?;
let input_material = Readable::read(reader)?;
let package = Readable::read(reader)?;
OnchainEvent::ContentiousOutpoint {
outpoint,
input_material
package
}
}
_ => return Err(DecodeError::InvalidValue),
@ -435,188 +350,33 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
/// (CSV or CLTV following cases). In case of high-fee spikes, the claim tx may get stuck in the mempool, so you need to bump its feerate quickly using Replace-By-Fee or Child-Pays-For-Parent.
/// Panics if there are signing errors, because signing operations in reaction to on-chain events
/// are not expected to fail, and if they do, we may lose funds.
fn generate_claim_tx<F: Deref, L: Deref>(&mut self, height: u32, cached_claim_datas: &ClaimTxBumpMaterial, fee_estimator: &F, logger: &L) -> Option<(Option<u32>, u32, Transaction)>
fn generate_claim_tx<F: Deref, L: Deref>(&mut self, height: u32, cached_request: &PackageTemplate, fee_estimator: &F, logger: &L) -> Option<(Option<u32>, u64, Transaction)>
where F::Target: FeeEstimator,
L::Target: Logger,
{
if cached_claim_datas.per_input_material.len() == 0 { return None } // But don't prune pending claiming request yet, we may have to resurrect HTLCs
let mut inputs = Vec::new();
for outp in cached_claim_datas.per_input_material.keys() {
log_trace!(logger, "Outpoint {}:{}", outp.txid, outp.vout);
inputs.push(TxIn {
previous_output: *outp,
script_sig: Script::new(),
sequence: 0xfffffffd,
witness: Vec::new(),
});
}
let mut bumped_tx = Transaction {
version: 2,
lock_time: 0,
input: inputs,
output: vec![TxOut {
script_pubkey: self.destination_script.clone(),
value: 0
}],
};
macro_rules! RBF_bump {
($amount: expr, $old_feerate: expr, $fee_estimator: expr, $predicted_weight: expr) => {
{
let mut used_feerate: u32;
// If old feerate inferior to actual one given back by Fee Estimator, use it to compute new fee...
let new_fee = if $old_feerate < $fee_estimator.get_est_sat_per_1000_weight(ConfirmationTarget::HighPriority) {
let mut value = $amount;
if subtract_high_prio_fee!(logger, $fee_estimator, value, $predicted_weight, used_feerate) {
// Overflow check is done in subtract_high_prio_fee
($amount - value)
} else {
log_trace!(logger, "Can't new-estimation bump new claiming tx, amount {} is too small", $amount);
return None;
}
// ...else just increase the previous feerate by 25% (because that's a nice number)
} else {
let fee = $old_feerate as u64 * ($predicted_weight as u64) / 750;
if $amount <= fee {
log_trace!(logger, "Can't 25% bump new claiming tx, amount {} is too small", $amount);
return None;
}
fee
};
let previous_fee = $old_feerate as u64 * ($predicted_weight as u64) / 1000;
let min_relay_fee = MIN_RELAY_FEE_SAT_PER_1000_WEIGHT * ($predicted_weight as u64) / 1000;
// BIP 125 Opt-in Full Replace-by-Fee Signaling
// * 3. The replacement transaction pays an absolute fee of at least the sum paid by the original transactions.
// * 4. The replacement transaction must also pay for its own bandwidth at or above the rate set by the node's minimum relay fee setting.
let new_fee = if new_fee < previous_fee + min_relay_fee {
new_fee + previous_fee + min_relay_fee - new_fee
} else {
new_fee
};
Some((new_fee, new_fee * 1000 / ($predicted_weight as u64)))
}
}
}
if cached_request.outpoints().len() == 0 { return None } // But don't prune pending claiming request yet, we may have to resurrect HTLCs
// Compute new height timer to decide when we need to regenerate a new bumped version of the claim tx (if we
// didn't receive confirmation of it before, or not enough reorg-safe depth on top of it).
let new_timer = Some(Self::get_height_timer(height, cached_claim_datas.soonest_timelock));
let mut inputs_witnesses_weight = 0;
let mut amt = 0;
let mut dynamic_fee = true;
for per_outp_material in cached_claim_datas.per_input_material.values() {
match per_outp_material {
&InputMaterial::Revoked { ref input_descriptor, ref amount, .. } => {
inputs_witnesses_weight += package::get_witnesses_weight(&[*input_descriptor]);
amt += *amount;
},
&InputMaterial::CounterpartyHTLC { ref preimage, ref htlc, .. } => {
inputs_witnesses_weight += package::get_witnesses_weight(if preimage.is_some() { &[InputDescriptors::OfferedHTLC] } else { &[InputDescriptors::ReceivedHTLC] });
amt += htlc.amount_msat / 1000;
},
&InputMaterial::HolderHTLC { .. } => {
dynamic_fee = false;
},
&InputMaterial::Funding { .. } => {
dynamic_fee = false;
}
let new_timer = Some(Self::get_height_timer(height, cached_request.timelock()));
let amt = cached_request.package_amount();
if cached_request.is_malleable() {
let predicted_weight = cached_request.package_weight(&self.destination_script);
if let Some((output_value, new_feerate)) = package::compute_output_value(predicted_weight, amt, cached_request.feerate(), fee_estimator, logger) {
assert!(new_feerate != 0);
let transaction = cached_request.finalize_package(self, output_value, self.destination_script.clone(), logger).unwrap();
log_trace!(logger, "...with timer {} and feerate {}", new_timer.unwrap(), new_feerate);
assert!(predicted_weight >= transaction.get_weight());
return Some((new_timer, new_feerate, transaction))
}
}
if dynamic_fee {
let predicted_weight = (bumped_tx.get_weight() + inputs_witnesses_weight) as u64;
let mut new_feerate;
// If old feerate is 0, first iteration of this claim, use normal fee calculation
if cached_claim_datas.feerate_previous != 0 {
if let Some((new_fee, feerate)) = RBF_bump!(amt, cached_claim_datas.feerate_previous, fee_estimator, predicted_weight) {
// If the newly computed fee exceeds the whole claimable amount, burn it all in fees
if new_fee as u64 > amt {
bumped_tx.output[0].value = 0;
} else {
bumped_tx.output[0].value = amt - new_fee as u64;
}
new_feerate = feerate;
} else { return None; }
} else {
if subtract_high_prio_fee!(logger, fee_estimator, amt, predicted_weight, new_feerate) {
bumped_tx.output[0].value = amt;
} else { return None; }
}
assert!(new_feerate != 0);
for (i, (outp, per_outp_material)) in cached_claim_datas.per_input_material.iter().enumerate() {
match per_outp_material {
&InputMaterial::Revoked { ref per_commitment_point, ref counterparty_delayed_payment_base_key, ref counterparty_htlc_base_key, ref per_commitment_key, ref input_descriptor, ref amount, ref htlc, ref on_counterparty_tx_csv } => {
if let Ok(tx_keys) = TxCreationKeys::derive_new(&self.secp_ctx, &per_commitment_point, counterparty_delayed_payment_base_key, counterparty_htlc_base_key, &self.signer.pubkeys().revocation_basepoint, &self.signer.pubkeys().htlc_basepoint) {
let witness_script = if let Some(ref htlc) = *htlc {
chan_utils::get_htlc_redeemscript_with_explicit_keys(&htlc, &tx_keys.broadcaster_htlc_key, &tx_keys.countersignatory_htlc_key, &tx_keys.revocation_key)
} else {
chan_utils::get_revokeable_redeemscript(&tx_keys.revocation_key, *on_counterparty_tx_csv, &tx_keys.broadcaster_delayed_payment_key)
};
let sig = if let Some(ref htlc) = *htlc {
self.signer.sign_justice_revoked_htlc(&bumped_tx, i, *amount, &per_commitment_key, &htlc, &self.secp_ctx).expect("sign justice tx")
} else {
self.signer.sign_justice_revoked_output(&bumped_tx, i, *amount, &per_commitment_key, &self.secp_ctx).expect("sign justice tx")
};
bumped_tx.input[i].witness.push(sig.serialize_der().to_vec());
bumped_tx.input[i].witness[0].push(SigHashType::All as u8);
if htlc.is_some() {
bumped_tx.input[i].witness.push(tx_keys.revocation_key.clone().serialize().to_vec());
} else {
bumped_tx.input[i].witness.push(vec!(1));
}
bumped_tx.input[i].witness.push(witness_script.clone().into_bytes());
log_trace!(logger, "Going to broadcast Penalty Transaction {} claiming revoked {} output {} from {} with new feerate {}...", bumped_tx.txid(), if *input_descriptor == InputDescriptors::RevokedOutput { "to_holder" } else if *input_descriptor == InputDescriptors::RevokedOfferedHTLC { "offered" } else if *input_descriptor == InputDescriptors::RevokedReceivedHTLC { "received" } else { "" }, outp.vout, outp.txid, new_feerate);
}
},
&InputMaterial::CounterpartyHTLC { ref per_commitment_point, ref counterparty_delayed_payment_base_key, ref counterparty_htlc_base_key, ref preimage, ref htlc } => {
if let Ok(tx_keys) = TxCreationKeys::derive_new(&self.secp_ctx, &per_commitment_point, counterparty_delayed_payment_base_key, counterparty_htlc_base_key, &self.signer.pubkeys().revocation_basepoint, &self.signer.pubkeys().htlc_basepoint) {
let witness_script = chan_utils::get_htlc_redeemscript_with_explicit_keys(&htlc, &tx_keys.broadcaster_htlc_key, &tx_keys.countersignatory_htlc_key, &tx_keys.revocation_key);
if !preimage.is_some() { bumped_tx.lock_time = htlc.cltv_expiry }; // Right now we don't aggregate time-locked transaction, if we do we should set lock_time before to avoid breaking hash computation
let sig = self.signer.sign_counterparty_htlc_transaction(&bumped_tx, i, &htlc.amount_msat / 1000, &per_commitment_point, htlc, &self.secp_ctx).expect("sign counterparty HTLC tx");
bumped_tx.input[i].witness.push(sig.serialize_der().to_vec());
bumped_tx.input[i].witness[0].push(SigHashType::All as u8);
if let &Some(preimage) = preimage {
bumped_tx.input[i].witness.push(preimage.0.to_vec());
} else {
// Due to BIP146 (MINIMALIF) this must be a zero-length element to relay.
bumped_tx.input[i].witness.push(vec![]);
}
bumped_tx.input[i].witness.push(witness_script.clone().into_bytes());
log_trace!(logger, "Going to broadcast Claim Transaction {} claiming counterparty {} htlc output {} from {} with new feerate {}...", bumped_tx.txid(), if preimage.is_some() { "offered" } else { "received" }, outp.vout, outp.txid, new_feerate);
}
},
_ => unreachable!()
}
}
log_trace!(logger, "...with timer {}", new_timer.unwrap());
assert!(predicted_weight >= bumped_tx.get_weight() as u64);
return Some((new_timer, new_feerate as u32, bumped_tx))
} else {
for (_, (outp, per_outp_material)) in cached_claim_datas.per_input_material.iter().enumerate() {
match per_outp_material {
&InputMaterial::HolderHTLC { ref preimage, ref amount } => {
let htlc_tx = self.get_fully_signed_htlc_tx(outp, preimage);
if let Some(htlc_tx) = htlc_tx {
let feerate = (amount - htlc_tx.output[0].value) * 1000 / htlc_tx.get_weight() as u64;
// Timer set to $NEVER given we can't bump tx without anchor outputs
log_trace!(logger, "Going to broadcast Holder HTLC-{} claiming HTLC output {} from {}...", if preimage.is_some() { "Success" } else { "Timeout" }, outp.vout, outp.txid);
return Some((None, feerate as u32, htlc_tx));
}
return None;
},
&InputMaterial::Funding { ref funding_redeemscript } => {
let signed_tx = self.get_fully_signed_holder_tx(funding_redeemscript);
// Timer set to $NEVER given we can't bump tx without anchor outputs
log_trace!(logger, "Going to broadcast Holder Transaction {} claiming funding output {} from {}...", signed_tx.txid(), outp.vout, outp.txid);
return Some((None, self.holder_commitment.feerate_per_kw(), signed_tx));
}
_ => unreachable!()
}
// Note: Currently, amounts of holder outputs spending witnesses aren't used
// as we can't malleate spending package to increase their feerate. This
// should change with the remaining anchor output patchset.
debug_assert!(amt == 0);
if let Some(transaction) = cached_request.finalize_package(self, amt, self.destination_script.clone(), logger) {
return Some((None, 0, transaction));
}
}
None
@ -626,7 +386,7 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
/// for this channel, provide new relevant on-chain transactions and/or new claim requests.
/// Formerly this was named `block_connected`, but it is now also used for claiming an HTLC output
/// if we receive a preimage after force-close.
pub(crate) fn update_claims_view<B: Deref, F: Deref, L: Deref>(&mut self, txn_matched: &[&Transaction], claimable_outpoints: Vec<ClaimRequest>, latest_height: Option<u32>, broadcaster: &B, fee_estimator: &F, logger: &L)
pub(crate) fn update_claims_view<B: Deref, F: Deref, L: Deref>(&mut self, txn_matched: &[&Transaction], requests: Vec<PackageTemplate>, latest_height: Option<u32>, broadcaster: &B, fee_estimator: &F, logger: &L)
where B::Target: BroadcasterInterface,
F::Target: FeeEstimator,
L::Target: Logger,
@ -635,45 +395,43 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
Some(h) => h,
None => self.latest_height,
};
log_trace!(logger, "Updating claims view at height {} with {} matched transactions and {} claim requests", height, txn_matched.len(), claimable_outpoints.len());
let mut new_claims = Vec::new();
let mut aggregated_claim = HashMap::new();
let mut aggregated_soonest = ::core::u32::MAX;
log_trace!(logger, "Updating claims view at height {} with {} matched transactions and {} claim requests", height, txn_matched.len(), requests.len());
let mut preprocessed_requests = Vec::with_capacity(requests.len());
let mut aggregated_request = None;
// Try to aggregate outputs if their timelock expiration isn't imminent (absolute_timelock
// Try to aggregate outputs if their timelock expiration isn't imminent (package timelock
// <= CLTV_SHARED_CLAIM_BUFFER) and they don't require an immediate nLockTime (aggregable).
for req in claimable_outpoints {
for req in requests {
// Don't claim an outpoint twice; that would be bad for privacy and may uselessly lock a CPFP input for a while
if let Some(_) = self.claimable_outpoints.get(&req.outpoint) { log_trace!(logger, "Bouncing off outpoint {}:{}, already registered its claiming request", req.outpoint.txid, req.outpoint.vout); } else {
log_trace!(logger, "Test if outpoint can be aggregated with expiration {} against {}", req.absolute_timelock, height + CLTV_SHARED_CLAIM_BUFFER);
if req.absolute_timelock <= height + CLTV_SHARED_CLAIM_BUFFER || !req.aggregable { // Don't aggregate if outpoint absolute timelock is soon or marked as non-aggregable
let mut single_input = HashMap::new();
single_input.insert(req.outpoint, req.witness_data);
new_claims.push((req.absolute_timelock, single_input));
if let Some(_) = self.claimable_outpoints.get(req.outpoints()[0]) { log_trace!(logger, "Bouncing off outpoint {}:{}, already registered its claiming request", req.outpoints()[0].txid, req.outpoints()[0].vout); } else {
log_trace!(logger, "Test if outpoint can be aggregated with expiration {} against {}", req.timelock(), height + CLTV_SHARED_CLAIM_BUFFER);
if req.timelock() <= height + CLTV_SHARED_CLAIM_BUFFER || !req.aggregable() {
// Don't aggregate if outpoint package timelock is soon or marked as non-aggregable
preprocessed_requests.push(req);
} else if aggregated_request.is_none() {
aggregated_request = Some(req);
} else {
aggregated_claim.insert(req.outpoint, req.witness_data);
if req.absolute_timelock < aggregated_soonest {
aggregated_soonest = req.absolute_timelock;
}
aggregated_request.as_mut().unwrap().merge_package(req);
}
}
}
new_claims.push((aggregated_soonest, aggregated_claim));
if let Some(req) = aggregated_request {
preprocessed_requests.push(req);
}
// Generate claim transactions and track them to bump if necessary at
// height timer expiration (i.e in how many blocks we're going to take action).
for (soonest_timelock, claim) in new_claims.drain(..) {
let mut claim_material = ClaimTxBumpMaterial { height_timer: None, feerate_previous: 0, soonest_timelock, per_input_material: claim };
if let Some((new_timer, new_feerate, tx)) = self.generate_claim_tx(height, &claim_material, &*fee_estimator, &*logger) {
claim_material.height_timer = new_timer;
claim_material.feerate_previous = new_feerate;
for mut req in preprocessed_requests {
if let Some((new_timer, new_feerate, tx)) = self.generate_claim_tx(height, &req, &*fee_estimator, &*logger) {
req.set_timer(new_timer);
req.set_feerate(new_feerate);
let txid = tx.txid();
for k in claim_material.per_input_material.keys() {
for k in req.outpoints() {
log_trace!(logger, "Registering claiming request for {}:{}", k.txid, k.vout);
self.claimable_outpoints.insert(k.clone(), (txid, height));
}
self.pending_claim_requests.insert(txid, claim_material);
log_info!(logger, "Broadcasting onchain {}", log_tx!(tx));
self.pending_claim_requests.insert(txid, req);
log_trace!(logger, "Broadcasting onchain {}", log_tx!(tx));
broadcaster.broadcast_transaction(&tx);
}
}
@ -685,16 +443,16 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
for inp in &tx.input {
if let Some(first_claim_txid_height) = self.claimable_outpoints.get(&inp.previous_output) {
// If outpoint has claim request pending on it...
if let Some(claim_material) = self.pending_claim_requests.get_mut(&first_claim_txid_height.0) {
if let Some(request) = self.pending_claim_requests.get_mut(&first_claim_txid_height.0) {
//... we need to verify equality between transaction outpoints and claim request
// outpoints to know if transaction is the original claim or a bumped one issued
// by us.
let mut set_equality = true;
if claim_material.per_input_material.len() != tx.input.len() {
if request.outpoints().len() != tx.input.len() {
set_equality = false;
} else {
for (claim_inp, tx_inp) in claim_material.per_input_material.keys().zip(tx.input.iter()) {
if *claim_inp != tx_inp.previous_output {
for (claim_inp, tx_inp) in request.outpoints().iter().zip(tx.input.iter()) {
if **claim_inp != tx_inp.previous_output {
set_equality = false;
}
}
@ -721,18 +479,18 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
} else { // If false, generate new claim request with updated outpoint set
let mut at_least_one_drop = false;
for input in tx.input.iter() {
if let Some(input_material) = claim_material.per_input_material.remove(&input.previous_output) {
claimed_outputs_material.push((input.previous_output, input_material));
if let Some(package) = request.split_package(&input.previous_output) {
claimed_outputs_material.push(package);
at_least_one_drop = true;
}
// If there are no outpoints left to claim in this request, drop it entirely after ANTI_REORG_DELAY.
if claim_material.per_input_material.is_empty() {
if request.outpoints().is_empty() {
clean_claim_request_after_safety_delay!();
}
}
//TODO: recompute soonest_timelock to avoid wasting a bit on fees
if at_least_one_drop {
bump_candidates.insert(first_claim_txid_height.0.clone(), claim_material.clone());
bump_candidates.insert(first_claim_txid_height.0.clone(), request.clone());
}
}
break; // No need to iterate further, either the tx is ours or theirs
@ -741,11 +499,11 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
}
}
}
for (outpoint, input_material) in claimed_outputs_material.drain(..) {
for package in claimed_outputs_material.drain(..) {
let entry = OnchainEventEntry {
txid: tx.txid(),
height,
event: OnchainEvent::ContentiousOutpoint { outpoint, input_material },
event: OnchainEvent::ContentiousOutpoint { package },
};
if !self.onchain_events_awaiting_threshold_conf.contains(&entry) {
self.onchain_events_awaiting_threshold_conf.push(entry);
@ -762,14 +520,14 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
OnchainEvent::Claim { claim_request } => {
// We may remove a whole set of claim outpoints here, as these may have
// been aggregated in a single tx and claimed atomically
if let Some(bump_material) = self.pending_claim_requests.remove(&claim_request) {
for outpoint in bump_material.per_input_material.keys() {
if let Some(request) = self.pending_claim_requests.remove(&claim_request) {
for outpoint in request.outpoints() {
self.claimable_outpoints.remove(&outpoint);
}
}
},
OnchainEvent::ContentiousOutpoint { outpoint, .. } => {
self.claimable_outpoints.remove(&outpoint);
OnchainEvent::ContentiousOutpoint { package } => {
self.claimable_outpoints.remove(&package.outpoints()[0]);
}
}
} else {
@ -778,23 +536,23 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
}
// Check if any pending claim request must be rescheduled
for (first_claim_txid, ref claim_data) in self.pending_claim_requests.iter() {
if let Some(height_timer) = claim_data.height_timer {
if height >= height_timer {
bump_candidates.insert(*first_claim_txid, (*claim_data).clone());
for (first_claim_txid, ref request) in self.pending_claim_requests.iter() {
if let Some(h) = request.timer() {
if height >= h {
bump_candidates.insert(*first_claim_txid, (*request).clone());
}
}
}
// Build, bump and rebroadcast tx accordingly
log_trace!(logger, "Bumping {} candidates", bump_candidates.len());
for (first_claim_txid, claim_material) in bump_candidates.iter() {
if let Some((new_timer, new_feerate, bump_tx)) = self.generate_claim_tx(height, &claim_material, &*fee_estimator, &*logger) {
log_info!(logger, "Broadcasting onchain {}", log_tx!(bump_tx));
for (first_claim_txid, request) in bump_candidates.iter() {
if let Some((new_timer, new_feerate, bump_tx)) = self.generate_claim_tx(height, &request, &*fee_estimator, &*logger) {
log_trace!(logger, "Broadcasting onchain {}", log_tx!(bump_tx));
broadcaster.broadcast_transaction(&bump_tx);
if let Some(claim_material) = self.pending_claim_requests.get_mut(first_claim_txid) {
claim_material.height_timer = new_timer;
claim_material.feerate_previous = new_feerate;
if let Some(request) = self.pending_claim_requests.get_mut(first_claim_txid) {
request.set_timer(new_timer);
request.set_feerate(new_feerate);
}
}
}
@ -837,13 +595,13 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
//- our claim tx on a commitment tx output
//- resurrect outpoint back in its claimable set and regenerate tx
match entry.event {
OnchainEvent::ContentiousOutpoint { outpoint, input_material } => {
if let Some(ancestor_claimable_txid) = self.claimable_outpoints.get(&outpoint) {
if let Some(claim_material) = self.pending_claim_requests.get_mut(&ancestor_claimable_txid.0) {
claim_material.per_input_material.insert(outpoint, input_material);
OnchainEvent::ContentiousOutpoint { package } => {
if let Some(ancestor_claimable_txid) = self.claimable_outpoints.get(&package.outpoints()[0]) {
if let Some(request) = self.pending_claim_requests.get_mut(&ancestor_claimable_txid.0) {
request.merge_package(package);
// Using a HashMap guarantees us that if we have multiple outpoints getting
// resurrected, only one bump claim tx is going to be broadcast
bump_candidates.insert(ancestor_claimable_txid.clone(), claim_material.clone());
bump_candidates.insert(ancestor_claimable_txid.clone(), request.clone());
}
}
},
@ -853,16 +611,16 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
self.onchain_events_awaiting_threshold_conf.push(entry);
}
}
for (_, claim_material) in bump_candidates.iter_mut() {
if let Some((new_timer, new_feerate, bump_tx)) = self.generate_claim_tx(height, &claim_material, &&*fee_estimator, &&*logger) {
claim_material.height_timer = new_timer;
claim_material.feerate_previous = new_feerate;
for (_, request) in bump_candidates.iter_mut() {
if let Some((new_timer, new_feerate, bump_tx)) = self.generate_claim_tx(height, &request, &&*fee_estimator, &&*logger) {
request.set_timer(new_timer);
request.set_feerate(new_feerate);
log_info!(logger, "Broadcasting onchain {}", log_tx!(bump_tx));
broadcaster.broadcast_transaction(&bump_tx);
}
}
for (ancestor_claim_txid, claim_material) in bump_candidates.drain() {
self.pending_claim_requests.insert(ancestor_claim_txid.0, claim_material);
for (ancestor_claim_txid, request) in bump_candidates.drain() {
self.pending_claim_requests.insert(ancestor_claim_txid.0, request);
}
//TODO: if we implement cross-block aggregated claim transactions we need to refresh the set of outpoints and regenerate the tx, but
// right now if one of the outpoints gets disconnected, just erase the whole pending claim request.
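To keep the OnchainTxHandler changes readable, here is the set of PackageTemplate methods this file now leans on, gathered from the call sites above; the argument and return types are inferred from usage and should be treated as approximations of what ln/package.rs actually declares:

    // outpoints() -> Vec<&BitcoinOutPoint>        // outputs the package would claim
    // timelock() -> u32                           // soonest absolute timelock among them
    // aggregable() -> bool                        // whether it may be merged with other packages
    // merge_package(other: PackageTemplate)       // absorb another package's outputs
    // split_package(outpoint) -> Option<PackageTemplate> // carve one outpoint back out
    // package_amount() -> u64                     // total claimable value, in sats
    // package_weight(destination_script) -> usize // predicted weight of the claim tx
    // is_malleable() -> bool                      // true if we build and sign the claim tx ourselves
    // feerate() -> u64 / set_feerate(u64)         // last feerate used, for RBF bumps
    // timer() -> Option<u32> / set_timer(..)      // height at which to regenerate a bumped claim
    // finalize_package(&mut OnchainTxHandler, value, destination_script, logger) -> Option<Transaction>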

@ -707,93 +707,6 @@ impl Readable for PackageTemplate {
}
}
/// Utilities for computing witness weights and feerates for onchain operations
#[derive(PartialEq, Clone, Copy)]
pub(crate) enum InputDescriptors {
RevokedOfferedHTLC,
RevokedReceivedHTLC,
OfferedHTLC,
ReceivedHTLC,
RevokedOutput, // either a revoked to_holder output on commitment tx, a revoked HTLC-Timeout output or a revoked HTLC-Success output
}
impl Writeable for InputDescriptors {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> {
match self {
&InputDescriptors::RevokedOfferedHTLC => {
writer.write_all(&[0; 1])?;
},
&InputDescriptors::RevokedReceivedHTLC => {
writer.write_all(&[1; 1])?;
},
&InputDescriptors::OfferedHTLC => {
writer.write_all(&[2; 1])?;
},
&InputDescriptors::ReceivedHTLC => {
writer.write_all(&[3; 1])?;
}
&InputDescriptors::RevokedOutput => {
writer.write_all(&[4; 1])?;
}
}
Ok(())
}
}
impl Readable for InputDescriptors {
fn read<R: ::std::io::Read>(reader: &mut R) -> Result<Self, DecodeError> {
let input_descriptor = match <u8 as Readable>::read(reader)? {
0 => {
InputDescriptors::RevokedOfferedHTLC
},
1 => {
InputDescriptors::RevokedReceivedHTLC
},
2 => {
InputDescriptors::OfferedHTLC
},
3 => {
InputDescriptors::ReceivedHTLC
},
4 => {
InputDescriptors::RevokedOutput
}
_ => return Err(DecodeError::InvalidValue),
};
Ok(input_descriptor)
}
}
pub(crate) fn get_witnesses_weight(inputs: &[InputDescriptors]) -> usize {
let mut tx_weight = 2; // count segwit flags
for inp in inputs {
// We use expected weight (and not actual) as signatures and time lock delays may vary
tx_weight += match inp {
// number_of_witness_elements + sig_length + revocation_sig + pubkey_length + revocationpubkey + witness_script_length + witness_script
&InputDescriptors::RevokedOfferedHTLC => {
1 + 1 + 73 + 1 + 33 + 1 + 133
},
// number_of_witness_elements + sig_length + revocation_sig + pubkey_length + revocationpubkey + witness_script_length + witness_script
&InputDescriptors::RevokedReceivedHTLC => {
1 + 1 + 73 + 1 + 33 + 1 + 139
},
// number_of_witness_elements + sig_length + counterpartyhtlc_sig + preimage_length + preimage + witness_script_length + witness_script
&InputDescriptors::OfferedHTLC => {
1 + 1 + 73 + 1 + 32 + 1 + 133
},
// number_of_witness_elements + sig_length + revocation_sig + pubkey_length + revocationpubkey + witness_script_length + witness_script
&InputDescriptors::ReceivedHTLC => {
1 + 1 + 73 + 1 + 1 + 1 + 139
},
// number_of_witness_elements + sig_length + revocation_sig + true_length + op_true + witness_script_length + witness_script
&InputDescriptors::RevokedOutput => {
1 + 1 + 73 + 1 + 1 + 1 + 77
},
};
}
tx_weight
}
/// Attempts to propose a bumping fee for a transaction from its spent outputs' values and predicted
/// weight. We start with the highest-priority feerate returned by the node's fee estimator, then
/// fall back to lower priorities until we have enough value available to draw from.
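A minimal sketch of that fallback, modeled on the removed subtract_high_prio_fee!() macro shown earlier; the real compute_output_value() in ln/package.rs may differ in details (RBF handling, bound checks), and this standalone function is only illustrative:

    use chain::chaininterface::{ConfirmationTarget, FeeEstimator};

    /// Illustrative only: walk down the estimator's confirmation targets until the fee
    /// no longer consumes the entire claimable value, returning (output_value, feerate).
    fn propose_output_value<F: FeeEstimator>(fee_estimator: &F, claim_value_sat: u64, predicted_weight: u64) -> Option<(u64, u32)> {
        for target in &[ConfirmationTarget::HighPriority, ConfirmationTarget::Normal, ConfirmationTarget::Background] {
            let feerate = fee_estimator.get_est_sat_per_1000_weight(*target);
            let fee = feerate as u64 * predicted_weight / 1000;
            if claim_value_sat > fee {
                return Some((claim_value_sat - fee, feerate));
            }
        }
        // Even the lowest-priority fee would burn the whole claimable balance.
        None
    }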