Merge pull request #3239 from arik-so/bitcoin-0.32.2-upgrade

Bitcoin 0.32.2 upgrade
Commit 43dcf2f3d8 by Matt Corallo, 2024-08-16 20:13:26 +00:00, committed by GitHub
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
75 changed files with 617 additions and 528 deletions

View file

@ -22,7 +22,7 @@ lightning = { path = "../lightning", features = ["regex", "hashbrown", "_test_ut
lightning-invoice = { path = "../lightning-invoice" }
lightning-rapid-gossip-sync = { path = "../lightning-rapid-gossip-sync" }
bech32 = "0.9.1"
bitcoin = { version = "0.31.2", features = ["secp-lowmemory"] }
bitcoin = { version = "0.32.2", features = ["secp-lowmemory"] }
afl = { version = "0.12", optional = true }
honggfuzz = { version = "0.5", optional = true, default-features = false }

View file

@ -80,8 +80,8 @@ use bitcoin::secp256k1::ecdsa::{RecoverableSignature, Signature};
use bitcoin::secp256k1::schnorr;
use bitcoin::secp256k1::{self, Message, PublicKey, Scalar, Secp256k1, SecretKey};
use lightning::io::Cursor;
use std::cmp::{self, Ordering};
use std::io::Cursor;
use std::mem;
use std::sync::atomic;
use std::sync::{Arc, Mutex};
@ -153,7 +153,7 @@ impl BroadcasterInterface for TestBroadcaster {
pub struct VecWriter(pub Vec<u8>);
impl Writer for VecWriter {
fn write_all(&mut self, buf: &[u8]) -> Result<(), ::std::io::Error> {
fn write_all(&mut self, buf: &[u8]) -> Result<(), ::lightning::io::Error> {
self.0.extend_from_slice(buf);
Ok(())
}
@ -393,7 +393,7 @@ impl SignerProvider for KeyProvider {
}
fn read_chan_signer(&self, buffer: &[u8]) -> Result<Self::EcdsaSigner, DecodeError> {
let mut reader = std::io::Cursor::new(buffer);
let mut reader = lightning::io::Cursor::new(buffer);
let inner: InMemorySigner = ReadableArgs::read(&mut reader, self)?;
let state = self.make_enforcement_state_cell(inner.commitment_seed);
@ -879,7 +879,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
script_pubkey: output_script,
}],
};
funding_output = OutPoint { txid: tx.txid(), index: 0 };
funding_output = OutPoint { txid: tx.compute_txid(), index: 0 };
$source
.funding_transaction_generated(
temporary_channel_id,
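
A minimal sketch of the pattern applied throughout the fuzz targets above: serialization I/O now goes through `lightning::io` rather than `std::io`, presumably because `lightning::io::Error` is no longer a plain re-export of `std::io::Error` after this upgrade. `lightning::ln::msgs::Ping` is used here only as an arbitrary message type assumed to implement `Writeable`/`Readable`.

use lightning::io::Cursor;
use lightning::ln::msgs::Ping;
use lightning::util::ser::{Readable, Writeable, Writer};

pub struct VecWriter(pub Vec<u8>);
impl Writer for VecWriter {
	fn write_all(&mut self, buf: &[u8]) -> Result<(), lightning::io::Error> {
		self.0.extend_from_slice(buf);
		Ok(())
	}
}

fn round_trip(msg: &Ping) -> Ping {
	// Serialize into the in-memory writer...
	let mut w = VecWriter(Vec::new());
	msg.write(&mut w).unwrap();
	// ...and read it back through lightning::io::Cursor instead of std::io::Cursor.
	let mut r = Cursor::new(&w.0[..]);
	<Ping as Readable>::read(&mut r).unwrap()
}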

View file

@ -10,11 +10,11 @@ use lightning::util::test_utils::OnlyReadsKeysInterface;
use crate::utils::test_logger;
use std::io::Cursor;
use lightning::io::Cursor;
struct VecWriter(Vec<u8>);
impl Writer for VecWriter {
fn write_all(&mut self, buf: &[u8]) -> Result<(), ::std::io::Error> {
fn write_all(&mut self, buf: &[u8]) -> Result<(), ::lightning::io::Error> {
self.0.extend_from_slice(buf);
Ok(())
}

View file

@ -120,8 +120,8 @@ impl InputData {
Some(&self.data[old_pos..old_pos + len])
}
}
impl std::io::Read for &InputData {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
impl lightning::io::Read for &InputData {
fn read(&mut self, buf: &mut [u8]) -> lightning::io::Result<usize> {
if let Some(sl) = self.get_slice(buf.len()) {
buf.copy_from_slice(sl);
Ok(buf.len())
@ -305,7 +305,7 @@ impl<'a> MoneyLossDetector<'a> {
fn connect_block(&mut self, all_txn: &[Transaction]) {
let mut txdata = Vec::with_capacity(all_txn.len());
for (idx, tx) in all_txn.iter().enumerate() {
let txid = tx.txid();
let txid = tx.compute_txid();
self.txids_confirmed.entry(txid).or_insert_with(|| {
txdata.push((idx + 1, tx));
self.height
@ -897,7 +897,7 @@ pub fn do_test(mut data: &[u8], logger: &Arc<dyn Logger>) {
if tx.version.0 > 0xff {
break;
}
let funding_txid = tx.txid();
let funding_txid = tx.compute_txid();
if loss_detector.txids_confirmed.get(&funding_txid).is_none() {
let outpoint = OutPoint { txid: funding_txid, index: 0 };
for chan in channelmanager.list_channels() {
@ -922,7 +922,7 @@ pub fn do_test(mut data: &[u8], logger: &Arc<dyn Logger>) {
panic!();
}
}
let funding_txid = tx.txid();
let funding_txid = tx.compute_txid();
for idx in 0..tx.output.len() {
let outpoint = OutPoint { txid: funding_txid, index: idx as u16 };
pending_funding_signatures.insert(outpoint, tx.clone());
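
The most common change in this diff is mechanical: bitcoin 0.32 deprecates `Transaction::txid()` in favour of `Transaction::compute_txid()`, which makes explicit that the id is recomputed by hashing on every call (callers that need it repeatedly may want to cache it). A minimal sketch of the rename, assuming a `Transaction` is in scope:

use bitcoin::{Transaction, Txid};

fn funding_txid(tx: &Transaction) -> Txid {
	// 0.31.x: tx.txid()
	tx.compute_txid()
}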

View file

@ -12,7 +12,7 @@
use lightning::util::ser::Writer;
pub struct VecWriter(pub Vec<u8>);
impl Writer for VecWriter {
fn write_all(&mut self, buf: &[u8]) -> Result<(), ::std::io::Error> {
fn write_all(&mut self, buf: &[u8]) -> Result<(), ::lightning::io::Error> {
self.0.extend_from_slice(buf);
Ok(())
}
@ -31,7 +31,7 @@ impl Writer for VecWriter {
macro_rules! test_msg {
($MsgType: path, $data: ident) => {{
use lightning::util::ser::{Readable, Writeable};
let mut r = ::std::io::Cursor::new($data);
let mut r = ::lightning::io::Cursor::new($data);
if let Ok(msg) = <$MsgType as Readable>::read(&mut r) {
let p = r.position() as usize;
let mut w = VecWriter(Vec::new());
@ -50,13 +50,14 @@ macro_rules! test_msg {
macro_rules! test_msg_simple {
($MsgType: path, $data: ident) => {{
use lightning::util::ser::{Readable, Writeable};
let mut r = ::std::io::Cursor::new($data);
let mut r = ::lightning::io::Cursor::new($data);
if let Ok(msg) = <$MsgType as Readable>::read(&mut r) {
let mut w = VecWriter(Vec::new());
msg.write(&mut w).unwrap();
assert_eq!(msg.serialized_length(), w.0.len());
let msg = <$MsgType as Readable>::read(&mut ::std::io::Cursor::new(&w.0)).unwrap();
let msg =
<$MsgType as Readable>::read(&mut ::lightning::io::Cursor::new(&w.0)).unwrap();
let mut w_two = VecWriter(Vec::new());
msg.write(&mut w_two).unwrap();
assert_eq!(&w.0[..], &w_two.0[..]);
@ -70,7 +71,7 @@ macro_rules! test_msg_simple {
macro_rules! test_msg_exact {
($MsgType: path, $data: ident) => {{
use lightning::util::ser::{Readable, Writeable};
let mut r = ::std::io::Cursor::new($data);
let mut r = ::lightning::io::Cursor::new($data);
if let Ok(msg) = <$MsgType as Readable>::read(&mut r) {
let mut w = VecWriter(Vec::new());
msg.write(&mut w).unwrap();
@ -86,7 +87,7 @@ macro_rules! test_msg_exact {
macro_rules! test_msg_hole {
($MsgType: path, $data: ident, $hole: expr, $hole_len: expr) => {{
use lightning::util::ser::{Readable, Writeable};
let mut r = ::std::io::Cursor::new($data);
let mut r = ::lightning::io::Cursor::new($data);
if let Ok(msg) = <$MsgType as Readable>::read(&mut r) {
let mut w = VecWriter(Vec::new());
msg.write(&mut w).unwrap();

View file

@ -17,7 +17,7 @@ use lightning::util::test_utils;
pub fn onion_hop_data_test<Out: test_logger::Output>(data: &[u8], _out: Out) {
use bitcoin::secp256k1::PublicKey;
use lightning::util::ser::ReadableArgs;
let mut r = ::std::io::Cursor::new(data);
let mut r = ::lightning::io::Cursor::new(data);
let node_signer = test_utils::TestNodeSigner::new(test_utils::privkey(42));
let _ = <lightning::ln::msgs::InboundOnionPayload as ReadableArgs<(
Option<PublicKey>,
@ -30,7 +30,7 @@ pub extern "C" fn onion_hop_data_run(data: *const u8, datalen: usize) {
use bitcoin::secp256k1::PublicKey;
use lightning::util::ser::ReadableArgs;
let data = unsafe { std::slice::from_raw_parts(data, datalen) };
let mut r = ::std::io::Cursor::new(data);
let mut r = ::lightning::io::Cursor::new(data);
let node_signer = test_utils::TestNodeSigner::new(test_utils::privkey(42));
let _ = <lightning::ln::msgs::InboundOnionPayload as ReadableArgs<(
Option<PublicKey>,

View file

@ -30,7 +30,7 @@ use lightning_invoice::RawBolt11Invoice;
use crate::utils::test_logger;
use std::io::{self, Cursor};
use lightning::io::{self, Cursor};
use std::sync::atomic::{AtomicU64, Ordering};
#[inline]
@ -168,7 +168,7 @@ impl CustomOnionMessageHandler for TestCustomMessageHandler {
&self, _message_type: u64, buffer: &mut R,
) -> Result<Option<Self::CustomMessage>, msgs::DecodeError> {
let mut buf = Vec::new();
buffer.read_to_end(&mut buf)?;
buffer.read_to_limit(&mut buf, u64::MAX)?;
return Ok(Some(TestCustomMessage {}));
}
fn release_pending_custom_messages(&self) -> Vec<PendingOnionMessage<Self::CustomMessage>> {
@ -178,7 +178,7 @@ impl CustomOnionMessageHandler for TestCustomMessageHandler {
pub struct VecWriter(pub Vec<u8>);
impl Writer for VecWriter {
fn write_all(&mut self, buf: &[u8]) -> Result<(), ::std::io::Error> {
fn write_all(&mut self, buf: &[u8]) -> Result<(), ::lightning::io::Error> {
self.0.extend_from_slice(buf);
Ok(())
}

View file

@ -147,7 +147,7 @@ pub fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
macro_rules! decode_msg {
($MsgType: path, $len: expr) => {{
let mut reader = ::std::io::Cursor::new(get_slice!($len));
let mut reader = ::lightning::io::Cursor::new(get_slice!($len));
match <$MsgType>::read(&mut reader) {
Ok(msg) => {
assert_eq!(reader.position(), $len as u64);

View file

@ -16,12 +16,12 @@ rustdoc-args = ["--cfg", "docsrs"]
[features]
futures = [ ]
std = ["bitcoin/std", "lightning/std", "lightning-rapid-gossip-sync/std"]
no-std = ["bitcoin/no-std", "lightning/no-std", "lightning-rapid-gossip-sync/no-std"]
no-std = ["lightning/no-std", "lightning-rapid-gossip-sync/no-std"]
default = ["std"]
[dependencies]
bitcoin = { version = "0.31.2", default-features = false }
bitcoin = { version = "0.32.2", default-features = false }
lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false }
lightning-rapid-gossip-sync = { version = "0.0.123-beta", path = "../lightning-rapid-gossip-sync", default-features = false }

View file

@ -1326,7 +1326,7 @@ mod tests {
&& key == CHANNEL_MANAGER_PERSISTENCE_KEY
{
if let Some((error, message)) = self.manager_error {
return Err(std::io::Error::new(error, message));
return Err(std::io::Error::new(error, message).into());
}
}
@ -1344,7 +1344,7 @@ mod tests {
};
if let Some((error, message)) = self.graph_error {
return Err(std::io::Error::new(error, message));
return Err(std::io::Error::new(error, message).into());
}
}
@ -1353,7 +1353,7 @@ mod tests {
&& key == SCORER_PERSISTENCE_KEY
{
if let Some((error, message)) = self.scorer_error {
return Err(std::io::Error::new(error, message));
return Err(std::io::Error::new(error, message).into());
}
}
@ -1866,7 +1866,10 @@ mod tests {
nodes[0]
.node
.force_close_broadcasting_latest_txn(
&ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.txid(), index: 0 }),
&ChannelId::v1_from_funding_outpoint(OutPoint {
txid: tx.compute_txid(),
index: 0,
}),
&nodes[1].node.get_our_node_id(),
error_message.to_string(),
)
@ -2002,7 +2005,7 @@ mod tests {
match bp_future.await {
Ok(_) => panic!("Expected error persisting manager"),
Err(e) => {
assert_eq!(e.kind(), std::io::ErrorKind::Other);
assert_eq!(e.kind(), lightning::io::ErrorKind::Other);
assert_eq!(e.get_ref().unwrap().to_string(), "test");
},
}
@ -2134,7 +2137,7 @@ mod tests {
get_event_msg!(nodes[1], MessageSendEvent::SendChannelUpdate, node_0_id);
let broadcast_funding =
nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
assert_eq!(broadcast_funding.txid(), funding_tx.txid());
assert_eq!(broadcast_funding.compute_txid(), funding_tx.compute_txid());
assert!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().is_empty());
if !std::thread::panicking() {
@ -2212,7 +2215,7 @@ mod tests {
let sweep_tx_0 = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
match tracked_output.status {
OutputSpendStatus::PendingFirstConfirmation { latest_spending_tx, .. } => {
assert_eq!(sweep_tx_0.txid(), latest_spending_tx.txid());
assert_eq!(sweep_tx_0.compute_txid(), latest_spending_tx.compute_txid());
},
_ => panic!("Unexpected status"),
}
@ -2224,7 +2227,7 @@ mod tests {
let sweep_tx_1 = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
match tracked_output.status {
OutputSpendStatus::PendingFirstConfirmation { latest_spending_tx, .. } => {
assert_eq!(sweep_tx_1.txid(), latest_spending_tx.txid());
assert_eq!(sweep_tx_1.compute_txid(), latest_spending_tx.compute_txid());
},
_ => panic!("Unexpected status"),
}
@ -2236,7 +2239,7 @@ mod tests {
let sweep_tx_2 = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
match tracked_output.status {
OutputSpendStatus::PendingFirstConfirmation { latest_spending_tx, .. } => {
assert_eq!(sweep_tx_2.txid(), latest_spending_tx.txid());
assert_eq!(sweep_tx_2.compute_txid(), latest_spending_tx.compute_txid());
},
_ => panic!("Unexpected status"),
}
@ -2249,7 +2252,7 @@ mod tests {
let tracked_output = nodes[0].sweeper.tracked_spendable_outputs().first().unwrap().clone();
match tracked_output.status {
OutputSpendStatus::PendingThresholdConfirmations { latest_spending_tx, .. } => {
assert_eq!(sweep_tx_2.txid(), latest_spending_tx.txid());
assert_eq!(sweep_tx_2.compute_txid(), latest_spending_tx.compute_txid());
},
_ => panic!("Unexpected status"),
}
@ -2264,7 +2267,7 @@ mod tests {
let tracked_output = nodes[0].sweeper.tracked_spendable_outputs().first().unwrap().clone();
match tracked_output.status {
OutputSpendStatus::PendingThresholdConfirmations { latest_spending_tx, .. } => {
assert_eq!(sweep_tx_2.txid(), latest_spending_tx.txid());
assert_eq!(sweep_tx_2.compute_txid(), latest_spending_tx.compute_txid());
},
_ => panic!("Unexpected status"),
}

View file

@ -18,7 +18,7 @@ rest-client = [ "serde_json", "chunked_transfer" ]
rpc-client = [ "serde_json", "chunked_transfer" ]
[dependencies]
bitcoin = "0.31.2"
bitcoin = "0.32.2"
lightning = { version = "0.0.123-beta", path = "../lightning" }
tokio = { version = "1.35", features = [ "io-util", "net", "time", "rt" ], optional = true }
serde_json = { version = "1.0", optional = true }

View file

@ -622,10 +622,7 @@ pub(crate) mod tests {
match TryInto::<Txid>::try_into(response) {
Err(e) => {
assert_eq!(e.kind(), io::ErrorKind::InvalidData);
assert_eq!(
e.get_ref().unwrap().to_string(),
"bad hex string length 6 (expected 64)"
);
assert_eq!(e.get_ref().unwrap().to_string(), "failed to parse hex");
},
Ok(_) => panic!("Expected error"),
}
@ -637,10 +634,7 @@ pub(crate) mod tests {
match TryInto::<Txid>::try_into(response) {
Err(e) => {
assert_eq!(e.kind(), io::ErrorKind::InvalidData);
assert_eq!(
e.get_ref().unwrap().to_string(),
"bad hex string length 4 (expected 64)"
);
assert_eq!(e.get_ref().unwrap().to_string(), "failed to parse hex");
},
Ok(_) => panic!("Expected error"),
}

View file

@ -198,7 +198,7 @@ where
return Err(UtxoLookupError::UnknownTx);
}
outpoint = OutPoint::new(transaction.txid(), output_index.into());
outpoint = OutPoint::new(transaction.compute_txid(), output_index.into());
output = transaction.output[output_index as usize].clone();
}};
}

View file

@ -59,7 +59,7 @@ where
///
/// use lightning_block_sync::*;
///
/// use std::io::Cursor;
/// use lightning::io::Cursor;
///
/// async fn init_sync<
/// B: BlockSource,

View file

@ -144,8 +144,8 @@ impl ValidatedBlockHeader {
if self.height % 2016 == 0 {
let target = self.header.target();
let previous_target = previous_header.header.target();
let min_target = previous_target.min_difficulty_transition_threshold();
let max_target = previous_target.max_difficulty_transition_threshold();
let min_target = previous_target.min_transition_threshold();
let max_target = previous_target.max_transition_threshold_unchecked();
if target > max_target || target < min_target {
return Err(BlockSourceError::persistent("invalid difficulty transition"));
}
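
The retarget helpers on `bitcoin::pow::Target` were renamed in 0.32: `min_difficulty_transition_threshold()` becomes `min_transition_threshold()` and `max_difficulty_transition_threshold()` becomes `max_transition_threshold_unchecked()`. A minimal sketch of the bound check above, assuming two consecutive headers at a retarget boundary:

use bitcoin::block::Header;

fn difficulty_transition_ok(previous_header: &Header, header: &Header) -> bool {
	let previous_target = previous_header.target();
	let min_target = previous_target.min_transition_threshold();
	let max_target = previous_target.max_transition_threshold_unchecked();
	let target = header.target();
	// Mirrors the check above: reject targets outside the allowed transition range.
	target >= min_target && target <= max_target
}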

View file

@ -53,7 +53,7 @@ impl Blockchain {
input: vec![],
output: vec![],
};
let merkle_root = TxMerkleNode::from_raw_hash(coinbase.txid().to_raw_hash());
let merkle_root = TxMerkleNode::from_raw_hash(coinbase.compute_txid().to_raw_hash());
self.blocks.push(Block {
header: Header {
version: Version::NO_SOFT_FORK_SIGNALLING,

View file

@ -9,7 +9,6 @@ pub fn hex_to_work(hex: &str) -> Result<Work, HexToArrayError> {
#[cfg(test)]
mod tests {
use super::*;
use bitcoin::hex::HexToBytesError;
use bitcoin::pow::Work;
#[test]
@ -20,31 +19,25 @@ mod tests {
#[test]
fn hex_to_work_too_short_str() {
let hex = String::from_utf8(vec![b'0'; 32]).unwrap();
assert_eq!(hex_to_work(&hex), Err(HexToArrayError::InvalidLength(32, 64)));
assert!(hex_to_work(&hex).is_err());
}
#[test]
fn hex_to_work_too_long_str() {
let hex = String::from_utf8(vec![b'0'; 128]).unwrap();
assert_eq!(hex_to_work(&hex), Err(HexToArrayError::InvalidLength(128, 64)));
assert!(hex_to_work(&hex).is_err());
}
#[test]
fn hex_to_work_odd_length_str() {
let hex = String::from_utf8(vec![b'0'; 65]).unwrap();
assert_eq!(
hex_to_work(&hex),
Err(HexToArrayError::Conversion(HexToBytesError::OddLengthString(65)))
);
assert!(hex_to_work(&hex).is_err());
}
#[test]
fn hex_to_work_invalid_char() {
let hex = String::from_utf8(vec![b'G'; 64]).unwrap();
assert_eq!(
hex_to_work(&hex),
Err(HexToArrayError::Conversion(HexToBytesError::InvalidChar(b'G')))
);
assert!(hex_to_work(&hex).is_err());
}
#[test]

View file

@ -14,7 +14,7 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
bitcoin = "0.31.2"
bitcoin = "0.32.2"
lightning = { version = "0.0.123-beta", path = "../lightning" }
[lints]

View file

@ -16,15 +16,14 @@ rustdoc-args = ["--cfg", "docsrs"]
[features]
default = ["std"]
no-std = ["bitcoin/no-std"]
std = ["bitcoin/std", "bech32/std"]
no-std = []
std = ["bech32/std"]
[dependencies]
bech32 = { version = "0.9.1", default-features = false }
lightning-types = { version = "0.1", path = "../lightning-types", default-features = false }
secp256k1 = { version = "0.28.0", default-features = false, features = ["recovery", "alloc"] }
serde = { version = "1.0.118", optional = true }
bitcoin = { version = "0.31.2", default-features = false }
bitcoin = { version = "0.32.2", default-features = false, features = ["secp-recovery"] }
[dev-dependencies]
serde_json = { version = "1"}

View file

@ -17,8 +17,8 @@ use crate::prelude::*;
use lightning_types::payment::PaymentSecret;
use lightning_types::routing::{RoutingFees, RouteHint, RouteHintHop};
use secp256k1::ecdsa::{RecoveryId, RecoverableSignature};
use secp256k1::PublicKey;
use bitcoin::secp256k1::ecdsa::{RecoveryId, RecoverableSignature};
use bitcoin::secp256k1::PublicKey;
use super::{Bolt11Invoice, Sha256, TaggedField, ExpiryTime, MinFinalCltvExpiryDelta, Fallback, PayeePubKey, Bolt11InvoiceSignature, PositiveTimestamp,
Bolt11SemanticError, PrivateRoute, Bolt11ParseError, ParseOrSemanticError, Description, RawTaggedField, Currency, RawHrp, SiPrefix, RawBolt11Invoice,
@ -698,7 +698,7 @@ macro_rules! from_error {
}
}
from_error!(Bolt11ParseError::MalformedSignature, secp256k1::Error);
from_error!(Bolt11ParseError::MalformedSignature, bitcoin::secp256k1::Error);
from_error!(Bolt11ParseError::ParseAmountError, ParseIntError);
from_error!(Bolt11ParseError::DescriptionDecodeError, str::Utf8Error);
@ -726,7 +726,7 @@ impl From<crate::Bolt11SemanticError> for ParseOrSemanticError {
#[cfg(test)]
mod test {
use crate::de::Bolt11ParseError;
use secp256k1::PublicKey;
use bitcoin::secp256k1::PublicKey;
use bech32::u5;
use bitcoin::hashes::sha256;
use std::str::FromStr;
@ -973,7 +973,7 @@ mod test {
#[test]
fn test_payment_secret_and_features_de_and_ser() {
use lightning_types::features::Bolt11InvoiceFeatures;
use secp256k1::ecdsa::{RecoveryId, RecoverableSignature};
use bitcoin::secp256k1::ecdsa::{RecoveryId, RecoverableSignature};
use crate::TaggedField::*;
use crate::{SiPrefix, SignedRawBolt11Invoice, Bolt11InvoiceSignature, RawBolt11Invoice, RawHrp, RawDataPart,
Currency, Sha256, PositiveTimestamp};
@ -1020,7 +1020,7 @@ mod test {
#[test]
fn test_raw_signed_invoice_deserialization() {
use crate::TaggedField::*;
use secp256k1::ecdsa::{RecoveryId, RecoverableSignature};
use bitcoin::secp256k1::ecdsa::{RecoveryId, RecoverableSignature};
use crate::{SignedRawBolt11Invoice, Bolt11InvoiceSignature, RawBolt11Invoice, RawHrp, RawDataPart, Currency, Sha256,
PositiveTimestamp};
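
lightning-invoice no longer depends on the `secp256k1` crate directly; the imports above switch to the `bitcoin::secp256k1` re-export (with the `secp-recovery` feature enabled on `bitcoin` in Cargo.toml). A minimal sketch of signing and recovering through the re-exported types, assuming an arbitrary 32-byte digest:

use bitcoin::secp256k1::ecdsa::RecoverableSignature;
use bitcoin::secp256k1::{Message, PublicKey, Secp256k1, SecretKey};

fn sign_and_recover(digest: [u8; 32], sk: &SecretKey) -> (RecoverableSignature, PublicKey) {
	let secp = Secp256k1::new();
	let msg = Message::from_digest(digest);
	let sig = secp.sign_ecdsa_recoverable(&msg, sk);
	// Recovering the signing key from the recoverable signature is how invoice signatures are checked.
	let recovered = secp.recover_ecdsa(&msg, &sig).expect("signature recovers its own key");
	(sig, recovered)
}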

View file

@ -27,7 +27,6 @@ compile_error!("at least one of the `std` or `no-std` features must be enabled")
extern crate bech32;
extern crate lightning_types;
extern crate secp256k1;
extern crate alloc;
#[cfg(any(test, feature = "std"))]
extern crate core;
@ -39,13 +38,12 @@ use std::time::SystemTime;
use bech32::{FromBase32, u5};
use bitcoin::{Address, Network, PubkeyHash, ScriptHash, WitnessProgram, WitnessVersion};
use bitcoin::address::Payload;
use bitcoin::hashes::{Hash, sha256};
use lightning_types::features::Bolt11InvoiceFeatures;
use secp256k1::PublicKey;
use secp256k1::{Message, Secp256k1};
use secp256k1::ecdsa::RecoverableSignature;
use bitcoin::secp256k1::PublicKey;
use bitcoin::secp256k1::{Message, Secp256k1};
use bitcoin::secp256k1::ecdsa::RecoverableSignature;
use core::cmp::Ordering;
use core::fmt::{Display, Formatter, self};
@ -85,7 +83,7 @@ use crate::prelude::*;
pub enum Bolt11ParseError {
Bech32Error(bech32::Error),
ParseAmountError(ParseIntError),
MalformedSignature(secp256k1::Error),
MalformedSignature(bitcoin::secp256k1::Error),
BadPrefix,
UnknownCurrency,
UnknownSiPrefix,
@ -142,15 +140,14 @@ pub const DEFAULT_MIN_FINAL_CLTV_EXPIRY_DELTA: u64 = 18;
/// ensures that only a semantically and syntactically correct invoice can be built using it.
///
/// ```
/// extern crate secp256k1;
/// extern crate lightning_invoice;
/// extern crate bitcoin;
///
/// use bitcoin::hashes::Hash;
/// use bitcoin::hashes::sha256;
///
/// use secp256k1::Secp256k1;
/// use secp256k1::SecretKey;
/// use bitcoin::secp256k1::Secp256k1;
/// use bitcoin::secp256k1::SecretKey;
///
/// use lightning_types::payment::PaymentSecret;
///
@ -867,7 +864,7 @@ impl SignedRawBolt11Invoice {
}
/// Recovers the public key used for signing the invoice from the recoverable signature.
pub fn recover_payee_pub_key(&self) -> Result<PayeePubKey, secp256k1::Error> {
pub fn recover_payee_pub_key(&self) -> Result<PayeePubKey, bitcoin::secp256k1::Error> {
let hash = Message::from_digest(self.hash);
Ok(PayeePubKey(Secp256k1::new().recover_ecdsa(
@ -1249,9 +1246,9 @@ impl Bolt11Invoice {
/// Check that the invoice is signed correctly and that key recovery works
pub fn check_signature(&self) -> Result<(), Bolt11SemanticError> {
match self.signed_invoice.recover_payee_pub_key() {
Err(secp256k1::Error::InvalidRecoveryId) =>
Err(bitcoin::secp256k1::Error::InvalidRecoveryId) =>
return Err(Bolt11SemanticError::InvalidRecoveryId),
Err(secp256k1::Error::InvalidSignature) =>
Err(bitcoin::secp256k1::Error::InvalidSignature) =>
return Err(Bolt11SemanticError::InvalidSignature),
Err(e) => panic!("no other error may occur, got {:?}", e),
Ok(_) => {},
@ -1434,22 +1431,22 @@ impl Bolt11Invoice {
/// Returns a list of all fallback addresses as [`Address`]es
pub fn fallback_addresses(&self) -> Vec<Address> {
self.fallbacks().iter().filter_map(|fallback| {
let payload = match fallback {
let address = match fallback {
Fallback::SegWitProgram { version, program } => {
match WitnessProgram::new(*version, program.clone()) {
Ok(witness_program) => Payload::WitnessProgram(witness_program),
match WitnessProgram::new(*version, &program) {
Ok(witness_program) => Address::from_witness_program(witness_program, self.network()),
Err(_) => return None,
}
}
Fallback::PubKeyHash(pkh) => {
Payload::PubkeyHash(*pkh)
Address::p2pkh(*pkh, self.network())
}
Fallback::ScriptHash(sh) => {
Payload::ScriptHash(*sh)
Address::p2sh_from_hash(*sh, self.network())
}
};
Some(Address::new(self.network(), payload))
Some(address)
}).collect()
}
@ -1812,9 +1809,9 @@ mod test {
#[test]
fn test_check_signature() {
use crate::TaggedField::*;
use secp256k1::Secp256k1;
use secp256k1::ecdsa::{RecoveryId, RecoverableSignature};
use secp256k1::{SecretKey, PublicKey};
use bitcoin::secp256k1::Secp256k1;
use bitcoin::secp256k1::ecdsa::{RecoveryId, RecoverableSignature};
use bitcoin::secp256k1::{SecretKey, PublicKey};
use crate::{SignedRawBolt11Invoice, Bolt11InvoiceSignature, RawBolt11Invoice, RawHrp, RawDataPart, Currency, Sha256,
PositiveTimestamp};
@ -1882,8 +1879,8 @@ mod test {
fn test_check_feature_bits() {
use crate::TaggedField::*;
use lightning_types::features::Bolt11InvoiceFeatures;
use secp256k1::Secp256k1;
use secp256k1::SecretKey;
use bitcoin::secp256k1::Secp256k1;
use bitcoin::secp256k1::SecretKey;
use crate::{Bolt11Invoice, RawBolt11Invoice, RawHrp, RawDataPart, Currency, Sha256, PositiveTimestamp,
Bolt11SemanticError};
@ -2004,7 +2001,7 @@ mod test {
use crate::*;
use lightning_types::routing::RouteHintHop;
use std::iter::FromIterator;
use secp256k1::PublicKey;
use bitcoin::secp256k1::PublicKey;
let builder = InvoiceBuilder::new(Currency::Bitcoin)
.payment_hash(sha256::Hash::from_slice(&[0;32][..]).unwrap())
@ -2057,8 +2054,8 @@ mod test {
fn test_builder_ok() {
use crate::*;
use lightning_types::routing::RouteHintHop;
use secp256k1::Secp256k1;
use secp256k1::{SecretKey, PublicKey};
use bitcoin::secp256k1::Secp256k1;
use bitcoin::secp256k1::{SecretKey, PublicKey};
use std::time::Duration;
let secp_ctx = Secp256k1::new();
@ -2178,8 +2175,8 @@ mod test {
#[test]
fn test_default_values() {
use crate::*;
use secp256k1::Secp256k1;
use secp256k1::SecretKey;
use bitcoin::secp256k1::Secp256k1;
use bitcoin::secp256k1::SecretKey;
let signed_invoice = InvoiceBuilder::new(Currency::Bitcoin)
.description("Test".into())
@ -2204,8 +2201,8 @@ mod test {
#[test]
fn test_expiration() {
use crate::*;
use secp256k1::Secp256k1;
use secp256k1::SecretKey;
use bitcoin::secp256k1::Secp256k1;
use bitcoin::secp256k1::SecretKey;
let signed_invoice = InvoiceBuilder::new(Currency::Bitcoin)
.description("Test".into())
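
bitcoin 0.32 removes `bitcoin::address::Payload`: addresses are now built directly with constructors such as `Address::from_witness_program`, `Address::p2pkh` and `Address::p2sh_from_hash`, and `script_pubkey()` is called on the `Address` itself (the `Address::payload()` accessor used elsewhere in the diff is gone too). A minimal sketch mirroring the `fallback_addresses` hunk above, with the hash and program values taken as parameters:

use bitcoin::{Address, Network, PubkeyHash, ScriptHash, WitnessProgram, WitnessVersion};

fn fallbacks_to_addresses(
	network: Network, version: WitnessVersion, program: &[u8], pkh: PubkeyHash, sh: ScriptHash,
) -> Vec<Address> {
	let mut addresses = Vec::new();
	// Segwit fallbacks: build the witness program first, then the address.
	if let Ok(witness_program) = WitnessProgram::new(version, program) {
		addresses.push(Address::from_witness_program(witness_program, network));
	}
	// Legacy fallbacks no longer go through Payload variants.
	addresses.push(Address::p2pkh(pkh, network));
	addresses.push(Address::p2sh_from_hash(sh, network));
	addresses
}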

View file

@ -297,7 +297,7 @@ impl ToBase32 for PayeePubKey {
impl Base32Len for PayeePubKey {
fn base32_len(&self) -> usize {
bytes_size_to_base32_size(secp256k1::constants::PUBLIC_KEY_SIZE)
bytes_size_to_base32_size(bitcoin::secp256k1::constants::PUBLIC_KEY_SIZE)
}
}

View file

@ -1,13 +1,12 @@
extern crate bech32;
extern crate lightning_invoice;
extern crate secp256k1;
use bitcoin::{PubkeyHash, ScriptHash, WitnessVersion};
use bitcoin::hex::FromHex;
use bitcoin::hashes::{sha256, Hash};
use lightning_invoice::*;
use secp256k1::PublicKey;
use secp256k1::ecdsa::{RecoverableSignature, RecoveryId};
use bitcoin::secp256k1::PublicKey;
use bitcoin::secp256k1::ecdsa::{RecoverableSignature, RecoveryId};
use std::collections::HashSet;
use std::time::Duration;
use std::str::FromStr;

View file

@ -15,7 +15,7 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
bitcoin = "0.31.2"
bitcoin = "0.32.2"
lightning = { version = "0.0.123-beta", path = "../lightning" }
tokio = { version = "1.35", features = [ "rt", "sync", "net", "time" ] }

View file

@ -14,7 +14,7 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
bitcoin = "0.31.2"
bitcoin = "0.32.2"
lightning = { version = "0.0.123-beta", path = "../lightning" }
[target.'cfg(windows)'.dependencies]
@ -25,7 +25,7 @@ criterion = { version = "0.4", optional = true, default-features = false }
[dev-dependencies]
lightning = { version = "0.0.123-beta", path = "../lightning", features = ["_test_utils"] }
bitcoin = { version = "0.31.2", default-features = false }
bitcoin = { version = "0.32.2", default-features = false }
[lints]
workspace = true

View file

@ -91,7 +91,7 @@ impl FilesystemStore {
}
impl KVStore for FilesystemStore {
fn read(&self, primary_namespace: &str, secondary_namespace: &str, key: &str) -> std::io::Result<Vec<u8>> {
fn read(&self, primary_namespace: &str, secondary_namespace: &str, key: &str) -> lightning::io::Result<Vec<u8>> {
check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "read")?;
let mut dest_file_path = self.get_dest_dir_path(primary_namespace, secondary_namespace)?;
@ -114,7 +114,7 @@ impl KVStore for FilesystemStore {
Ok(buf)
}
fn write(&self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: &[u8]) -> std::io::Result<()> {
fn write(&self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: &[u8]) -> lightning::io::Result<()> {
check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "write")?;
let mut dest_file_path = self.get_dest_dir_path(primary_namespace, secondary_namespace)?;
@ -191,7 +191,7 @@ impl KVStore for FilesystemStore {
dest_file.sync_all()?;
Ok(())
}
Err(e) => Err(e),
Err(e) => Err(e.into()),
}
}
};
@ -201,7 +201,7 @@ impl KVStore for FilesystemStore {
res
}
fn remove(&self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool) -> std::io::Result<()> {
fn remove(&self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool) -> lightning::io::Result<()> {
check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "remove")?;
let mut dest_file_path = self.get_dest_dir_path(primary_namespace, secondary_namespace)?;
@ -290,7 +290,7 @@ impl KVStore for FilesystemStore {
Ok(())
}
fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> std::io::Result<Vec<String>> {
fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> lightning::io::Result<Vec<String>> {
check_namespace_key_validity(primary_namespace, secondary_namespace, None, "list")?;
let prefixed_dest = self.get_dest_dir_path(primary_namespace, secondary_namespace)?;
@ -331,7 +331,7 @@ impl KVStore for FilesystemStore {
PrintableString(primary_namespace), PrintableString(secondary_namespace));
let msg = format!("Failed to list keys of {}/{}: file couldn't be accessed.",
PrintableString(primary_namespace), PrintableString(secondary_namespace));
return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
return Err(lightning::io::Error::new(lightning::io::ErrorKind::Other, msg));
}
match p.strip_prefix(&prefixed_dest) {
@ -345,7 +345,7 @@ impl KVStore for FilesystemStore {
PrintableString(primary_namespace), PrintableString(secondary_namespace));
let msg = format!("Failed to list keys of {}/{}: file path is not valid UTF-8",
PrintableString(primary_namespace), PrintableString(secondary_namespace));
return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
return Err(lightning::io::Error::new(lightning::io::ErrorKind::Other, msg));
}
}
Err(e) => {
@ -353,7 +353,7 @@ impl KVStore for FilesystemStore {
PrintableString(primary_namespace), PrintableString(secondary_namespace), e);
let msg = format!("Failed to list keys of {}/{}: {}",
PrintableString(primary_namespace), PrintableString(secondary_namespace), e);
return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
return Err(lightning::io::Error::new(lightning::io::ErrorKind::Other, msg));
}
}
}
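
The `KVStore` methods above now return `lightning::io::Result` rather than `std::io::Result`, so `std::io::Error` values coming from the filesystem are converted with `.into()`, and errors constructed in place use `lightning::io::Error::new`. A minimal sketch of both patterns, assuming a `std`-enabled build:

fn read_raw(path: &std::path::Path) -> lightning::io::Result<Vec<u8>> {
	// std::io::Error converts into lightning::io::Error via `.into()`.
	std::fs::read(path).map_err(|e| e.into())
}

fn listing_failed(primary_namespace: &str, secondary_namespace: &str) -> lightning::io::Error {
	let msg = format!(
		"Failed to list keys of {}/{}: file couldn't be accessed.",
		primary_namespace, secondary_namespace
	);
	lightning::io::Error::new(lightning::io::ErrorKind::Other, msg)
}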

View file

@ -16,7 +16,7 @@ std = ["lightning/std"]
[dependencies]
lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false }
bitcoin = { version = "0.31.2", default-features = false }
bitcoin = { version = "0.32.2", default-features = false }
[target.'cfg(ldk_bench)'.dependencies]
criterion = { version = "0.4", optional = true, default-features = false }

View file

@ -70,9 +70,6 @@ extern crate alloc;
use core::ops::Deref;
use core::sync::atomic::{AtomicBool, Ordering};
#[cfg(feature = "std")]
use std::fs::File;
use lightning::io;
use lightning::ln::msgs::{DecodeError, LightningError};
use lightning::routing::gossip::NetworkGraph;
@ -146,12 +143,16 @@ where
///
/// `sync_path`: Path to the file where the gossip update data is located
///
#[cfg(feature = "std")]
#[cfg(all(feature = "std", not(feature = "no-std")))]
pub fn sync_network_graph_with_file_path(
&self, sync_path: &str,
) -> Result<u32, GraphSyncError> {
let mut file = File::open(sync_path)?;
self.update_network_graph_from_byte_stream(&mut file)
let file = std::fs::File::open(sync_path).map_err(|e| {
let bitcoin_error: lightning::io::Error = e.into();
bitcoin_error
})?;
let mut buf_reader = std::io::BufReader::new(file);
self.update_network_graph_from_byte_stream(&mut buf_reader)
}
/// Update network graph from binary data.
@ -190,7 +191,7 @@ where
}
}
#[cfg(feature = "std")]
#[cfg(all(feature = "std", not(feature = "no-std")))]
#[cfg(test)]
mod tests {
use std::fs;

View file

@ -63,7 +63,7 @@ where
}
pub(crate) fn update_network_graph_from_byte_stream_no_std<R: io::Read>(
&self, mut read_cursor: &mut R, current_time_unix: Option<u64>,
&self, read_cursor: &mut R, current_time_unix: Option<u64>,
) -> Result<u32, GraphSyncError> {
log_trace!(self.logger, "Processing RGS data...");
let mut protocol_prefix = [0u8; 3];
@ -73,7 +73,7 @@ where
return Err(DecodeError::UnknownVersion.into());
}
let version: u8 = Readable::read(&mut read_cursor)?;
let version: u8 = Readable::read(read_cursor)?;
if version != 1 && version != 2 {
return Err(DecodeError::UnknownVersion.into());
}
@ -187,7 +187,7 @@ where
for address_index in 0..address_count {
let current_byte_count: u8 = Readable::read(read_cursor)?;
let mut address_reader =
FixedLengthReader::new(&mut read_cursor, current_byte_count as u64);
FixedLengthReader::new(read_cursor, current_byte_count as u64);
if let Ok(current_address) = Readable::read(&mut address_reader) {
node_addresses.push(current_address);
if address_reader.bytes_remain() {
@ -330,11 +330,11 @@ where
}
// obtain default values for non-incremental updates
let default_cltv_expiry_delta: u16 = Readable::read(&mut read_cursor)?;
let default_htlc_minimum_msat: u64 = Readable::read(&mut read_cursor)?;
let default_fee_base_msat: u32 = Readable::read(&mut read_cursor)?;
let default_fee_proportional_millionths: u32 = Readable::read(&mut read_cursor)?;
let default_htlc_maximum_msat: u64 = Readable::read(&mut read_cursor)?;
let default_cltv_expiry_delta: u16 = Readable::read(read_cursor)?;
let default_htlc_minimum_msat: u64 = Readable::read(read_cursor)?;
let default_fee_base_msat: u32 = Readable::read(read_cursor)?;
let default_fee_proportional_millionths: u32 = Readable::read(read_cursor)?;
let default_htlc_maximum_msat: u64 = Readable::read(read_cursor)?;
let mut previous_channel_direction = None;

View file

@ -24,18 +24,18 @@ async-interface = []
[dependencies]
lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false, features = ["std"] }
bitcoin = { version = "0.31.2", default-features = false }
bitcoin = { version = "0.32.2", default-features = false }
bdk-macros = "0.6"
futures = { version = "0.3", optional = true }
esplora-client = { version = "0.7", default-features = false, optional = true }
electrum-client = { version = "0.19.0", optional = true }
esplora-client = { version = "0.9", default-features = false, optional = true }
electrum-client = { version = "0.21.0", optional = true }
[dev-dependencies]
lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false, features = ["std", "_test_utils"] }
tokio = { version = "1.35.0", features = ["full"] }
[target.'cfg(not(target_os = "windows"))'.dev-dependencies]
electrsd = { version = "0.27.3", default-features = false, features = ["legacy"] }
electrsd = { version = "0.28.0", default-features = false, features = ["legacy"] }
[lints]
workspace = true

View file

@ -74,11 +74,12 @@ impl SyncState {
);
}
self.watched_transactions.remove(&ctx.tx.txid());
self.watched_transactions.remove(&ctx.tx.compute_txid());
for input in &ctx.tx.input {
if let Some(output) = self.watched_outputs.remove(&input.previous_output) {
let spent = (ctx.tx.txid(), ctx.block_height, input.previous_output, output);
let spent =
(ctx.tx.compute_txid(), ctx.block_height, input.previous_output, output);
self.outputs_spends_pending_threshold_conf.push(spent);
}
}

View file

@ -432,7 +432,7 @@ where
fn get_confirmed_tx(
&self, tx: &Transaction, prob_conf_height: u32,
) -> Result<ConfirmedTx, InternalError> {
let txid = tx.txid();
let txid = tx.compute_txid();
match self.client.transaction_get_merkle(&txid, prob_conf_height as usize) {
Ok(merkle_res) => {
debug_assert_eq!(prob_conf_height, merkle_res.block_height as u32);

View file

@ -367,7 +367,7 @@ where
// unwrap() safety: len() > 0 is checked above
let pos = *indexes.first().unwrap() as usize;
if let Some(tx) = maybe_await!(self.client.get_tx(&txid))? {
if tx.txid() != txid {
if tx.compute_txid() != txid {
log_error!(self.logger, "Retrieved transaction for txid {} doesn't match expectations. This should not happen. Please verify server integrity.", txid);
return Err(InternalError::Failed);
}

View file

@ -134,7 +134,7 @@ impl TestConfirmable {
impl Confirm for TestConfirmable {
fn transactions_confirmed(&self, header: &Header, txdata: &TransactionData<'_>, height: u32) {
for (_, tx) in txdata {
let txid = tx.txid();
let txid = tx.compute_txid();
let block_hash = header.block_hash();
self.confirmed_txs.lock().unwrap().insert(txid, (block_hash, height));
self.unconfirmed_txs.lock().unwrap().remove(&txid);
@ -205,7 +205,7 @@ macro_rules! test_syncing {
None,
)
.unwrap();
$tx_sync.register_tx(&txid, &new_address.payload().script_pubkey());
$tx_sync.register_tx(&txid, &new_address.script_pubkey());
maybe_await!($tx_sync.sync(vec![&$confirmable])).unwrap();

View file

@ -16,7 +16,7 @@ rustdoc-args = ["--cfg", "docsrs"]
_test_utils = []
[dependencies]
bitcoin = { version = "0.31", default-features = false }
bitcoin = { version = "0.32.2", default-features = false }
# TODO: Once we switch to bitcoin 0.32 drop this explicit dep:
hex-conservative = { version = "0.2", default-features = false }
bech32 = { version = "0.9", default-features = false }

View file

@ -31,8 +31,8 @@ unsafe_revoked_tx_signing = []
# Override signing to not include randomness when generating signatures for test vectors.
_test_vectors = []
no-std = ["hashbrown", "possiblyrandom", "bitcoin/no-std", "lightning-invoice/no-std", "core2/alloc", "libm"]
std = ["bitcoin/std", "bech32/std", "lightning-invoice/std"]
no-std = ["hashbrown", "possiblyrandom", "lightning-invoice/no-std", "libm"]
std = ["lightning-invoice/std", "bech32/std"]
# Generates low-r bitcoin signatures, which saves 1 byte in 50% of the cases
grind_signatures = []
@ -44,14 +44,13 @@ lightning-types = { version = "0.1", path = "../lightning-types", default-featur
lightning-invoice = { version = "0.31.0-beta", path = "../lightning-invoice", default-features = false }
bech32 = { version = "0.9.1", default-features = false }
bitcoin = { version = "0.31.2", default-features = false, features = ["secp-recovery"] }
bitcoin = { version = "0.32.2", default-features = false, features = ["secp-recovery"] }
hashbrown = { version = "0.13", optional = true, default-features = false }
possiblyrandom = { version = "0.2", optional = true, default-features = false }
regex = { version = "1.5.6", optional = true }
backtrace = { version = "0.3", optional = true }
core2 = { version = "0.3.0", optional = true, default-features = false }
libm = { version = "0.2", optional = true, default-features = false }
[dev-dependencies]
@ -59,7 +58,7 @@ regex = "1.5.6"
lightning-types = { version = "0.1", path = "../lightning-types", features = ["_test_utils"] }
[dev-dependencies.bitcoin]
version = "0.31.2"
version = "0.32.2"
default-features = false
features = ["bitcoinconsensus", "secp-recovery"]
@ -67,7 +66,7 @@ features = ["bitcoinconsensus", "secp-recovery"]
criterion = { version = "0.4", optional = true, default-features = false }
[target.'cfg(taproot)'.dependencies]
musig2 = { git = "https://github.com/arik-so/rust-musig2", rev = "739533fc" }
musig2 = { git = "https://github.com/arik-so/rust-musig2", rev = "6f95a05718cbb44d8fe3fa6021aea8117aa38d50" }
[lints]
workspace = true

View file

@ -151,14 +151,15 @@ impl Writeable for BlindedPath {
impl Readable for BlindedPath {
fn read<R: io::Read>(r: &mut R) -> Result<Self, DecodeError> {
let mut first_byte: u8 = Readable::read(r)?;
let first_byte: u8 = Readable::read(r)?;
let introduction_node = match first_byte {
0 => IntroductionNode::DirectedShortChannelId(Direction::NodeOne, Readable::read(r)?),
1 => IntroductionNode::DirectedShortChannelId(Direction::NodeTwo, Readable::read(r)?),
2|3 => {
use io::Read;
let mut pubkey_read = core::slice::from_mut(&mut first_byte).chain(r.by_ref());
IntroductionNode::NodeId(Readable::read(&mut pubkey_read)?)
let mut bytes = [0; 33];
bytes[0] = first_byte;
r.read_exact(&mut bytes[1..])?;
IntroductionNode::NodeId(Readable::read(&mut &bytes[..])?)
},
_ => return Err(DecodeError::InvalidValue),
};
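
The `BlindedPath` reader above previously stitched the already-consumed discriminant byte back onto the stream with `Read::chain`; it now copies that byte into a fixed 33-byte buffer and fills the remainder with `read_exact` before parsing the public key. A minimal sketch of the same pattern, using `std::io::Read` for illustration (the real code reads from `lightning::io`):

use bitcoin::secp256k1::PublicKey;
use std::io::Read;

fn read_node_id<R: Read>(first_byte: u8, r: &mut R) -> std::io::Result<PublicKey> {
	let mut bytes = [0u8; 33];
	bytes[0] = first_byte;
	// Fill in the remaining 32 bytes of the serialized compressed pubkey.
	r.read_exact(&mut bytes[1..])?;
	PublicKey::from_slice(&bytes)
		.map_err(|_| std::io::Error::new(std::io::ErrorKind::InvalidData, "invalid public key"))
}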

View file

@ -30,9 +30,7 @@ use bitcoin::hashes::sha256::Hash as Sha256;
use bitcoin::hash_types::{Txid, BlockHash};
use bitcoin::ecdsa::Signature as BitcoinSignature;
use bitcoin::secp256k1::{Secp256k1, ecdsa::Signature};
use bitcoin::secp256k1::{SecretKey, PublicKey};
use bitcoin::secp256k1;
use bitcoin::secp256k1::{self, SecretKey, PublicKey, Secp256k1, ecdsa::Signature};
use crate::ln::channel::INITIAL_COMMITMENT_NUMBER;
use crate::ln::types::{PaymentHash, PaymentPreimage, ChannelId};
@ -2143,7 +2141,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
} = &event.event {
if event.transaction.as_ref().map(|tx| tx.input.iter().any(|inp| {
if let Some(htlc_spend_txid) = htlc_spend_txid_opt {
tx.txid() == *htlc_spend_txid || inp.previous_output.txid == *htlc_spend_txid
tx.compute_txid() == *htlc_spend_txid || inp.previous_output.txid == *htlc_spend_txid
} else {
Some(inp.previous_output.txid) == confirmed_txid &&
inp.previous_output.vout == htlc_commitment_tx_output_idx
@ -2592,7 +2590,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitor<Signer> {
macro_rules! fail_unbroadcast_htlcs {
($self: expr, $commitment_tx_type: expr, $commitment_txid_confirmed: expr, $commitment_tx_confirmed: expr,
$commitment_tx_conf_height: expr, $commitment_tx_conf_hash: expr, $confirmed_htlcs_list: expr, $logger: expr) => { {
debug_assert_eq!($commitment_tx_confirmed.txid(), $commitment_txid_confirmed);
debug_assert_eq!($commitment_tx_confirmed.compute_txid(), $commitment_txid_confirmed);
macro_rules! check_htlc_fails {
($txid: expr, $commitment_tx: expr, $per_commitment_outpoints: expr) => {
@ -3225,7 +3223,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
// introduced with v0.0.116. counterparty_node_id is guaranteed to be `Some`
// since v0.0.110.
let counterparty_node_id = self.counterparty_node_id.unwrap();
let commitment_txid = commitment_tx.txid();
let commitment_txid = commitment_tx.compute_txid();
debug_assert_eq!(self.current_holder_commitment_tx.txid, commitment_txid);
let pending_htlcs = self.current_holder_commitment_tx.non_dust_htlcs();
let commitment_tx_fee_satoshis = self.channel_value_satoshis -
@ -3405,7 +3403,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
let mut claimable_outpoints = Vec::new();
let mut to_counterparty_output_info = None;
let commitment_txid = tx.txid(); //TODO: This is gonna be a performance bottleneck for watchtowers!
let commitment_txid = tx.compute_txid(); //TODO: This is gonna be a performance bottleneck for watchtowers!
let per_commitment_option = self.counterparty_claimable_outpoints.get(&commitment_txid);
macro_rules! ignore_error {
@ -3591,7 +3589,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
};
let per_commitment_point = PublicKey::from_secret_key(&self.onchain_tx_handler.secp_ctx, &per_commitment_key);
let htlc_txid = tx.txid();
let htlc_txid = tx.compute_txid();
let mut claimable_outpoints = vec![];
let mut outputs_to_watch = None;
// Previously, we would only claim HTLCs from revoked HTLC transactions if they had 1 input
@ -3683,7 +3681,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
/// Should not be used if check_spend_revoked_transaction succeeds.
/// Returns None unless the transaction is definitely one of our commitment transactions.
fn check_spend_holder_transaction<L: Deref>(&mut self, tx: &Transaction, height: u32, block_hash: &BlockHash, logger: &L) -> Option<(Vec<PackageTemplate>, TransactionOutputs)> where L::Target: Logger {
let commitment_txid = tx.txid();
let commitment_txid = tx.compute_txid();
let mut claim_requests = Vec::new();
let mut watch_outputs = Vec::new();
@ -3788,7 +3786,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
) -> Vec<Transaction> where L::Target: Logger {
log_debug!(logger, "Getting signed copy of latest holder commitment transaction!");
let commitment_tx = self.onchain_tx_handler.get_fully_signed_copy_holder_tx(&self.funding_redeemscript);
let txid = commitment_tx.txid();
let txid = commitment_tx.compute_txid();
let mut holder_transactions = vec![commitment_tx];
// When anchor outputs are present, the HTLC transactions are only final once the commitment
// transaction confirms due to the CSV 1 encumberance.
@ -3887,7 +3885,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
let mut watch_outputs = Vec::new();
let mut claimable_outpoints = Vec::new();
'tx_iter: for tx in &txn_matched {
let txid = tx.txid();
let txid = tx.compute_txid();
log_trace!(logger, "Transaction {} confirmed in block {}", txid , block_hash);
// If a transaction has already been confirmed, ensure we don't bother processing it duplicatively.
if Some(txid) == self.funding_spend_confirmed {
@ -4133,7 +4131,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
// make sure the registered scriptpubkey at the expected index match
// the actual transaction output one. We failed this case before #653.
for tx in &txn_matched {
if let Some(outputs) = self.get_outputs_to_watch().get(&tx.txid()) {
if let Some(outputs) = self.get_outputs_to_watch().get(&tx.compute_txid()) {
for idx_and_script in outputs.iter() {
assert!((idx_and_script.0 as usize) < tx.output.len());
assert_eq!(tx.output[idx_and_script.0 as usize].script_pubkey, idx_and_script.1);
@ -4208,7 +4206,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
}
}
if matches {
matched_txn.insert(tx.txid());
matched_txn.insert(tx.compute_txid());
}
matches
}).map(|(_, tx)| *tx).collect()
@ -4235,7 +4233,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
assert_eq!(&bitcoin::Address::p2wsh(&ScriptBuf::from(input.witness.last().unwrap().to_vec()), bitcoin::Network::Bitcoin).script_pubkey(), _script_pubkey);
} else if _script_pubkey.is_p2wpkh() {
assert_eq!(&bitcoin::Address::p2wpkh(&bitcoin::PublicKey::from_slice(&input.witness.last().unwrap()).unwrap(), bitcoin::Network::Bitcoin).unwrap().script_pubkey(), _script_pubkey);
assert_eq!(&bitcoin::Address::p2wpkh(&bitcoin::CompressedPublicKey(bitcoin::PublicKey::from_slice(&input.witness.last().unwrap()).unwrap().inner), bitcoin::Network::Bitcoin).script_pubkey(), _script_pubkey);
} else { panic!(); }
}
return true;
@ -4362,12 +4360,12 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
if ($holder_tx && revocation_sig_claim) ||
(outbound_htlc && !$source_avail && (accepted_preimage_claim || offered_preimage_claim)) {
log_error!(logger, "Input spending {} ({}:{}) in {} resolves {} HTLC with payment hash {} with {}!",
$tx_info, input.previous_output.txid, input.previous_output.vout, tx.txid(),
$tx_info, input.previous_output.txid, input.previous_output.vout, tx.compute_txid(),
if outbound_htlc { "outbound" } else { "inbound" }, &$htlc.payment_hash,
if revocation_sig_claim { "revocation sig" } else { "preimage claim after we'd passed the HTLC resolution back. We can likely claim the HTLC output with a revocation claim" });
} else {
log_info!(logger, "Input spending {} ({}:{}) in {} resolves {} HTLC with payment hash {} with {}",
$tx_info, input.previous_output.txid, input.previous_output.vout, tx.txid(),
$tx_info, input.previous_output.txid, input.previous_output.vout, tx.compute_txid(),
if outbound_htlc { "outbound" } else { "inbound" }, &$htlc.payment_hash,
if revocation_sig_claim { "revocation sig" } else if accepted_preimage_claim || offered_preimage_claim { "preimage" } else { "timeout" });
}
@ -4414,7 +4412,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
log_claim!($tx_info, $holder_tx, htlc_output, false);
let outbound_htlc = $holder_tx == htlc_output.offered;
self.onchain_events_awaiting_threshold_conf.push(OnchainEventEntry {
txid: tx.txid(), height, block_hash: Some(*block_hash), transaction: Some(tx.clone()),
txid: tx.compute_txid(), height, block_hash: Some(*block_hash), transaction: Some(tx.clone()),
event: OnchainEvent::HTLCSpendConfirmation {
commitment_tx_output_idx: input.previous_output.vout,
preimage: if accepted_preimage_claim || offered_preimage_claim {
@ -4456,7 +4454,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
if !self.pending_monitor_events.iter().any(
|update| if let &MonitorEvent::HTLCEvent(ref upd) = update { upd.source == source } else { false }) {
self.onchain_events_awaiting_threshold_conf.push(OnchainEventEntry {
txid: tx.txid(),
txid: tx.compute_txid(),
height,
block_hash: Some(*block_hash),
transaction: Some(tx.clone()),
@ -4479,7 +4477,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
upd.source == source
} else { false }) {
self.onchain_events_awaiting_threshold_conf.push(OnchainEventEntry {
txid: tx.txid(),
txid: tx.compute_txid(),
transaction: Some(tx.clone()),
height,
block_hash: Some(*block_hash),
@ -4507,7 +4505,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
}
});
let entry = OnchainEventEntry {
txid: tx.txid(),
txid: tx.compute_txid(),
transaction: Some(tx.clone()),
height,
block_hash: Some(*block_hash),
@ -4529,7 +4527,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
for (i, outp) in tx.output.iter().enumerate() {
if outp.script_pubkey == self.destination_script {
spendable_outputs.push(SpendableOutputDescriptor::StaticOutput {
outpoint: OutPoint { txid: tx.txid(), index: i as u16 },
outpoint: OutPoint { txid: tx.compute_txid(), index: i as u16 },
output: outp.clone(),
channel_keys_id: Some(self.channel_keys_id),
});
@ -4537,7 +4535,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
if let Some(ref broadcasted_holder_revokable_script) = self.broadcasted_holder_revokable_script {
if broadcasted_holder_revokable_script.0 == outp.script_pubkey {
spendable_outputs.push(SpendableOutputDescriptor::DelayedPaymentOutput(DelayedPaymentOutputDescriptor {
outpoint: OutPoint { txid: tx.txid(), index: i as u16 },
outpoint: OutPoint { txid: tx.compute_txid(), index: i as u16 },
per_commitment_point: broadcasted_holder_revokable_script.1,
to_self_delay: self.on_holder_tx_csv,
output: outp.clone(),
@ -4550,7 +4548,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
}
if self.counterparty_payment_script == outp.script_pubkey {
spendable_outputs.push(SpendableOutputDescriptor::StaticPaymentOutput(StaticPaymentOutputDescriptor {
outpoint: OutPoint { txid: tx.txid(), index: i as u16 },
outpoint: OutPoint { txid: tx.compute_txid(), index: i as u16 },
output: outp.clone(),
channel_keys_id: self.channel_keys_id,
channel_value_satoshis: self.channel_value_satoshis,
@ -4559,7 +4557,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
}
if self.shutdown_script.as_ref() == Some(&outp.script_pubkey) {
spendable_outputs.push(SpendableOutputDescriptor::StaticOutput {
outpoint: OutPoint { txid: tx.txid(), index: i as u16 },
outpoint: OutPoint { txid: tx.compute_txid(), index: i as u16 },
output: outp.clone(),
channel_keys_id: Some(self.channel_keys_id),
});
@ -4575,7 +4573,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
) where L::Target: Logger {
for spendable_output in self.get_spendable_outputs(tx) {
let entry = OnchainEventEntry {
txid: tx.txid(),
txid: tx.compute_txid(),
transaction: Some(tx.clone()),
height,
block_hash: Some(*block_hash),
@ -5068,7 +5066,7 @@ mod tests {
assert!(txn_broadcasted.len() >= 2);
let htlc_txn = txn_broadcasted.iter().filter(|tx| {
assert_eq!(tx.input.len(), 1);
tx.input[0].previous_output.txid == broadcast_tx.txid()
tx.input[0].previous_output.txid == broadcast_tx.compute_txid()
}).collect::<Vec<_>>();
assert_eq!(htlc_txn.len(), 2);
check_spends!(htlc_txn[0], broadcast_tx);
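
In bitcoin 0.32, `Address::p2wpkh` takes a `CompressedPublicKey` (a thin wrapper around `secp256k1::PublicKey`) instead of a `bitcoin::PublicKey` and no longer returns a `Result`, which is why the `.unwrap()` disappears in the assertion above. A minimal sketch:

use bitcoin::secp256k1::PublicKey;
use bitcoin::{Address, CompressedPublicKey, Network, ScriptBuf};

fn p2wpkh_script_pubkey(pk: PublicKey) -> ScriptBuf {
	// CompressedPublicKey wraps the inner secp256k1 key directly.
	let compressed = CompressedPublicKey(pk);
	Address::p2wpkh(&compressed, Network::Bitcoin).script_pubkey()
}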

View file

@ -514,7 +514,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
log_info!(logger, "{} onchain {}", log_start, log_tx!(tx.0));
broadcaster.broadcast_transactions(&[&tx.0]);
} else {
log_info!(logger, "Waiting for signature of unsigned onchain transaction {}", tx.0.txid());
log_info!(logger, "Waiting for signature of unsigned onchain transaction {}", tx.0.compute_txid());
}
},
OnchainClaim::Event(event) => {
@ -535,7 +535,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
}
}
/// Returns true if we are currently tracking any pending claim requests that are not fully
/// confirmed yet.
pub(super) fn has_pending_claims(&self) -> bool
{
@ -619,7 +619,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
let predicted_weight = cached_request.package_weight(&self.destination_script);
if let Some((output_value, new_feerate)) = cached_request.compute_package_output(
predicted_weight, self.destination_script.dust_value().to_sat(),
predicted_weight, self.destination_script.minimal_non_dust().to_sat(),
feerate_strategy, fee_estimator, logger,
) {
assert!(new_feerate != 0);
@ -647,7 +647,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
// Commitment inputs with anchors support are the only untractable inputs supported
// thus far that require external funding.
PackageSolvingData::HolderFundingOutput(output) => {
debug_assert_eq!(tx.0.txid(), self.holder_commitment.trust().txid(),
debug_assert_eq!(tx.0.compute_txid(), self.holder_commitment.trust().txid(),
"Holder commitment transaction mismatch");
let conf_target = ConfirmationTarget::OnChainSweep;
@ -659,7 +659,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
compute_feerate_sat_per_1000_weight(fee_sat, tx.0.weight().to_wu());
if commitment_tx_feerate_sat_per_1000_weight >= package_target_feerate_sat_per_1000_weight {
log_debug!(logger, "Pre-signed commitment {} already has feerate {} sat/kW above required {} sat/kW",
tx.0.txid(), commitment_tx_feerate_sat_per_1000_weight,
tx.0.compute_txid(), commitment_tx_feerate_sat_per_1000_weight,
package_target_feerate_sat_per_1000_weight);
return Some((new_timer, 0, OnchainClaim::Tx(tx.clone())));
}
@ -811,9 +811,9 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
log_info!(logger, "Broadcasting onchain {}", log_tx!(tx.0));
broadcaster.broadcast_transactions(&[&tx.0]);
} else {
log_info!(logger, "Waiting for signature of unsigned onchain transaction {}", tx.0.txid());
log_info!(logger, "Waiting for signature of unsigned onchain transaction {}", tx.0.compute_txid());
}
ClaimId(tx.0.txid().to_byte_array())
ClaimId(tx.0.compute_txid().to_byte_array())
},
OnchainClaim::Event(claim_event) => {
log_info!(logger, "Yielding onchain event to spend inputs {:?}", req.outpoints());
@ -821,7 +821,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
ClaimEvent::BumpCommitment { ref commitment_tx, .. } =>
// For commitment claims, we can just use their txid as it should
// already be unique.
ClaimId(commitment_tx.txid().to_byte_array()),
ClaimId(commitment_tx.compute_txid().to_byte_array()),
ClaimEvent::BumpHTLC { ref htlcs, .. } => {
// For HTLC claims, commit to the entire set of HTLC outputs to
// claim, which will always be unique per request. Once a claim ID
@ -900,7 +900,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
macro_rules! clean_claim_request_after_safety_delay {
() => {
let entry = OnchainEventEntry {
txid: tx.txid(),
txid: tx.compute_txid(),
height: conf_height,
block_hash: Some(conf_hash),
event: OnchainEvent::Claim { claim_id: *claim_id }
@ -953,7 +953,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
}
for package in claimed_outputs_material.drain(..) {
let entry = OnchainEventEntry {
txid: tx.txid(),
txid: tx.compute_txid(),
height: conf_height,
block_hash: Some(conf_hash),
event: OnchainEvent::ContentiousOutpoint { package },
@ -1023,7 +1023,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
broadcaster.broadcast_transactions(&[&bump_tx.0]);
} else {
log_info!(logger, "Waiting for signature of RBF-bumped unsigned onchain transaction {}",
bump_tx.0.txid());
bump_tx.0.compute_txid());
}
},
OnchainClaim::Event(claim_event) => {
@ -1110,7 +1110,7 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
log_info!(logger, "Broadcasting onchain {}", log_tx!(bump_tx.0));
broadcaster.broadcast_transactions(&[&bump_tx.0]);
} else {
log_info!(logger, "Waiting for signature of unsigned onchain transaction {}", bump_tx.0.txid());
log_info!(logger, "Waiting for signature of unsigned onchain transaction {}", bump_tx.0.compute_txid());
}
},
OnchainClaim::Event(claim_event) => {
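
`Script::dust_value()` is renamed to `minimal_non_dust()` in bitcoin 0.32 (used above for the claim destination script, and again for the change output in the fee-bumping code further down); it still returns the smallest `Amount` an output paying to this script can hold without being considered dust. A minimal sketch of that change-output decision:

use bitcoin::{Amount, ScriptBuf};

fn change_output_amount(remaining_amount: Amount, change_script: &ScriptBuf) -> Option<Amount> {
	// Drop the change output entirely if it would be dust under the renamed helper.
	if remaining_amount < change_script.minimal_non_dust() {
		None
	} else {
		Some(remaining_amount)
	}
}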

View file

@ -98,11 +98,11 @@ mod tests {
fn test_channel_id_calculation() {
let tx: Transaction = encode::deserialize(&<Vec<u8>>::from_hex("020000000001010e0adef48412e4361325ac1c6e36411299ab09d4f083b9d8ddb55fbc06e1b0c00000000000feffffff0220a1070000000000220020f81d95e040bd0a493e38bae27bff52fe2bb58b93b293eb579c01c31b05c5af1dc072cfee54a3000016001434b1d6211af5551905dc2642d05f5b04d25a8fe80247304402207f570e3f0de50546aad25a872e3df059d277e776dda4269fa0d2cc8c2ee6ec9a022054e7fae5ca94d47534c86705857c24ceea3ad51c69dd6051c5850304880fc43a012103cb11a1bacc223d98d91f1946c6752e358a5eb1a1c983b3e6fb15378f453b76bd00000000").unwrap()[..]).unwrap();
assert_eq!(&ChannelId::v1_from_funding_outpoint(OutPoint {
txid: tx.txid(),
txid: tx.compute_txid(),
index: 0
}).0[..], &<Vec<u8>>::from_hex("3e88dd7165faf7be58b3c5bb2c9c452aebef682807ea57080f62e6f6e113c25e").unwrap()[..]);
assert_eq!(&ChannelId::v1_from_funding_outpoint(OutPoint {
txid: tx.txid(),
txid: tx.compute_txid(),
index: 1
}).0[..], &<Vec<u8>>::from_hex("3e88dd7165faf7be58b3c5bb2c9c452aebef682807ea57080f62e6f6e113c25f").unwrap()[..]);
}
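A sketch of the rule this test exercises, leaving byte-order handling aside (hypothetical helper; see `ChannelId::v1_from_funding_outpoint` for the real derivation). The 16-bit funding output index is XORed into the last two bytes of the txid bytes, which is why the two expected ids above differ only in their final byte:

// XOR the funding output index into the last two bytes of the 32-byte txid buffer.
fn xor_index_into_txid_bytes(mut txid_bytes: [u8; 32], index: u16) -> [u8; 32] {
    txid_bytes[30] ^= (index >> 8) as u8;
    txid_bytes[31] ^= (index & 0xff) as u8;
    txid_bytes
}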

View file

@ -57,12 +57,12 @@ impl<T: Readable> LengthReadableArgs<[u8; 32]> for ChaChaPolyReadAdapter<T> {
// Simultaneously read and decrypt an object from a LengthRead, storing it in Self::readable.
// LengthRead must be used instead of std::io::Read because we need the total length to separate
// out the tag at the end.
fn read<R: LengthRead>(mut r: &mut R, secret: [u8; 32]) -> Result<Self, DecodeError> {
fn read<R: LengthRead>(r: &mut R, secret: [u8; 32]) -> Result<Self, DecodeError> {
if r.total_bytes() < 16 { return Err(DecodeError::InvalidValue) }
let mut chacha = ChaCha20Poly1305RFC::new(&secret, &[0; 12], &[]);
let decrypted_len = r.total_bytes() - 16;
let s = FixedLengthReader::new(&mut r, decrypted_len);
let s = FixedLengthReader::new(r, decrypted_len);
let mut chacha_stream = ChaChaPolyReader { chacha: &mut chacha, read: s };
let readable: T = Readable::read(&mut chacha_stream)?;
chacha_stream.read.eat_remaining()?;
@ -194,7 +194,7 @@ mod tests {
// Now deserialize the object back and make sure it matches the original.
let mut read_adapter: Option<ChaChaPolyReadAdapter<TestWriteable>> = None;
decode_tlv_stream!(&writer.0[..], {
decode_tlv_stream!(&mut &writer.0[..], {
(1, read_adapter, (option: LengthReadableArgs, rho)),
});
assert_eq!(writeable, read_adapter.unwrap().readable);
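A sketch of the length bookkeeping the adapter relies on, assuming the stream layout is ciphertext followed by a 16-byte Poly1305 tag (hypothetical helper operating on an in-memory buffer rather than a `LengthRead`):

// Split a ChaCha20-Poly1305 stream into (ciphertext, tag); mirrors the
// `decrypted_len = total_bytes - 16` computation above.
fn split_ciphertext_and_tag(buf: &[u8]) -> Option<(&[u8], &[u8])> {
    if buf.len() < 16 {
        return None; // too short to even hold the tag, like the InvalidValue case above
    }
    Some(buf.split_at(buf.len() - 16))
}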

View file

@ -471,7 +471,7 @@ where
WITNESS_SCALE_FACTOR as u64,
);
let change_output_amount = Amount::from_sat(remaining_amount.to_sat().saturating_sub(change_output_fee));
let change_output = if change_output_amount < change_script.dust_value() {
let change_output = if change_output_amount < change_script.minimal_non_dust() {
log_debug!(self.logger, "Coin selection attempt did not yield change output");
None
} else {
@ -632,7 +632,7 @@ where
coin_selection.confirmed_utxos.iter().map(|utxo| utxo.output.value).sum();
self.process_coin_selection(&mut anchor_tx, &coin_selection);
let anchor_txid = anchor_tx.txid();
let anchor_txid = anchor_tx.compute_txid();
// construct psbt
let mut anchor_psbt = Psbt::from_unsigned_tx(anchor_tx).unwrap();
@ -679,7 +679,7 @@ where
}
log_info!(self.logger, "Broadcasting anchor transaction {} to bump channel close with txid {}",
anchor_txid, commitment_tx.txid());
anchor_txid, commitment_tx.compute_txid());
self.broadcaster.broadcast_transactions(&[&commitment_tx, &anchor_tx]);
Ok(())
}
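The change-output check above reflects another 0.32 rename: `Script::dust_value()` is now `minimal_non_dust()`, with the comparison itself unchanged. A hypothetical helper sketching the check:

// Drop the change output if its amount would sit below the script's dust threshold.
fn change_is_dust(change_amount: bitcoin::Amount, change_script: &bitcoin::Script) -> bool {
    change_amount < change_script.minimal_non_dust()
}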
@ -755,7 +755,7 @@ where
#[cfg(debug_assertions)]
let unsigned_tx_weight = htlc_psbt.unsigned_tx.weight().to_wu() - (htlc_psbt.unsigned_tx.input.len() as u64 * EMPTY_SCRIPT_SIG_WEIGHT);
log_debug!(self.logger, "Signing HTLC transaction {}", htlc_psbt.unsigned_tx.txid());
log_debug!(self.logger, "Signing HTLC transaction {}", htlc_psbt.unsigned_tx.compute_txid());
htlc_tx = self.utxo_source.sign_psbt(htlc_psbt)?;
let mut signers = BTreeMap::new();
@ -798,13 +798,13 @@ where
commitment_tx_fee_satoshis, anchor_descriptor, ..
} => {
log_info!(self.logger, "Handling channel close bump (claim_id = {}, commitment_txid = {})",
log_bytes!(claim_id.0), commitment_tx.txid());
log_bytes!(claim_id.0), commitment_tx.compute_txid());
if let Err(_) = self.handle_channel_close(
*claim_id, *package_target_feerate_sat_per_1000_weight, commitment_tx,
*commitment_tx_fee_satoshis, anchor_descriptor,
) {
log_error!(self.logger, "Failed bumping commitment transaction fee for {}",
commitment_tx.txid());
commitment_tx.compute_txid());
}
}
BumpTransactionEvent::HTLCResolution {

lightning/src/io/mod.rs (new file, 73 lines)
View file

@ -0,0 +1,73 @@
pub use bitcoin::io::*;
/// Emulation of std::io::Cursor
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Cursor<T> {
inner: T,
pos: u64,
}
impl<T> Cursor<T> {
/// Creates a `Cursor` by wrapping `inner`.
#[inline]
pub fn new(inner: T) -> Cursor<T> {
Cursor { pos: 0, inner }
}
/// Returns the position read up to thus far.
#[inline]
pub fn position(&self) -> u64 {
self.pos
}
/// Returns the inner buffer.
///
/// This is the whole wrapped buffer, including the bytes already read.
#[inline]
pub fn into_inner(self) -> T {
self.inner
}
/// Gets a reference to the underlying value in this cursor.
pub fn get_ref(&self) -> &T {
&self.inner
}
/// Gets a mutable reference to the underlying value in this cursor.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying value as it may corrupt this cursor's position.
pub fn get_mut(&mut self) -> &mut T {
&mut self.inner
}
/// Sets the position of this cursor.
pub fn set_position(&mut self, pos: u64) {
self.pos = pos;
}
}
impl<T: AsRef<[u8]>> Read for Cursor<T> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let n = Read::read(&mut self.fill_buf()?, buf)?;
self.pos += n as u64;
Ok(n)
}
fn read_exact(&mut self, buf: &mut [u8]) -> Result<()> {
let n = buf.len();
Read::read_exact(&mut self.fill_buf()?, buf)?;
self.pos += n as u64;
Ok(())
}
}
impl<T: AsRef<[u8]>> BufRead for Cursor<T> {
fn fill_buf(&mut self) -> Result<&[u8]> {
let amt = core::cmp::min(self.pos, self.inner.as_ref().len() as u64);
Ok(&self.inner.as_ref()[(amt as usize)..])
}
fn consume(&mut self, amt: usize) {
self.pos += amt as u64;
}
}
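A usage sketch for the new `Cursor` emulation, assuming it sits alongside the module above so `Cursor`, the re-exported `Read` trait, and `Result` are all in scope:

// Wrap a byte buffer, read a fixed-size prefix, and observe the advancing position.
fn read_prefix(data: Vec<u8>) -> Result<[u8; 4]> {
    let mut cursor = Cursor::new(data);
    let mut prefix = [0u8; 4];
    cursor.read_exact(&mut prefix)?;
    debug_assert_eq!(cursor.position(), 4);
    Ok(prefix)
}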

View file

@ -70,7 +70,6 @@ extern crate core;
#[cfg(any(test, feature = "_test_utils"))] extern crate regex;
#[cfg(not(feature = "std"))] extern crate core2;
#[cfg(not(feature = "std"))] extern crate libm;
#[cfg(ldk_bench)] extern crate criterion;
@ -88,42 +87,18 @@ pub mod events;
pub(crate) mod crypto;
#[cfg(feature = "std")]
/// Re-export of either `core2::io` or `std::io`, depending on the `std` feature flag.
pub use std::io;
#[cfg(not(feature = "std"))]
/// Re-export of either `core2::io` or `std::io`, depending on the `std` feature flag.
pub use core2::io;
/// Extension of the bitcoin::io module
pub mod io;
#[cfg(not(feature = "std"))]
#[doc(hidden)]
/// IO utilities public only for use by in-crate macros. These should not be used externally
///
/// This is not exported to bindings users as it is not intended for public consumption.
pub mod io_extras {
use core2::io::{self, Read, Write};
/// A writer which will move data into the void.
pub struct Sink {
_priv: (),
}
use bitcoin::io::{self, Read, Write};
/// Creates an instance of a writer which will successfully consume all data.
pub const fn sink() -> Sink {
Sink { _priv: () }
}
impl core2::io::Write for Sink {
#[inline]
fn write(&mut self, buf: &[u8]) -> core2::io::Result<usize> {
Ok(buf.len())
}
#[inline]
fn flush(&mut self) -> core2::io::Result<()> {
Ok(())
}
}
pub use bitcoin::io::sink;
pub fn copy<R: ?Sized, W: ?Sized>(reader: &mut R, writer: &mut W) -> Result<u64, io::Error>
where
@ -144,7 +119,7 @@ pub mod io_extras {
Ok(count)
}
pub fn read_to_end<D: io::Read>(mut d: D) -> Result<alloc::vec::Vec<u8>, io::Error> {
pub fn read_to_end<D: Read>(d: &mut D) -> Result<alloc::vec::Vec<u8>, io::Error> {
let mut result = vec![];
let mut buf = [0u8; 64];
loop {
@ -159,21 +134,6 @@ pub mod io_extras {
}
}
#[cfg(feature = "std")]
#[doc(hidden)]
/// IO utilities public only for use by in-crate macros. These should not be used externally
///
/// This is not exported to bindings users as it is not intended for public consumption.
mod io_extras {
pub fn read_to_end<D: ::std::io::Read>(mut d: D) -> Result<Vec<u8>, ::std::io::Error> {
let mut buf = Vec::new();
d.read_to_end(&mut buf)?;
Ok(buf)
}
pub use std::io::{copy, sink};
}
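A sketch of the replacement surface, assuming `bitcoin::io`'s `Sink`/`sink()` behave like the removed `core2`-based implementation, where every write succeeds and is discarded (hypothetical helper):

// Writes to the re-exported sink always succeed; handy for measuring serialized size.
fn discard(buf: &[u8]) -> Result<(), bitcoin::io::Error> {
    use bitcoin::io::Write;
    let mut void = bitcoin::io::sink();
    void.write_all(buf)
}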
mod prelude {
#![allow(unused_imports)]

View file

@ -734,7 +734,7 @@ fn do_test_async_holder_signatures(anchors: bool, remote_commitment: bool) {
if anchors {
*nodes[0].fee_estimator.sat_per_kw.lock().unwrap() *= 2;
*nodes[1].fee_estimator.sat_per_kw.lock().unwrap() *= 2;
closing_node.wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
closing_node.wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
}
// Route an HTLC and set the signer as unavailable.
@ -773,7 +773,7 @@ fn do_test_async_holder_signatures(anchors: bool, remote_commitment: bool) {
txn.remove(0)
} else {
assert_eq!(txn.len(), 2);
if txn[0].input[0].previous_output.txid == funding_tx.txid() {
if txn[0].input[0].previous_output.txid == funding_tx.compute_txid() {
check_spends!(txn[0], funding_tx);
check_spends!(txn[1], txn[0]);
txn.remove(0)

View file

@ -766,7 +766,7 @@ pub fn build_htlc_input_witness(
let mut witness = Witness::new();
// First push the multisig dummy, note that due to BIP147 (NULLDUMMY) it must be a zero-length element.
witness.push(vec![]);
witness.push_ecdsa_signature(&BitcoinSignature { sig: *remote_sig, hash_ty: remote_sighash_type });
witness.push_ecdsa_signature(&BitcoinSignature { signature: *remote_sig, sighash_type: remote_sighash_type });
witness.push_ecdsa_signature(&BitcoinSignature::sighash_all(*local_sig));
if let Some(preimage) = preimage {
witness.push(preimage.0.to_vec());
@ -1427,7 +1427,7 @@ impl CommitmentTransaction {
let (obscured_commitment_transaction_number, txins) = Self::internal_build_inputs(commitment_number, channel_parameters);
let transaction = Self::make_transaction(obscured_commitment_transaction_number, txins, outputs);
let txid = transaction.txid();
let txid = transaction.compute_txid();
CommitmentTransaction {
commitment_number,
to_broadcaster_value_sat,
@ -1459,7 +1459,7 @@ impl CommitmentTransaction {
let (outputs, _) = Self::internal_build_outputs(keys, self.to_broadcaster_value_sat, self.to_countersignatory_value_sat, &mut htlcs_with_aux, channel_parameters, broadcaster_funding_key, countersignatory_funding_key)?;
let transaction = Self::make_transaction(obscured_commitment_transaction_number, txins, outputs);
let txid = transaction.txid();
let txid = transaction.compute_txid();
let built_transaction = BuiltCommitmentTransaction {
transaction,
txid
@ -1872,11 +1872,10 @@ mod tests {
use bitcoin::secp256k1::{PublicKey, SecretKey, Secp256k1};
use crate::util::test_utils;
use crate::sign::{ChannelSigner, SignerProvider};
use bitcoin::{Network, Txid, ScriptBuf};
use bitcoin::{Network, Txid, ScriptBuf, CompressedPublicKey};
use bitcoin::hashes::Hash;
use bitcoin::hex::FromHex;
use crate::ln::types::PaymentHash;
use bitcoin::address::Payload;
use bitcoin::PublicKey as BitcoinPublicKey;
use crate::ln::features::ChannelTypeFeatures;
@ -1949,7 +1948,7 @@ mod tests {
// Generate broadcaster and counterparty outputs
let tx = builder.build(1000, 2000);
assert_eq!(tx.built.transaction.output.len(), 2);
assert_eq!(tx.built.transaction.output[1].script_pubkey, Payload::p2wpkh(&BitcoinPublicKey::new(builder.counterparty_pubkeys.payment_point)).unwrap().script_pubkey());
assert_eq!(tx.built.transaction.output[1].script_pubkey, bitcoin::address::Address::p2wpkh(&CompressedPublicKey(builder.counterparty_pubkeys.payment_point), Network::Testnet).script_pubkey());
// Generate broadcaster and counterparty outputs as well as two anchors
builder.channel_parameters.channel_type_features = ChannelTypeFeatures::anchors_zero_htlc_fee_and_dependencies();
@ -2053,7 +2052,7 @@ mod tests {
let justice_tx = tx.trust().build_to_local_justice_tx(253, destination_script.clone()).unwrap();
assert_eq!(justice_tx.input.len(), 1);
assert_eq!(justice_tx.input[0].previous_output.txid, tx.built.transaction.txid());
assert_eq!(justice_tx.input[0].previous_output.txid, tx.built.transaction.compute_txid());
assert_eq!(justice_tx.input[0].previous_output.vout, tx.trust().revokeable_output_index().unwrap() as u32);
assert!(justice_tx.input[0].sequence.is_rbf());
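Two further 0.32 renames appear in this file, sketched here in isolation (assumptions: `BitcoinSignature` above is `bitcoin::ecdsa::Signature`, and `Network::Testnet` is only a placeholder network):

use bitcoin::sighash::EcdsaSighashType;
use bitcoin::{Address, CompressedPublicKey, Network, ScriptBuf};

// Field renames: `sig`/`hash_ty` become `signature`/`sighash_type`.
fn sighash_all(sig: bitcoin::secp256k1::ecdsa::Signature) -> bitcoin::ecdsa::Signature {
    bitcoin::ecdsa::Signature { signature: sig, sighash_type: EcdsaSighashType::All }
}

// `address::Payload` is gone; P2WPKH scripts are now built from a CompressedPublicKey.
fn p2wpkh_script(key: &CompressedPublicKey) -> ScriptBuf {
    Address::p2wpkh(key, Network::Testnet).script_pubkey()
}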

View file

@ -50,7 +50,7 @@ fn test_monitor_and_persister_update_fail() {
// Create some initial channel
let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
let outpoint = OutPoint { txid: chan.3.txid(), index: 0 };
let outpoint = OutPoint { txid: chan.3.compute_txid(), index: 0 };
// Rebalance the network to generate htlc in the two directions
send_payment(&nodes[0], &vec!(&nodes[1])[..], 10_000_000);
@ -1884,7 +1884,7 @@ fn do_during_funding_monitor_fail(confirm_a_first: bool, restore_b_before_conf:
let events = nodes[0].node.get_and_clear_pending_events();
assert_eq!(events.len(), 0);
assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().len(), 1);
assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0)[0].txid(), funding_output.txid);
assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0)[0].compute_txid(), funding_output.txid);
if confirm_a_first {
confirm_transaction(&nodes[0], &funding_tx);

View file

@ -6714,7 +6714,7 @@ impl<SP: Deref> Channel<SP> where
// Check if the transaction is the expected funding transaction, and if it is,
// check that it pays the right amount to the right script.
if self.context.funding_tx_confirmation_height == 0 {
if tx.txid() == funding_txo.txid {
if tx.compute_txid() == funding_txo.txid {
let txo_idx = funding_txo.index as usize;
if txo_idx >= tx.output.len() || tx.output[txo_idx].script_pubkey != self.context.get_funding_redeemscript().to_p2wsh() ||
tx.output[txo_idx].value.to_sat() != self.context.channel_value_satoshis {
@ -6769,7 +6769,7 @@ impl<SP: Deref> Channel<SP> where
}
for inp in tx.input.iter() {
if inp.previous_output == funding_txo.into_bitcoin_outpoint() {
log_info!(logger, "Detected channel-closing tx {} spending {}:{}, closing channel {}", tx.txid(), inp.previous_output.txid, inp.previous_output.vout, &self.context.channel_id());
log_info!(logger, "Detected channel-closing tx {} spending {}:{}, closing channel {}", tx.compute_txid(), inp.previous_output.txid, inp.previous_output.vout, &self.context.channel_id());
return Err(ClosureReason::CommitmentTxConfirmed);
}
}
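A sketch of the output check described in the comment above, with the expected script and amount taken as inputs (hypothetical helper; per the code above, the expected script would be the P2WSH of the funding redeemscript and the amount the channel value):

// Does output `vout` of `tx` pay `expected_sat` to `expected_spk`?
fn funding_output_matches(
    tx: &bitcoin::Transaction, vout: usize, expected_spk: &bitcoin::ScriptBuf, expected_sat: u64,
) -> bool {
    tx.output.get(vout)
        .map(|out| out.script_pubkey == *expected_spk && out.value.to_sat() == expected_sat)
        .unwrap_or(false)
}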
@ -9752,7 +9752,7 @@ mod tests {
let tx = Transaction { version: Version::ONE, lock_time: LockTime::ZERO, input: Vec::new(), output: vec![TxOut {
value: Amount::from_sat(10000000), script_pubkey: output_script.clone(),
}]};
let funding_outpoint = OutPoint{ txid: tx.txid(), index: 0 };
let funding_outpoint = OutPoint{ txid: tx.compute_txid(), index: 0 };
let funding_created_msg = node_a_chan.get_funding_created(tx.clone(), funding_outpoint, false, &&logger).map_err(|_| ()).unwrap();
let (_, funding_signed_msg, _) = node_b_chan.funding_created(&funding_created_msg.unwrap(), best_block, &&keys_provider, &&logger).map_err(|_| ()).unwrap();
@ -9882,7 +9882,7 @@ mod tests {
let tx = Transaction { version: Version::ONE, lock_time: LockTime::ZERO, input: Vec::new(), output: vec![TxOut {
value: Amount::from_sat(10000000), script_pubkey: output_script.clone(),
}]};
let funding_outpoint = OutPoint{ txid: tx.txid(), index: 0 };
let funding_outpoint = OutPoint{ txid: tx.compute_txid(), index: 0 };
let funding_created_msg = node_a_chan.get_funding_created(tx.clone(), funding_outpoint, false, &&logger).map_err(|_| ()).unwrap();
let (mut node_b_chan, funding_signed_msg, _) = node_b_chan.funding_created(&funding_created_msg.unwrap(), best_block, &&keys_provider, &&logger).map_err(|_| ()).unwrap();
@ -10071,7 +10071,7 @@ mod tests {
let tx = Transaction { version: Version::ONE, lock_time: LockTime::ZERO, input: Vec::new(), output: vec![TxOut {
value: Amount::from_sat(10000000), script_pubkey: output_script.clone(),
}]};
let funding_outpoint = OutPoint{ txid: tx.txid(), index: 0 };
let funding_outpoint = OutPoint{ txid: tx.compute_txid(), index: 0 };
let funding_created_msg = node_a_chan.get_funding_created(tx.clone(), funding_outpoint, false, &&logger).map_err(|_| ()).unwrap();
let (_, funding_signed_msg, _) = node_b_chan.funding_created(&funding_created_msg.unwrap(), best_block, &&keys_provider, &&logger).map_err(|_| ()).unwrap();
@ -10139,7 +10139,7 @@ mod tests {
let tx = Transaction { version: Version::ONE, lock_time: LockTime::ZERO, input: Vec::new(), output: vec![TxOut {
value: Amount::from_sat(10000000), script_pubkey: outbound_chan.context.get_funding_redeemscript(),
}]};
let funding_outpoint = OutPoint{ txid: tx.txid(), index: 0 };
let funding_outpoint = OutPoint{ txid: tx.compute_txid(), index: 0 };
let funding_created = outbound_chan.get_funding_created(tx.clone(), funding_outpoint, false, &&logger).map_err(|_| ()).unwrap().unwrap();
let mut chan = match inbound_chan.funding_created(&funding_created, best_block, &&keys_provider, &&logger) {
Ok((chan, _, _)) => chan,
@ -11272,7 +11272,7 @@ mod tests {
value: Amount::from_sat(10000000), script_pubkey: Builder::new().into_script(),
},
]};
let funding_outpoint = OutPoint{ txid: tx.txid(), index: 0 };
let funding_outpoint = OutPoint{ txid: tx.compute_txid(), index: 0 };
let funding_created_msg = node_a_chan.get_funding_created(
tx.clone(), funding_outpoint, true, &&logger,
).map_err(|_| ()).unwrap();

View file

@ -724,7 +724,7 @@ enum FundingType {
impl FundingType {
fn txid(&self) -> Txid {
match self {
FundingType::Checked(tx) => tx.txid(),
FundingType::Checked(tx) => tx.compute_txid(),
FundingType::Unchecked(outp) => outp.txid,
}
}
@ -2850,7 +2850,7 @@ macro_rules! handle_monitor_update_completion {
}
}
if let Some(tx) = batch_funding_tx {
log_info!($self.logger, "Broadcasting batch funding transaction with txid {}", tx.txid());
log_info!($self.logger, "Broadcasting batch funding transaction with txid {}", tx.compute_txid());
$self.tx_broadcaster.broadcast_transactions(&[&tx]);
}
}
@ -4546,7 +4546,7 @@ where
#[cfg(test)]
pub(crate) fn funding_transaction_generated_unchecked(&self, temporary_channel_id: ChannelId, counterparty_node_id: PublicKey, funding_transaction: Transaction, output_index: u16) -> Result<(), APIError> {
let txid = funding_transaction.txid();
let txid = funding_transaction.compute_txid();
self.funding_transaction_generated_intern(temporary_channel_id, counterparty_node_id, funding_transaction, false, |_| {
Ok(OutPoint { txid, index: output_index })
}, false)
@ -7051,7 +7051,7 @@ where
if let Some(tx) = funding_broadcastable {
if channel.context.is_manual_broadcast() {
log_info!(logger, "Not broadcasting funding transaction with txid {} as it is manually managed", tx.txid());
log_info!(logger, "Not broadcasting funding transaction with txid {} as it is manually managed", tx.compute_txid());
let mut pending_events = self.pending_events.lock().unwrap();
match channel.context.get_funding_txo() {
Some(funding_txo) => {
@ -7063,7 +7063,7 @@ where
}
};
} else {
log_info!(logger, "Broadcasting funding transaction with txid {}", tx.txid());
log_info!(logger, "Broadcasting funding transaction with txid {}", tx.compute_txid());
self.tx_broadcaster.broadcast_transactions(&[&tx]);
}
}
@ -13342,7 +13342,7 @@ mod tests {
nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), &accept_channel);
let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 1_000_000, 42);
let channel_id = ChannelId::from_bytes(tx.txid().to_byte_array());
let channel_id = ChannelId::from_bytes(tx.compute_txid().to_byte_array());
{
// Ensure that the `outpoint_to_peer` map is empty until either party has received the
// funding transaction, and have the real `channel_id`.

View file

@ -331,7 +331,7 @@ fn do_connect_block_without_consistency_checks<'a, 'b, 'c, 'd>(node: &'a Node<'b
let wallet_script = node.wallet_source.get_change_script().unwrap();
for (idx, output) in tx.output.iter().enumerate() {
if output.script_pubkey == wallet_script {
let outpoint = bitcoin::OutPoint { txid: tx.txid(), vout: idx as u32 };
let outpoint = bitcoin::OutPoint { txid: tx.compute_txid(), vout: idx as u32 };
node.wallet_source.add_utxo(outpoint, output.value);
}
}
@ -362,8 +362,8 @@ pub fn disconnect_blocks<'a, 'b, 'c, 'd>(node: &'a Node<'b, 'c, 'd>, count: u32)
},
ConnectStyle::BestBlockFirstReorgsOnlyTip|ConnectStyle::TransactionsFirstReorgsOnlyTip => {
for tx in orig.0.txdata {
node.chain_monitor.chain_monitor.transaction_unconfirmed(&tx.txid());
node.node.transaction_unconfirmed(&tx.txid());
node.chain_monitor.chain_monitor.transaction_unconfirmed(&tx.compute_txid());
node.node.transaction_unconfirmed(&tx.compute_txid());
}
},
_ => {
@ -1207,7 +1207,7 @@ fn internal_create_funding_transaction<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>,
let tx = Transaction { version: transaction::Version(chan_id as i32), lock_time: LockTime::ZERO, input, output: vec![TxOut {
value: Amount::from_sat(*channel_value_satoshis), script_pubkey: output_script.clone(),
}]};
let funding_outpoint = OutPoint { txid: tx.txid(), index: 0 };
let funding_outpoint = OutPoint { txid: tx.compute_txid(), index: 0 };
(*temporary_channel_id, tx, funding_outpoint)
},
_ => panic!("Unexpected event"),
@ -1525,7 +1525,7 @@ pub fn update_nodes_with_chan_announce<'a, 'b, 'c, 'd>(nodes: &'a Vec<Node<'b, '
pub fn do_check_spends<F: Fn(&bitcoin::transaction::OutPoint) -> Option<TxOut>>(tx: &Transaction, get_output: F) {
for outp in tx.output.iter() {
assert!(outp.value >= outp.script_pubkey.dust_value(), "Spending tx output didn't meet dust limit");
assert!(outp.value >= outp.script_pubkey.minimal_non_dust(), "Spending tx output didn't meet dust limit");
}
let mut total_value_in = 0;
for input in tx.input.iter() {
@ -1547,12 +1547,12 @@ macro_rules! check_spends {
{
$(
for outp in $spends_txn.output.iter() {
assert!(outp.value >= outp.script_pubkey.dust_value(), "Input tx output didn't meet dust limit");
assert!(outp.value >= outp.script_pubkey.minimal_non_dust(), "Input tx output didn't meet dust limit");
}
)*
let get_output = |out_point: &bitcoin::transaction::OutPoint| {
$(
if out_point.txid == $spends_txn.txid() {
if out_point.txid == $spends_txn.compute_txid() {
return $spends_txn.output.get(out_point.vout as usize).cloned()
}
)*
@ -3369,12 +3369,12 @@ pub enum HTLCType { NONE, TIMEOUT, SUCCESS }
pub fn test_txn_broadcast<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, chan: &(msgs::ChannelUpdate, msgs::ChannelUpdate, ChannelId, Transaction), commitment_tx: Option<Transaction>, has_htlc_tx: HTLCType) -> Vec<Transaction> {
let mut node_txn = node.tx_broadcaster.txn_broadcasted.lock().unwrap();
let mut txn_seen = new_hash_set();
node_txn.retain(|tx| txn_seen.insert(tx.txid()));
node_txn.retain(|tx| txn_seen.insert(tx.compute_txid()));
assert!(node_txn.len() >= if commitment_tx.is_some() { 0 } else { 1 } + if has_htlc_tx == HTLCType::NONE { 0 } else { 1 });
let mut res = Vec::with_capacity(2);
node_txn.retain(|tx| {
if tx.input.len() == 1 && tx.input[0].previous_output.txid == chan.3.txid() {
if tx.input.len() == 1 && tx.input[0].previous_output.txid == chan.3.compute_txid() {
check_spends!(tx, chan.3);
if commitment_tx.is_none() {
res.push(tx.clone());
@ -3390,7 +3390,7 @@ pub fn test_txn_broadcast<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, chan: &(msgs::Cha
if has_htlc_tx != HTLCType::NONE {
node_txn.retain(|tx| {
if tx.input.len() == 1 && tx.input[0].previous_output.txid == res[0].txid() {
if tx.input.len() == 1 && tx.input[0].previous_output.txid == res[0].compute_txid() {
check_spends!(tx, res[0]);
if has_htlc_tx == HTLCType::TIMEOUT {
assert_ne!(tx.lock_time, LockTime::ZERO);
@ -3419,7 +3419,7 @@ pub fn test_revoked_htlc_claim_txn_broadcast<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>
// for revoked htlc outputs
if node_txn.len() != 1 && node_txn.len() != 2 && node_txn.len() != 3 { assert!(false); }
node_txn.retain(|tx| {
if tx.input.len() == 1 && tx.input[0].previous_output.txid == revoked_tx.txid() {
if tx.input.len() == 1 && tx.input[0].previous_output.txid == revoked_tx.compute_txid() {
check_spends!(tx, revoked_tx);
false
} else { true }
@ -3434,12 +3434,12 @@ pub fn test_revoked_htlc_claim_txn_broadcast<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>
pub fn check_preimage_claim<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, prev_txn: &Vec<Transaction>) -> Vec<Transaction> {
let mut node_txn = node.tx_broadcaster.txn_broadcasted.lock().unwrap();
let mut txn_seen = new_hash_set();
node_txn.retain(|tx| txn_seen.insert(tx.txid()));
node_txn.retain(|tx| txn_seen.insert(tx.compute_txid()));
let mut found_prev = false;
for prev_tx in prev_txn {
for tx in &*node_txn {
if tx.input[0].previous_output.txid == prev_tx.txid() {
if tx.input[0].previous_output.txid == prev_tx.compute_txid() {
check_spends!(tx, prev_tx);
let mut iter = tx.input[0].witness.iter();
iter.next().expect("expected 3 witness items");

View file

@ -2382,7 +2382,7 @@ fn channel_monitor_network_test() {
let node_txn = test_txn_broadcast(&nodes[2], &chan_3, None, HTLCType::NONE);
connect_blocks(&nodes[2], TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + MIN_CLTV_EXPIRY_DELTA as u32 + 1);
test_txn_broadcast(&nodes[2], &chan_3, None, HTLCType::TIMEOUT);
node2_commitment_txid = node_txn[0].txid();
node2_commitment_txid = node_txn[0].compute_txid();
// Claim the payment on nodes[3], giving it knowledge of the preimage
claim_funds!(nodes[3], nodes[2], payment_preimage_1, payment_hash_1);
@ -2398,7 +2398,7 @@ fn channel_monitor_network_test() {
// Drop the ChannelMonitor for the previous channel to avoid it broadcasting transactions and
// confusing us in the following tests.
let chan_3_mon = nodes[3].chain_monitor.chain_monitor.remove_monitor(&OutPoint { txid: chan_3.3.txid(), index: 0 });
let chan_3_mon = nodes[3].chain_monitor.chain_monitor.remove_monitor(&OutPoint { txid: chan_3.3.compute_txid(), index: 0 });
// One pending HTLC to time out:
let (payment_preimage_2, payment_hash_2, ..) = route_payment(&nodes[3], &[&nodes[4]], 3_000_000);
@ -2466,7 +2466,7 @@ fn channel_monitor_network_test() {
assert_eq!(nodes[3].node.list_channels().len(), 0);
assert_eq!(nodes[4].node.list_channels().len(), 0);
assert_eq!(nodes[3].chain_monitor.chain_monitor.watch_channel(OutPoint { txid: chan_3.3.txid(), index: 0 }, chan_3_mon),
assert_eq!(nodes[3].chain_monitor.chain_monitor.watch_channel(OutPoint { txid: chan_3.3.compute_txid(), index: 0 }, chan_3_mon),
Ok(ChannelMonitorUpdateStatus::Completed));
check_closed_event!(nodes[3], 1, ClosureReason::HTLCsTimedOut, [nodes[4].node.get_our_node_id()], 100000);
}
@ -2498,10 +2498,10 @@ fn test_justice_tx_htlc_timeout() {
let revoked_local_txn = get_local_commitment_txn!(nodes[0], chan_5.2);
assert_eq!(revoked_local_txn.len(), 2); // First commitment tx, then HTLC tx
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_5.3.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_5.3.compute_txid());
assert_eq!(revoked_local_txn[0].output.len(), 2); // Only HTLC and output back to 0 are present
assert_eq!(revoked_local_txn[1].input.len(), 1);
assert_eq!(revoked_local_txn[1].input[0].previous_output.txid, revoked_local_txn[0].txid());
assert_eq!(revoked_local_txn[1].input[0].previous_output.txid, revoked_local_txn[0].compute_txid());
assert_eq!(revoked_local_txn[1].input[0].witness.last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT); // HTLC-Timeout
// Revoke the old state
claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage_3);
@ -2561,7 +2561,7 @@ fn test_justice_tx_htlc_success() {
let revoked_local_txn = get_local_commitment_txn!(nodes[1], chan_6.2);
assert_eq!(revoked_local_txn.len(), 1); // Only commitment tx
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_6.3.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_6.3.compute_txid());
assert_eq!(revoked_local_txn[0].output.len(), 2); // Only HTLC and output back to A are present
// Revoke the old state
claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage_4);
@ -2644,7 +2644,7 @@ fn do_test_forming_justice_tx_from_monitor_updates(broadcast_initial_commitment:
let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
let (_, _, channel_id, funding_tx) = create_announced_chan_between_nodes(&nodes, 0, 1);
let funding_txo = OutPoint { txid: funding_tx.txid(), index: 0 };
let funding_txo = OutPoint { txid: funding_tx.compute_txid(), index: 0 };
if !broadcast_initial_commitment {
// Send a payment to move the channel forward
@ -2660,7 +2660,7 @@ fn do_test_forming_justice_tx_from_monitor_updates(broadcast_initial_commitment:
// Send another payment, now revoking the previous commitment tx
send_payment(&nodes[0], &vec!(&nodes[1])[..], 5_000_000);
let justice_tx = persisters[1].justice_tx(funding_txo, &revoked_commitment_tx.txid()).unwrap();
let justice_tx = persisters[1].justice_tx(funding_txo, &revoked_commitment_tx.compute_txid()).unwrap();
check_spends!(justice_tx, revoked_commitment_tx);
mine_transactions(&nodes[1], &[revoked_commitment_tx, &justice_tx]);
@ -2712,9 +2712,9 @@ fn claim_htlc_outputs_shared_tx() {
let revoked_local_txn = get_local_commitment_txn!(nodes[0], chan_1.2);
assert_eq!(revoked_local_txn.len(), 2); // commitment tx + 1 HTLC-Timeout tx
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_1.3.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_1.3.compute_txid());
assert_eq!(revoked_local_txn[1].input.len(), 1);
assert_eq!(revoked_local_txn[1].input[0].previous_output.txid, revoked_local_txn[0].txid());
assert_eq!(revoked_local_txn[1].input[0].previous_output.txid, revoked_local_txn[0].compute_txid());
assert_eq!(revoked_local_txn[1].input[0].witness.last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT); // HTLC-Timeout
check_spends!(revoked_local_txn[1], revoked_local_txn[0]);
@ -2808,7 +2808,7 @@ fn claim_htlc_outputs_single_tx() {
check_spends!(node_txn[1], node_txn[0]);
// Filter out any non justice transactions.
node_txn.retain(|tx| tx.input[0].previous_output.txid == revoked_local_txn[0].txid());
node_txn.retain(|tx| tx.input[0].previous_output.txid == revoked_local_txn[0].compute_txid());
assert!(node_txn.len() > 3);
assert_eq!(node_txn[0].input.len(), 1);
@ -2916,7 +2916,7 @@ fn test_htlc_on_chain_success() {
{
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
assert_eq!(added_monitors.len(), 1);
assert_eq!(added_monitors[0].0.txid, chan_2.3.txid());
assert_eq!(added_monitors[0].0.txid, chan_2.3.compute_txid());
added_monitors.clear();
}
let forwarded_events = nodes[1].node.get_and_clear_pending_events();
@ -2954,8 +2954,8 @@ fn test_htlc_on_chain_success() {
{
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
assert_eq!(added_monitors.len(), 2);
assert_eq!(added_monitors[0].0.txid, chan_1.3.txid());
assert_eq!(added_monitors[1].0.txid, chan_1.3.txid());
assert_eq!(added_monitors[0].0.txid, chan_1.3.compute_txid());
assert_eq!(added_monitors[1].0.txid, chan_1.3.compute_txid());
added_monitors.clear();
}
assert_eq!(events.len(), 3);
@ -3028,7 +3028,7 @@ fn test_htlc_on_chain_success() {
} else {
// Certain `ConnectStyle`s will cause RBF bumps of the previous HTLC transaction to be broadcast.
// FullBlockViaListen
if node_txn[0].input[0].previous_output.txid == node_a_commitment_tx[0].txid() {
if node_txn[0].input[0].previous_output.txid == node_a_commitment_tx[0].compute_txid() {
check_spends!(node_txn[1], commitment_tx[0]);
check_spends!(node_txn[2], commitment_tx[0]);
assert_ne!(node_txn[1].input[0].previous_output.vout, node_txn[2].input[0].previous_output.vout);
@ -3697,7 +3697,7 @@ fn test_force_close_fail_back() {
assert_eq!(node_txn.len(), if nodes[2].connect_style.borrow().updates_best_block_first() { 2 } else { 1 });
let htlc_tx = node_txn.pop().unwrap();
assert_eq!(htlc_tx.input.len(), 1);
assert_eq!(htlc_tx.input[0].previous_output.txid, commitment_tx.txid());
assert_eq!(htlc_tx.input[0].previous_output.txid, commitment_tx.compute_txid());
assert_eq!(htlc_tx.lock_time, LockTime::ZERO); // Must be an HTLC-Success
assert_eq!(htlc_tx.input[0].witness.len(), 5); // Must be an HTLC-Success
@ -4614,7 +4614,7 @@ fn test_claim_on_remote_revoked_sizeable_push_msat() {
let payment_preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 3000000).0;
let revoked_local_txn = get_local_commitment_txn!(nodes[0], chan.2);
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan.3.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan.3.compute_txid());
claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage);
mine_transaction(&nodes[1], &revoked_local_txn[0]);
@ -4647,7 +4647,7 @@ fn test_static_spendable_outputs_preimage_tx() {
let commitment_tx = get_local_commitment_txn!(nodes[0], chan_1.2);
assert_eq!(commitment_tx[0].input.len(), 1);
assert_eq!(commitment_tx[0].input[0].previous_output.txid, chan_1.3.txid());
assert_eq!(commitment_tx[0].input[0].previous_output.txid, chan_1.3.compute_txid());
// Settle A's commitment tx on B's chain
nodes[1].node.claim_funds(payment_preimage);
@ -4697,7 +4697,7 @@ fn test_static_spendable_outputs_timeout_tx() {
let commitment_tx = get_local_commitment_txn!(nodes[0], chan_1.2);
assert_eq!(commitment_tx[0].input.len(), 1);
assert_eq!(commitment_tx[0].input[0].previous_output.txid, chan_1.3.txid());
assert_eq!(commitment_tx[0].input[0].previous_output.txid, chan_1.3.compute_txid());
// Settle A's commitment tx on B's chain
mine_transaction(&nodes[1], &commitment_tx[0]);
@ -4740,7 +4740,7 @@ fn test_static_spendable_outputs_justice_tx_revoked_commitment_tx() {
let payment_preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 3000000).0;
let revoked_local_txn = get_local_commitment_txn!(nodes[0], chan_1.2);
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_1.3.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_1.3.compute_txid());
claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage);
@ -4776,7 +4776,7 @@ fn test_static_spendable_outputs_justice_tx_revoked_htlc_timeout_tx() {
let payment_preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 3000000).0;
let revoked_local_txn = get_local_commitment_txn!(nodes[0], chan_1.2);
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_1.3.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_1.3.compute_txid());
claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage);
@ -4810,10 +4810,10 @@ fn test_static_spendable_outputs_justice_tx_revoked_htlc_timeout_tx() {
assert_eq!(node_txn[1].input.len(), 2);
check_spends!(node_txn[1], revoked_local_txn[0], revoked_htlc_txn[0]);
if node_txn[1].input[1].previous_output.txid == revoked_htlc_txn[0].txid() {
if node_txn[1].input[1].previous_output.txid == revoked_htlc_txn[0].compute_txid() {
assert_ne!(node_txn[1].input[0].previous_output, revoked_htlc_txn[0].input[0].previous_output);
} else {
assert_eq!(node_txn[1].input[0].previous_output.txid, revoked_htlc_txn[0].txid());
assert_eq!(node_txn[1].input[0].previous_output.txid, revoked_htlc_txn[0].compute_txid());
assert_ne!(node_txn[1].input[1].previous_output, revoked_htlc_txn[0].input[0].previous_output);
}
@ -4841,7 +4841,7 @@ fn test_static_spendable_outputs_justice_tx_revoked_htlc_success_tx() {
let payment_preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 3000000).0;
let revoked_local_txn = get_local_commitment_txn!(nodes[1], chan_1.2);
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_1.3.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan_1.3.compute_txid());
// The to-be-revoked commitment tx should have one HTLC and one to_remote output
assert_eq!(revoked_local_txn[0].output.len(), 2);
@ -4878,10 +4878,10 @@ fn test_static_spendable_outputs_justice_tx_revoked_htlc_success_tx() {
// transactions next...
assert_eq!(node_txn[0].input.len(), 2);
check_spends!(node_txn[0], revoked_local_txn[0], revoked_htlc_txn[0]);
if node_txn[0].input[1].previous_output.txid == revoked_htlc_txn[0].txid() {
if node_txn[0].input[1].previous_output.txid == revoked_htlc_txn[0].compute_txid() {
assert_eq!(node_txn[0].input[0].previous_output, revoked_htlc_txn[0].input[0].previous_output);
} else {
assert_eq!(node_txn[0].input[0].previous_output.txid, revoked_htlc_txn[0].txid());
assert_eq!(node_txn[0].input[0].previous_output.txid, revoked_htlc_txn[0].compute_txid());
assert_eq!(node_txn[0].input[1].previous_output, revoked_htlc_txn[0].input[0].previous_output);
}
@ -7205,7 +7205,7 @@ fn do_test_sweep_outbound_htlc_failure_update(revoked: bool, local: bool) {
connect_blocks(&nodes[0], TEST_FINAL_CLTV); // Confirm blocks until the HTLC expires
timeout_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().drain(..)
.filter(|tx| tx.input[0].previous_output.txid == bs_commitment_tx[0].txid()).collect();
.filter(|tx| tx.input[0].previous_output.txid == bs_commitment_tx[0].compute_txid()).collect();
check_spends!(timeout_tx[0], bs_commitment_tx[0]);
// For both a revoked or non-revoked commitment transaction, after ANTI_REORG_DELAY the
// dust HTLC should have been failed.
@ -7490,8 +7490,8 @@ fn test_bump_penalty_txn_on_revoked_commitment() {
// Revoked commitment txn with 4 outputs : to_local, to_remote, 1 outgoing HTLC, 1 incoming HTLC
assert_eq!(revoked_txn[0].output.len(), 4);
assert_eq!(revoked_txn[0].input.len(), 1);
assert_eq!(revoked_txn[0].input[0].previous_output.txid, chan.3.txid());
let revoked_txid = revoked_txn[0].txid();
assert_eq!(revoked_txn[0].input[0].previous_output.txid, chan.3.compute_txid());
let revoked_txid = revoked_txn[0].compute_txid();
let mut penalty_sum = 0;
for outp in revoked_txn[0].output.iter() {
@ -7519,7 +7519,7 @@ fn test_bump_penalty_txn_on_revoked_commitment() {
check_spends!(node_txn[0], revoked_txn[0]);
let fee_1 = penalty_sum - node_txn[0].output[0].value.to_sat();
feerate_1 = fee_1 * 1000 / node_txn[0].weight().to_wu();
penalty_1 = node_txn[0].txid();
penalty_1 = node_txn[0].compute_txid();
node_txn.clear();
};
@ -7534,7 +7534,7 @@ fn test_bump_penalty_txn_on_revoked_commitment() {
assert_eq!(node_txn[0].input.len(), 3); // Penalty txn claims to_local, offered_htlc and received_htlc outputs
assert_eq!(node_txn[0].output.len(), 1);
check_spends!(node_txn[0], revoked_txn[0]);
penalty_2 = node_txn[0].txid();
penalty_2 = node_txn[0].compute_txid();
// Verify new bumped tx is different from last claiming transaction, we don't want spurious rebroadcast
assert_ne!(penalty_2, penalty_1);
let fee_2 = penalty_sum - node_txn[0].output[0].value.to_sat();
@ -7557,7 +7557,7 @@ fn test_bump_penalty_txn_on_revoked_commitment() {
assert_eq!(node_txn[0].input.len(), 3); // Penalty txn claims to_local, offered_htlc and received_htlc outputs
assert_eq!(node_txn[0].output.len(), 1);
check_spends!(node_txn[0], revoked_txn[0]);
penalty_3 = node_txn[0].txid();
penalty_3 = node_txn[0].compute_txid();
// Verify new bumped tx is different from last claiming transaction, we don't want spurious rebroadcast
assert_ne!(penalty_3, penalty_2);
let fee_3 = penalty_sum - node_txn[0].output[0].value.to_sat();
@ -7602,7 +7602,7 @@ fn test_bump_penalty_txn_on_revoked_htlcs() {
let revoked_local_txn = get_local_commitment_txn!(nodes[1], chan.2);
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan.3.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan.3.compute_txid());
// Revoke local commitment tx
claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage);
@ -7677,7 +7677,7 @@ fn test_bump_penalty_txn_on_revoked_htlcs() {
assert_eq!(node_txn[3].output.len(), 1);
check_spends!(node_txn[3], revoked_htlc_txn[0], revoked_htlc_txn[1]);
first = node_txn[3].txid();
first = node_txn[3].compute_txid();
// Store both feerates for later comparison
let fee_1 = revoked_htlc_txn[0].output[0].value + revoked_htlc_txn[1].output[0].value - node_txn[3].output[0].value;
feerate_1 = fee_1 * 1000 / node_txn[3].weight().to_wu();
@ -7702,7 +7702,7 @@ fn test_bump_penalty_txn_on_revoked_htlcs() {
assert_eq!(node_txn[0].input.len(), 2);
check_spends!(node_txn[0], revoked_htlc_txn[0], revoked_htlc_txn[1]);
// Verify bumped tx is different and 25% bump heuristic
assert_ne!(first, node_txn[0].txid());
assert_ne!(first, node_txn[0].compute_txid());
let fee_2 = revoked_htlc_txn[0].output[0].value + revoked_htlc_txn[1].output[0].value - node_txn[0].output[0].value;
let feerate_2 = fee_2 * 1000 / node_txn[0].weight().to_wu();
assert!(feerate_2 * 100 > feerate_1 * 125);
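The assertion above encodes the 25% fee-bump heuristic in integer math; a hypothetical helper sketching the same check:

// A replacement feerate only counts as a bump if it exceeds the old one by more than 25%,
// i.e. new > old * 1.25, kept as new * 100 > old * 125 to stay in integer arithmetic.
fn is_meaningful_feerate_bump(old_feerate: u64, new_feerate: u64) -> bool {
    new_feerate * 100 > old_feerate * 125
}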
@ -7750,7 +7750,7 @@ fn test_bump_penalty_txn_on_remote_commitment() {
let remote_txn = get_local_commitment_txn!(nodes[0], chan.2);
assert_eq!(remote_txn[0].output.len(), 4);
assert_eq!(remote_txn[0].input.len(), 1);
assert_eq!(remote_txn[0].input[0].previous_output.txid, chan.3.txid());
assert_eq!(remote_txn[0].input[0].previous_output.txid, chan.3.compute_txid());
// Claim a HTLC without revocation (provide B monitor with preimage)
nodes[1].node.claim_funds(payment_preimage);
@ -7777,7 +7777,7 @@ fn test_bump_penalty_txn_on_remote_commitment() {
check_spends!(node_txn[1], remote_txn[0]);
check_spends!(node_txn[2], remote_txn[0]);
preimage = node_txn[0].txid();
preimage = node_txn[0].compute_txid();
let index = node_txn[0].input[0].previous_output.vout;
let fee = remote_txn[0].output[index as usize].value.to_sat() - node_txn[0].output[0].value.to_sat();
feerate_preimage = fee * 1000 / node_txn[0].weight().to_wu();
@ -7792,7 +7792,7 @@ fn test_bump_penalty_txn_on_remote_commitment() {
check_spends!(preimage_bump, remote_txn[0]);
assert_eq!(node_txn[0].input[0].previous_output, preimage_bump.input[0].previous_output);
timeout = timeout_tx.txid();
timeout = timeout_tx.compute_txid();
let index = timeout_tx.input[0].previous_output.vout;
let fee = remote_txn[0].output[index as usize].value.to_sat() - timeout_tx.output[0].value.to_sat();
feerate_timeout = fee * 1000 / timeout_tx.weight().to_wu();
@ -7816,13 +7816,13 @@ fn test_bump_penalty_txn_on_remote_commitment() {
let fee = remote_txn[0].output[index as usize].value.to_sat() - preimage_bump.output[0].value.to_sat();
let new_feerate = fee * 1000 / preimage_bump.weight().to_wu();
assert!(new_feerate * 100 > feerate_timeout * 125);
assert_ne!(timeout, preimage_bump.txid());
assert_ne!(timeout, preimage_bump.compute_txid());
let index = node_txn[0].input[0].previous_output.vout;
let fee = remote_txn[0].output[index as usize].value.to_sat() - node_txn[0].output[0].value.to_sat();
let new_feerate = fee * 1000 / node_txn[0].weight().to_wu();
assert!(new_feerate * 100 > feerate_preimage * 125);
assert_ne!(preimage, node_txn[0].txid());
assert_ne!(preimage, node_txn[0].compute_txid());
node_txn.clear();
}
@ -7902,7 +7902,7 @@ fn test_bump_txn_sanitize_tracking_maps() {
let revoked_local_txn = get_local_commitment_txn!(nodes[1], chan.2);
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan.3.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, chan.3.compute_txid());
// Revoke local commitment tx
claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage_1);
@ -7929,7 +7929,7 @@ fn test_bump_txn_sanitize_tracking_maps() {
connect_block(&nodes[0], &create_dummy_block(nodes[0].best_block_hash(), 42, penalty_txn));
connect_blocks(&nodes[0], ANTI_REORG_DELAY - 1);
{
let monitor = nodes[0].chain_monitor.chain_monitor.get_monitor(OutPoint { txid: chan.3.txid(), index: 0 }).unwrap();
let monitor = nodes[0].chain_monitor.chain_monitor.get_monitor(OutPoint { txid: chan.3.compute_txid(), index: 0 }).unwrap();
assert!(monitor.inner.lock().unwrap().onchain_tx_handler.pending_claim_requests.is_empty());
assert!(monitor.inner.lock().unwrap().onchain_tx_handler.claimable_outpoints.is_empty());
}
@ -8544,7 +8544,7 @@ fn test_update_err_monitor_lockdown() {
// Create some initial channel
let chan_1 = create_announced_chan_between_nodes(&nodes, 0, 1);
let outpoint = OutPoint { txid: chan_1.3.txid(), index: 0 };
let outpoint = OutPoint { txid: chan_1.3.compute_txid(), index: 0 };
// Rebalance the network to generate htlc in the two directions
send_payment(&nodes[0], &vec!(&nodes[1])[..], 10_000_000);
@ -8614,7 +8614,7 @@ fn test_concurrent_monitor_claim() {
// Create some initial channel
let chan_1 = create_announced_chan_between_nodes(&nodes, 0, 1);
let outpoint = OutPoint { txid: chan_1.3.txid(), index: 0 };
let outpoint = OutPoint { txid: chan_1.3.compute_txid(), index: 0 };
// Rebalance the network to generate htlc in the two directions
send_payment(&nodes[0], &vec!(&nodes[1])[..], 10_000_000);
@ -8965,7 +8965,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
} else {
if nodes[1].connect_style.borrow().updates_best_block_first() {
assert_eq!(bob_txn.len(), 3);
assert_eq!(bob_txn[0].txid(), bob_txn[1].txid());
assert_eq!(bob_txn[0].compute_txid(), bob_txn[1].compute_txid());
} else {
assert_eq!(bob_txn.len(), 2);
}
@ -9164,7 +9164,7 @@ fn test_duplicate_funding_err_in_funding() {
let nodes = create_network(3, &node_cfgs, &node_chanmgrs);
let (_, _, _, real_channel_id, funding_tx) = create_chan_between_nodes(&nodes[0], &nodes[1]);
let real_chan_funding_txo = chain::transaction::OutPoint { txid: funding_tx.txid(), index: 0 };
let real_chan_funding_txo = chain::transaction::OutPoint { txid: funding_tx.compute_txid(), index: 0 };
assert_eq!(ChannelId::v1_from_funding_outpoint(real_chan_funding_txo), real_channel_id);
nodes[2].node.create_channel(nodes[1].node.get_our_node_id(), 100_000, 0, 42, None, None).unwrap();
@ -9491,7 +9491,7 @@ fn test_invalid_funding_tx() {
version: Version::TWO, lock_time: LockTime::ZERO,
input: tx.output.iter().enumerate().map(|(idx, _)| TxIn {
previous_output: BitcoinOutPoint {
txid: tx.txid(),
txid: tx.compute_txid(),
vout: idx as u32,
},
script_sig: ScriptBuf::new(),
@ -9618,7 +9618,7 @@ fn do_test_tx_confirmed_skipping_blocks_immediate_broadcast(test_height_before_t
// We should broadcast an HTLC transaction spending our funding transaction first
let spending_txn = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
assert_eq!(spending_txn.len(), 2);
let htlc_tx = if spending_txn[0].txid() == node_txn[0].txid() {
let htlc_tx = if spending_txn[0].compute_txid() == node_txn[0].compute_txid() {
&spending_txn[1]
} else {
&spending_txn[0]
@ -10923,7 +10923,7 @@ fn test_batch_channel_open() {
// Complete the persistence of the monitor.
nodes[0].chain_monitor.complete_sole_pending_chan_update(
&ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.txid(), index: 1 })
&ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.compute_txid(), index: 1 })
);
let events = nodes[0].node.get_and_clear_pending_events();
@ -10977,8 +10977,8 @@ fn test_close_in_funding_batch() {
assert_eq!(nodes[0].tx_broadcaster.txn_broadcast().len(), 0);
// Force-close the channel for which we've completed the initial monitor.
let funding_txo_1 = OutPoint { txid: tx.txid(), index: 0 };
let funding_txo_2 = OutPoint { txid: tx.txid(), index: 1 };
let funding_txo_1 = OutPoint { txid: tx.compute_txid(), index: 0 };
let funding_txo_2 = OutPoint { txid: tx.compute_txid(), index: 1 };
let channel_id_1 = ChannelId::v1_from_funding_outpoint(funding_txo_1);
let channel_id_2 = ChannelId::v1_from_funding_outpoint(funding_txo_2);
let error_message = "Channel force-closed";
@ -11003,9 +11003,9 @@ fn test_close_in_funding_batch() {
{
let broadcasted_txs = nodes[0].tx_broadcaster.txn_broadcast();
assert_eq!(broadcasted_txs.len(), 1);
assert!(broadcasted_txs[0].txid() != tx.txid());
assert!(broadcasted_txs[0].compute_txid() != tx.compute_txid());
assert_eq!(broadcasted_txs[0].input.len(), 1);
assert_eq!(broadcasted_txs[0].input[0].previous_output.txid, tx.txid());
assert_eq!(broadcasted_txs[0].input[0].previous_output.txid, tx.compute_txid());
}
// All channels in the batch should close immediately.
@ -11066,8 +11066,8 @@ fn test_batch_funding_close_after_funding_signed() {
assert_eq!(nodes[0].tx_broadcaster.txn_broadcast().len(), 0);
// Force-close the channel for which we've completed the initial monitor.
let funding_txo_1 = OutPoint { txid: tx.txid(), index: 0 };
let funding_txo_2 = OutPoint { txid: tx.txid(), index: 1 };
let funding_txo_1 = OutPoint { txid: tx.compute_txid(), index: 0 };
let funding_txo_2 = OutPoint { txid: tx.compute_txid(), index: 1 };
let channel_id_1 = ChannelId::v1_from_funding_outpoint(funding_txo_1);
let channel_id_2 = ChannelId::v1_from_funding_outpoint(funding_txo_2);
let error_message = "Channel force-closed";
@ -11092,9 +11092,9 @@ fn test_batch_funding_close_after_funding_signed() {
{
let broadcasted_txs = nodes[0].tx_broadcaster.txn_broadcast();
assert_eq!(broadcasted_txs.len(), 1);
assert!(broadcasted_txs[0].txid() != tx.txid());
assert!(broadcasted_txs[0].compute_txid() != tx.compute_txid());
assert_eq!(broadcasted_txs[0].input.len(), 1);
assert_eq!(broadcasted_txs[0].input[0].previous_output.txid, tx.txid());
assert_eq!(broadcasted_txs[0].input[0].previous_output.txid, tx.compute_txid());
}
// All channels in the batch should close immediately.
@ -11130,7 +11130,7 @@ fn do_test_funding_and_commitment_tx_confirm_same_block(confirm_remote_commitmen
let mut nodes = create_network(2, &node_cfgs, &node_chanmgrs);
let funding_tx = create_chan_between_nodes_with_value_init(&nodes[0], &nodes[1], 1_000_000, 0);
let chan_id = ChannelId::v1_from_funding_outpoint(chain::transaction::OutPoint { txid: funding_tx.txid(), index: 0 });
let chan_id = ChannelId::v1_from_funding_outpoint(chain::transaction::OutPoint { txid: funding_tx.compute_txid(), index: 0 });
assert_eq!(nodes[0].node.list_channels().len(), 1);
assert_eq!(nodes[1].node.list_channels().len(), 1);

View file

@ -331,7 +331,7 @@ impl NegotiationContext {
}
let transaction = msg.prevtx.as_transaction();
let txid = transaction.txid();
let txid = transaction.compute_txid();
if let Some(tx_out) = transaction.output.get(msg.prevtx_out as usize) {
if !tx_out.script_pubkey.is_witness_program() {
@ -415,7 +415,7 @@ impl NegotiationContext {
return Err(AbortReason::ReceivedTooManyTxAddOutputs);
}
if msg.sats < msg.script.dust_value().to_sat() {
if msg.sats < msg.script.minimal_non_dust().to_sat() {
// The receiving node:
// - MUST fail the negotiation if:
// - the sats amount is less than the dust_limit
@ -504,7 +504,7 @@ impl NegotiationContext {
fn sent_tx_add_input(&mut self, msg: &msgs::TxAddInput) -> Result<(), AbortReason> {
let tx = msg.prevtx.as_transaction();
let txin = TxIn {
previous_output: OutPoint { txid: tx.txid(), vout: msg.prevtx_out },
previous_output: OutPoint { txid: tx.compute_txid(), vout: msg.prevtx_out },
sequence: Sequence(msg.sequence),
..Default::default()
};
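A sketch of the construction above, assuming `prevtx` is the full previous transaction carried in the tx_add_input message and `vout`/`sequence` come from that message (hypothetical helper):

// Reference the announced previous output by its (computed) txid and output index.
fn txin_from_prevtx(prevtx: &bitcoin::Transaction, vout: u32, sequence: u32) -> bitcoin::TxIn {
    bitcoin::TxIn {
        previous_output: bitcoin::OutPoint { txid: prevtx.compute_txid(), vout },
        sequence: bitcoin::Sequence(sequence),
        ..Default::default()
    }
}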
@ -1629,7 +1629,7 @@ mod tests {
fn generate_inputs(outputs: &[TestOutput]) -> Vec<(TxIn, TransactionU16LenLimited)> {
let tx = generate_tx(outputs);
let txid = tx.txid();
let txid = tx.compute_txid();
tx.output
.iter()
.enumerate()
@ -1704,7 +1704,7 @@ mod tests {
&vec![TestOutput::P2WPKH(1_000_000); tx_output_count as usize],
(1337 + remaining).into(),
);
let txid = tx.txid();
let txid = tx.compute_txid();
let mut temp: Vec<(TxIn, TransactionU16LenLimited)> = tx
.output
@ -1908,7 +1908,7 @@ mod tests {
let tx =
TransactionU16LenLimited::new(generate_tx(&[TestOutput::P2WPKH(1_000_000)])).unwrap();
let invalid_sequence_input = TxIn {
previous_output: OutPoint { txid: tx.as_transaction().txid(), vout: 0 },
previous_output: OutPoint { txid: tx.as_transaction().compute_txid(), vout: 0 },
..Default::default()
};
do_test_interactive_tx_constructor(TestSession {
@ -1922,7 +1922,7 @@ mod tests {
b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
let duplicate_input = TxIn {
previous_output: OutPoint { txid: tx.as_transaction().txid(), vout: 0 },
previous_output: OutPoint { txid: tx.as_transaction().compute_txid(), vout: 0 },
sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
..Default::default()
};
@ -1938,7 +1938,7 @@ mod tests {
});
// Non-initiator uses same prevout as initiator.
let duplicate_input = TxIn {
previous_output: OutPoint { txid: tx.as_transaction().txid(), vout: 0 },
previous_output: OutPoint { txid: tx.as_transaction().compute_txid(), vout: 0 },
sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
..Default::default()
};
@ -1953,7 +1953,7 @@ mod tests {
b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 95_000)),
});
let duplicate_input = TxIn {
previous_output: OutPoint { txid: tx.as_transaction().txid(), vout: 0 },
previous_output: OutPoint { txid: tx.as_transaction().compute_txid(), vout: 0 },
sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
..Default::default()
};
@ -2005,7 +2005,7 @@ mod tests {
description: "Initiator sends an output below dust value",
inputs_a: vec![],
outputs_a: generate_funding_output(
generate_p2wsh_script_pubkey().dust_value().to_sat() - 1,
generate_p2wsh_script_pubkey().minimal_non_dust().to_sat() - 1,
),
inputs_b: vec![],
outputs_b: vec![],

View file

@ -209,7 +209,7 @@ fn archive_fully_resolved_monitors() {
// Remove the corresponding outputs and transactions the chain source is
// watching. This is to make sure the `Drop` function assertions pass.
nodes.get_mut(0).unwrap().chain_source.remove_watched_txn_and_outputs(
OutPoint { txid: funding_tx.txid(), index: 0 },
OutPoint { txid: funding_tx.compute_txid(), index: 0 },
funding_tx.output[0].script_pubkey.clone()
);
}
@ -231,7 +231,7 @@ fn do_chanmon_claim_value_coop_close(anchors: bool) {
let (_, _, chan_id, funding_tx) =
create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 1_000_000);
let funding_outpoint = OutPoint { txid: funding_tx.txid(), index: 0 };
let funding_outpoint = OutPoint { txid: funding_tx.compute_txid(), index: 0 };
assert_eq!(ChannelId::v1_from_funding_outpoint(funding_outpoint), chan_id);
let chan_feerate = get_feerate!(nodes[0], nodes[1], chan_id) as u64;
@ -389,13 +389,13 @@ fn do_test_claim_value_force_close(anchors: bool, prev_commitment_tx: bool) {
],
};
if anchors {
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 1 }, coinbase_tx.output[1].value);
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 1 }, coinbase_tx.output[1].value);
}
let (_, _, chan_id, funding_tx) =
create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 1_000_000);
let funding_outpoint = OutPoint { txid: funding_tx.txid(), index: 0 };
let funding_outpoint = OutPoint { txid: funding_tx.compute_txid(), index: 0 };
assert_eq!(ChannelId::v1_from_funding_outpoint(funding_outpoint), chan_id);
// This HTLC is immediately claimed, giving node B the preimage
@ -789,14 +789,14 @@ fn do_test_balances_on_local_commitment_htlcs(anchors: bool) {
],
};
if anchors {
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 1 }, coinbase_tx.output[1].value);
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 1 }, coinbase_tx.output[1].value);
}
// Create a single channel with two pending HTLCs from nodes[0] to nodes[1], one which nodes[1]
// knows the preimage for, one which it does not.
let (_, _, chan_id, funding_tx) = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 0);
let funding_outpoint = OutPoint { txid: funding_tx.txid(), index: 0 };
let funding_outpoint = OutPoint { txid: funding_tx.compute_txid(), index: 0 };
let (route, payment_hash, _, payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[1], 10_000_000);
let htlc_cltv_timeout = nodes[0].best_block_info().1 + TEST_FINAL_CLTV + 1; // Note ChannelManager adds one to CLTV timeouts for safety
@ -849,7 +849,7 @@ fn do_test_balances_on_local_commitment_htlcs(anchors: bool) {
if nodes[0].connect_style.borrow().updates_best_block_first() {
let mut txn = nodes[0].tx_broadcaster.txn_broadcast();
assert_eq!(txn.len(), 1);
assert_eq!(txn[0].txid(), commitment_tx.txid());
assert_eq!(txn[0].compute_txid(), commitment_tx.compute_txid());
}
let htlc_balance_known_preimage = Balance::MaybeTimeoutClaimableHTLC {
@ -1000,7 +1000,7 @@ fn test_no_preimage_inbound_htlc_balances() {
let mut nodes = create_network(2, &node_cfgs, &node_chanmgrs);
let (_, _, chan_id, funding_tx) = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 500_000_000);
let funding_outpoint = OutPoint { txid: funding_tx.txid(), index: 0 };
let funding_outpoint = OutPoint { txid: funding_tx.compute_txid(), index: 0 };
// Send two HTLCs, one from A to B, and one from B to A.
let to_b_failed_payment_hash = route_payment(&nodes[0], &[&nodes[1]], 10_000_000).1;
@ -1263,7 +1263,7 @@ fn do_test_revoked_counterparty_commitment_balances(anchors: bool, confirm_htlc_
let (_, _, chan_id, funding_tx) =
create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 100_000_000);
let funding_outpoint = OutPoint { txid: funding_tx.txid(), index: 0 };
let funding_outpoint = OutPoint { txid: funding_tx.compute_txid(), index: 0 };
assert_eq!(ChannelId::v1_from_funding_outpoint(funding_outpoint), chan_id);
// We create five HTLCs for B to claim against A's revoked commitment transaction:
@ -1368,7 +1368,7 @@ fn do_test_revoked_counterparty_commitment_balances(anchors: bool, confirm_htlc_
sorted_vec(nodes[1].chain_monitor.chain_monitor.get_monitor(funding_outpoint).unwrap().get_claimable_balances()));
mine_transaction(&nodes[1], &as_revoked_txn[0]);
let mut claim_txn: Vec<_> = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().drain(..).filter(|tx| tx.input.iter().any(|inp| inp.previous_output.txid == as_revoked_txn[0].txid())).collect();
let mut claim_txn: Vec<_> = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().drain(..).filter(|tx| tx.input.iter().any(|inp| inp.previous_output.txid == as_revoked_txn[0].compute_txid())).collect();
// Currently the revoked commitment is claimed in four transactions as the HTLCs all expire
// quite soon.
assert_eq!(claim_txn.len(), 4);
@ -1554,21 +1554,21 @@ fn do_test_revoked_counterparty_htlc_tx_balances(anchors: bool) {
],
};
if anchors {
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 1 }, coinbase_tx.output[1].value);
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 1 }, coinbase_tx.output[1].value);
}
// Create some initial channels
let (_, _, chan_id, funding_tx) =
create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 12_000_000);
let funding_outpoint = OutPoint { txid: funding_tx.txid(), index: 0 };
let funding_outpoint = OutPoint { txid: funding_tx.compute_txid(), index: 0 };
assert_eq!(ChannelId::v1_from_funding_outpoint(funding_outpoint), chan_id);
let payment_preimage = route_payment(&nodes[0], &[&nodes[1]], 3_000_100).0;
let failed_payment_hash = route_payment(&nodes[1], &[&nodes[0]], 1_000_000).1;
let revoked_local_txn = get_local_commitment_txn!(nodes[1], chan_id);
assert_eq!(revoked_local_txn[0].input.len(), 1);
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, funding_tx.txid());
assert_eq!(revoked_local_txn[0].input[0].previous_output.txid, funding_tx.compute_txid());
if anchors {
assert_eq!(revoked_local_txn[0].output[4].value.to_sat(), 11000); // to_self output
} else {
@ -1866,11 +1866,11 @@ fn do_test_revoked_counterparty_aggregated_claims(anchors: bool) {
script_pubkey: nodes[0].wallet_source.get_change_script().unwrap(),
}],
};
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
let (_, _, chan_id, funding_tx) =
create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 100_000_000);
let funding_outpoint = OutPoint { txid: funding_tx.txid(), index: 0 };
let funding_outpoint = OutPoint { txid: funding_tx.compute_txid(), index: 0 };
assert_eq!(ChannelId::v1_from_funding_outpoint(funding_outpoint), chan_id);
// We create two HTLCs, one which we will give A the preimage to to generate an HTLC-Success
@ -2183,19 +2183,19 @@ fn do_test_claimable_balance_correct_while_payment_pending(outbound_payment: boo
],
};
if anchors {
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 1 }, coinbase_tx.output[1].value);
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 1 }, coinbase_tx.output[1].value);
}
// Create a channel from A -> B
let (_, _, chan_ab_id, funding_tx_ab) =
create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000 /* channel_value (sat) */, 0 /* push_msat */);
let funding_outpoint_ab = OutPoint { txid: funding_tx_ab.txid(), index: 0 };
let funding_outpoint_ab = OutPoint { txid: funding_tx_ab.compute_txid(), index: 0 };
assert_eq!(ChannelId::v1_from_funding_outpoint(funding_outpoint_ab), chan_ab_id);
// Create a channel from B -> C
let (_, _, chan_bc_id, funding_tx_bc) =
create_announced_chan_between_nodes_with_value(&nodes, 1, 2, 1_000_000 /* channel_value (sat) */, 0 /* push_msat */);
let funding_outpoint_bc = OutPoint { txid: funding_tx_bc.txid(), index: 0 };
let funding_outpoint_bc = OutPoint { txid: funding_tx_bc.compute_txid(), index: 0 };
assert_eq!(ChannelId::v1_from_funding_outpoint(funding_outpoint_bc), chan_bc_id);
let (chan_feerate, channel_type_features) = if outbound_payment {
@ -2274,7 +2274,7 @@ fn do_test_restored_packages_retry(check_old_monitor_retries_after_upgrade: bool
if nodes[0].connect_style.borrow().updates_best_block_first() {
let txn = nodes[0].tx_broadcaster.txn_broadcast();
assert_eq!(txn.len(), 1);
assert_eq!(txn[0].txid(), commitment_tx.txid());
assert_eq!(txn[0].compute_txid(), commitment_tx.compute_txid());
}
// Connect blocks until the HTLC's expiration is met, expecting a transaction broadcast.
@ -2362,7 +2362,7 @@ fn do_test_monitor_rebroadcast_pending_claims(anchors: bool) {
script_pubkey: nodes[0].wallet_source.get_change_script().unwrap(),
}],
};
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
// Set up a helper closure we'll use throughout our test. We should only expect retries without
// bumps if fees have not increased after a block has been connected (assuming the height timer
@ -2444,8 +2444,8 @@ fn do_test_monitor_rebroadcast_pending_claims(anchors: bool) {
// If we have a `ConnectStyle` that advertises the new block first without the transactions,
// we'll receive an extra bumped claim.
if nodes[0].connect_style.borrow().updates_best_block_first() {
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[0].wallet_source.remove_utxo(bitcoin::OutPoint { txid: htlc_tx.txid(), vout: 1 });
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[0].wallet_source.remove_utxo(bitcoin::OutPoint { txid: htlc_tx.compute_txid(), vout: 1 });
check_htlc_retry(true, anchors);
}
nodes[0].chain_monitor.chain_monitor.rebroadcast_pending_claims();
@ -2518,7 +2518,7 @@ fn test_yield_anchors_events() {
script_pubkey: nodes[0].wallet_source.get_change_script().unwrap(),
}],
};
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[0].bump_tx_handler.handle_event(&event);
let mut txn = nodes[0].tx_broadcaster.unique_txn_broadcast();
assert_eq!(txn.len(), 2);
@ -2697,7 +2697,7 @@ fn test_anchors_aggregated_revoked_htlc_tx() {
script_pubkey: nodes[1].wallet_source.get_change_script().unwrap(),
}],
};
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, utxo_value);
nodes[1].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, utxo_value);
match event {
Event::BumpTransaction(event) => nodes[1].bump_tx_handler.handle_event(&event),
_ => panic!("Unexpected event"),
@ -2705,7 +2705,7 @@ fn test_anchors_aggregated_revoked_htlc_tx() {
let txn = nodes[1].tx_broadcaster.txn_broadcast();
assert_eq!(txn.len(), 2);
assert_eq!(txn[0].output.len(), 6); // 2 HTLC outputs + 1 to_self output + 1 to_remote output + 2 anchor outputs
if txn[0].input[0].previous_output.txid == chan_a.3.txid() {
if txn[0].input[0].previous_output.txid == chan_a.3.compute_txid() {
check_spends!(&txn[0], &chan_a.3);
} else {
check_spends!(&txn[0], &chan_b.3);
@ -2730,7 +2730,7 @@ fn test_anchors_aggregated_revoked_htlc_tx() {
let txn = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
assert_eq!(txn.len(), 4);
let (revoked_htlc_claim_a, revoked_htlc_claim_b) = if txn[0].input[0].previous_output.txid == revoked_commitment_txs[0].txid() {
let (revoked_htlc_claim_a, revoked_htlc_claim_b) = if txn[0].input[0].previous_output.txid == revoked_commitment_txs[0].compute_txid() {
(if txn[0].input.len() == 2 { &txn[0] } else { &txn[1] }, if txn[2].input.len() == 2 { &txn[2] } else { &txn[3] })
} else {
(if txn[2].input.len() == 2 { &txn[2] } else { &txn[3] }, if txn[0].input.len() == 2 { &txn[0] } else { &txn[1] })
@ -2778,7 +2778,7 @@ fn test_anchors_aggregated_revoked_htlc_tx() {
version: Version::TWO,
lock_time: LockTime::ZERO,
input: vec![TxIn { // Fee input
previous_output: bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 },
previous_output: bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 },
..Default::default()
}],
output: vec![TxOut { // Fee input change
@ -2838,7 +2838,7 @@ fn test_anchors_aggregated_revoked_htlc_tx() {
let revoked_htlc_claims = txn.iter().filter(|tx|
tx.input.len() == 2 &&
tx.output.len() == 1 &&
tx.input[0].previous_output.txid == htlc_tx.txid()
tx.input[0].previous_output.txid == htlc_tx.compute_txid()
).collect::<Vec<_>>();
assert_eq!(revoked_htlc_claims.len(), 2);
for revoked_htlc_claim in revoked_htlc_claims {
@ -2847,7 +2847,7 @@ fn test_anchors_aggregated_revoked_htlc_tx() {
let mut revoked_claim_transaction_map = new_hash_map();
for current_tx in txn.into_iter() {
revoked_claim_transaction_map.insert(current_tx.txid(), current_tx);
revoked_claim_transaction_map.insert(current_tx.compute_txid(), current_tx);
}
revoked_claim_transaction_map
};
@ -3016,7 +3016,7 @@ fn do_test_monitor_claims_with_random_signatures(anchors: bool, confirm_counterp
],
};
if anchors {
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.txid(), vout: 0 }, coinbase_tx.output[0].value);
nodes[0].wallet_source.add_utxo(bitcoin::OutPoint { txid: coinbase_tx.compute_txid(), vout: 0 }, coinbase_tx.output[0].value);
}
// Open a channel and route a payment. We'll let it timeout to claim it.
@ -3084,8 +3084,8 @@ fn do_test_monitor_claims_with_random_signatures(anchors: bool, confirm_counterp
{
let mut txn = nodes[0].tx_broadcaster.txn_broadcast();
assert_eq!(txn.len(), 1);
assert_eq!(txn[0].txid(), htlc_timeout_tx.txid());
assert_ne!(txn[0].wtxid(), htlc_timeout_tx.wtxid());
assert_eq!(txn[0].compute_txid(), htlc_timeout_tx.compute_txid());
assert_ne!(txn[0].compute_wtxid(), htlc_timeout_tx.compute_wtxid());
}
}
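
Every hunk in this file follows the same rust-bitcoin 0.32 rename: `Transaction::txid()`/`wtxid()` become `compute_txid()`/`compute_wtxid()` (the old accessors are deprecated). A minimal sketch of the renamed calls, assuming only the `bitcoin` 0.32 crate as a dependency:

use bitcoin::absolute::LockTime;
use bitcoin::transaction::Version;
use bitcoin::Transaction;

fn main() {
    // An empty transaction, built the way the tests above build theirs.
    let tx = Transaction {
        version: Version::TWO,
        lock_time: LockTime::ZERO,
        input: Vec::new(),
        output: Vec::new(),
    };
    // The renamed hashing accessors; every `txid()`/`wtxid()` call site above
    // switches to these.
    let txid = tx.compute_txid();
    let wtxid = tx.compute_wtxid();
    println!("txid {} wtxid {}", txid, wtxid);
}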

View file

@ -2771,8 +2771,8 @@ impl<NS: Deref> ReadableArgs<(Option<PublicKey>, &NS)> for InboundOnionPayload w
let mut custom_tlvs = Vec::new();
let tlv_len = BigSize::read(r)?;
let rd = FixedLengthReader::new(r, tlv_len.0);
decode_tlv_stream_with_custom_tlv_decode!(rd, {
let mut rd = FixedLengthReader::new(r, tlv_len.0);
decode_tlv_stream_with_custom_tlv_decode!(&mut rd, {
(2, amt, (option, encoding: (u64, HighZeroBytesDroppedBigSize))),
(4, cltv_value, (option, encoding: (u32, HighZeroBytesDroppedBigSize))),
(6, short_id, option),
@ -2786,7 +2786,7 @@ impl<NS: Deref> ReadableArgs<(Option<PublicKey>, &NS)> for InboundOnionPayload w
}, |msg_type: u64, msg_reader: &mut FixedLengthReader<_>| -> Result<bool, DecodeError> {
if msg_type < 1 << 16 { return Ok(false) }
let mut value = Vec::new();
msg_reader.read_to_end(&mut value)?;
msg_reader.read_to_limit(&mut value, u64::MAX)?;
custom_tlvs.push((msg_type, value));
Ok(true)
});
@ -4007,11 +4007,11 @@ mod tests {
output: vec![
TxOut {
value: Amount::from_sat(12704566),
script_pubkey: Address::from_str("bc1qzlffunw52jav8vwdu5x3jfk6sr8u22rmq3xzw2").unwrap().payload().script_pubkey(),
script_pubkey: Address::from_str("bc1qzlffunw52jav8vwdu5x3jfk6sr8u22rmq3xzw2").unwrap().assume_checked().script_pubkey(),
},
TxOut {
value: Amount::from_sat(245148),
script_pubkey: Address::from_str("bc1qxmk834g5marzm227dgqvynd23y2nvt2ztwcw2z").unwrap().payload().script_pubkey(),
script_pubkey: Address::from_str("bc1qxmk834g5marzm227dgqvynd23y2nvt2ztwcw2z").unwrap().assume_checked().script_pubkey(),
},
],
}).unwrap(),
@ -4030,7 +4030,7 @@ mod tests {
channel_id: ChannelId::from_bytes([2; 32]),
serial_id: 4886718345,
sats: 4886718345,
script: Address::from_str("bc1qxmk834g5marzm227dgqvynd23y2nvt2ztwcw2z").unwrap().payload().script_pubkey(),
script: Address::from_str("bc1qxmk834g5marzm227dgqvynd23y2nvt2ztwcw2z").unwrap().assume_checked().script_pubkey(),
};
let encoded_value = tx_add_output.encode();
let target_value = <Vec<u8>>::from_hex("0202020202020202020202020202020202020202020202020202020202020202000000012345678900000001234567890016001436ec78d514df462da95e6a00c24daa8915362d42").unwrap();
@ -4179,7 +4179,7 @@ mod tests {
scriptpubkey:
if script_type == 1 { Address::p2pkh(&::bitcoin::PublicKey{compressed: true, inner: pubkey_1}, Network::Testnet).script_pubkey() }
else if script_type == 2 { Address::p2sh(&script, Network::Testnet).unwrap().script_pubkey() }
else if script_type == 3 { Address::p2wpkh(&::bitcoin::PublicKey{compressed: true, inner: pubkey_1}, Network::Testnet).unwrap().script_pubkey() }
else if script_type == 3 { Address::p2wpkh(&::bitcoin::CompressedPublicKey(pubkey_1), Network::Testnet).script_pubkey() }
else { Address::p2wsh(&script, Network::Testnet).script_pubkey() },
};
let encoded_value = shutdown.encode();
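
Two rust-bitcoin 0.32 address changes show up in the hunks above: the `Payload` indirection is dropped, so a parsed (network-unchecked) address is checked and asked for its script directly, and `Address::p2wpkh` now takes a `&CompressedPublicKey` and no longer returns a `Result`. A small sketch of the parsing side, reusing an address string from the test above and assuming the `bitcoin` 0.32 crate:

use std::str::FromStr;

use bitcoin::Address;

fn main() {
    // Parsing yields an `Address<NetworkUnchecked>`; the tests call `assume_checked()`
    // because the expected network is known, while production code would usually
    // prefer `require_network(..)`.
    let addr = Address::from_str("bc1qzlffunw52jav8vwdu5x3jfk6sr8u22rmq3xzw2")
        .unwrap()
        .assume_checked();
    // `payload().script_pubkey()` is replaced by taking the script off the address.
    let spk = addr.script_pubkey();
    assert!(spk.is_witness_program());
}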

View file

@ -702,7 +702,7 @@ fn do_retry_with_no_persist(confirm_before_reload: bool) {
if !confirm_before_reload {
let as_broadcasted_txn = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
assert_eq!(as_broadcasted_txn.len(), 1);
assert_eq!(as_broadcasted_txn[0].txid(), as_commitment_tx.txid());
assert_eq!(as_broadcasted_txn[0].compute_txid(), as_commitment_tx.compute_txid());
} else {
assert!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().is_empty());
}
@ -772,7 +772,7 @@ fn do_retry_with_no_persist(confirm_before_reload: bool) {
mine_transaction(&nodes[0], &as_commitment_tx);
let txn = nodes[0].tx_broadcaster.unique_txn_broadcast();
assert_eq!(txn.len(), 1);
assert_eq!(txn[0].txid(), as_commitment_tx.txid());
assert_eq!(txn[0].compute_txid(), as_commitment_tx.compute_txid());
}
mine_transaction(&nodes[0], &bs_htlc_claim_txn);
expect_payment_sent(&nodes[0], payment_preimage_1, None, true, false);

View file

@ -201,7 +201,7 @@ fn test_no_txn_manager_serialize_deserialize() {
nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
let chan_0_monitor_serialized =
get_monitor!(nodes[0], ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.txid(), index: 0 })).encode();
get_monitor!(nodes[0], ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.compute_txid(), index: 0 })).encode();
reload_node!(nodes[0], nodes[0].node.encode(), &[&chan_0_monitor_serialized], persister, new_chain_monitor, nodes_0_deserialized);
nodes[0].node.peer_connected(&nodes[1].node.get_our_node_id(), &msgs::Init {
@ -289,7 +289,7 @@ fn test_manager_serialize_deserialize_events() {
let events_4 = nodes[0].node.get_and_clear_pending_events();
assert_eq!(events_4.len(), 0);
assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().len(), 1);
assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap()[0].txid(), funding_output.txid);
assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap()[0].compute_txid(), funding_output.txid);
// Make sure the channel is functioning as though the de/serialization never happened
assert_eq!(nodes[0].node.list_channels().len(), 1);
@ -459,7 +459,7 @@ fn test_manager_serialize_deserialize_inconsistent_monitor() {
let txn = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap();
assert_eq!(txn.len(), 1);
check_spends!(txn[0], funding_tx);
assert_eq!(txn[0].input[0].previous_output.txid, funding_tx.txid());
assert_eq!(txn[0].input[0].previous_output.txid, funding_tx.compute_txid());
}
check_added_monitors!(nodes[0], 1);
@ -1227,7 +1227,7 @@ fn test_reload_partial_funding_batch() {
assert_eq!(nodes[0].tx_broadcaster.txn_broadcast().len(), 0);
// Reload the node while a subset of the channels in the funding batch have persisted monitors.
let channel_id_1 = ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.txid(), index: 0 });
let channel_id_1 = ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.compute_txid(), index: 0 });
let node_encoded = nodes[0].node.encode();
let channel_monitor_1_serialized = get_monitor!(nodes[0], channel_id_1).encode();
reload_node!(nodes[0], node_encoded, &[&channel_monitor_1_serialized], new_persister, new_chain_monitor, new_channel_manager);
@ -1249,9 +1249,9 @@ fn test_reload_partial_funding_batch() {
{
let broadcasted_txs = nodes[0].tx_broadcaster.txn_broadcast();
assert_eq!(broadcasted_txs.len(), 1);
assert!(broadcasted_txs[0].txid() != tx.txid());
assert!(broadcasted_txs[0].compute_txid() != tx.compute_txid());
assert_eq!(broadcasted_txs[0].input.len(), 1);
assert_eq!(broadcasted_txs[0].input[0].previous_output.txid, tx.txid());
assert_eq!(broadcasted_txs[0].input[0].previous_output.txid, tx.compute_txid());
}
// Ensure the channels don't exist anymore.

View file

@ -277,7 +277,7 @@ fn do_test_unconf_chan(reload_node: bool, reorg_after_reload: bool, use_funding_
assert_eq!(relevant_txids[0].1, chan_conf_height);
assert_eq!(block_hash_opt, Some(expected_hash));
let txid = relevant_txids[0].0;
assert_eq!(txid, chan.3.txid());
assert_eq!(txid, chan.3.compute_txid());
nodes[0].node.transaction_unconfirmed(&txid);
assert_eq!(nodes[0].node.list_usable_channels().len(), 0);
} else if connect_style == ConnectStyle::FullBlockViaListen {
@ -323,7 +323,7 @@ fn do_test_unconf_chan(reload_node: bool, reorg_after_reload: bool, use_funding_
assert_eq!(chan_conf_height, relevant_txids[0].1);
assert_eq!(block_hash_opt, Some(expected_hash));
let txid = relevant_txids[0].0;
assert_eq!(txid, chan.3.txid());
assert_eq!(txid, chan.3.compute_txid());
nodes[0].node.transaction_unconfirmed(&txid);
assert_eq!(nodes[0].node.list_channels().len(), 0);
} else if connect_style == ConnectStyle::FullBlockViaListen {
@ -436,7 +436,7 @@ fn test_set_outpoints_partial_claiming() {
assert_eq!(remote_txn.len(), 3);
assert_eq!(remote_txn[0].output.len(), 4);
assert_eq!(remote_txn[0].input.len(), 1);
assert_eq!(remote_txn[0].input[0].previous_output.txid, chan.3.txid());
assert_eq!(remote_txn[0].input[0].previous_output.txid, chan.3.compute_txid());
check_spends!(remote_txn[1], remote_txn[0]);
check_spends!(remote_txn[2], remote_txn[0]);
@ -532,7 +532,7 @@ fn do_test_to_remote_after_local_detection(style: ConnectStyle) {
let (_, _, chan_id, funding_tx) =
create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 100_000_000);
let funding_outpoint = OutPoint { txid: funding_tx.txid(), index: 0 };
let funding_outpoint = OutPoint { txid: funding_tx.compute_txid(), index: 0 };
assert_eq!(ChannelId::v1_from_funding_outpoint(funding_outpoint), chan_id);
let remote_txn_a = get_local_commitment_txn!(nodes[0], chan_id);
@ -732,7 +732,7 @@ fn test_htlc_preimage_claim_prev_counterparty_commitment_after_current_counterpa
let mut txn = nodes[0].tx_broadcaster.txn_broadcast();
assert_eq!(txn.len(), 1);
let current_commitment_a = txn.pop().unwrap();
assert_ne!(current_commitment_a.txid(), prev_commitment_a.txid());
assert_ne!(current_commitment_a.compute_txid(), prev_commitment_a.compute_txid());
check_spends!(current_commitment_a, funding_tx);
mine_transaction(&nodes[0], &current_commitment_a);
@ -871,7 +871,7 @@ fn do_test_retries_own_commitment_broadcast_after_reorg(anchors: bool, revoked_c
assert_eq!(txn.len(), 2);
check_spends!(txn[0], txn[1]); // HTLC timeout A
check_spends!(txn[1], funding_tx); // Commitment A
assert_ne!(txn[1].txid(), commitment_b.txid());
assert_ne!(txn[1].compute_txid(), commitment_b.compute_txid());
}
}
}

View file

@ -50,7 +50,7 @@ fn pre_funding_lock_shutdown_test() {
mine_transaction(&nodes[0], &tx);
mine_transaction(&nodes[1], &tx);
nodes[0].node.close_channel(&ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.txid(), index: 0 }), &nodes[1].node.get_our_node_id()).unwrap();
nodes[0].node.close_channel(&ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.compute_txid(), index: 0 }), &nodes[1].node.get_our_node_id()).unwrap();
let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &node_0_shutdown);
let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
@ -1425,7 +1425,7 @@ fn batch_funding_failure() {
let err = "Error in transaction funding: Misuse error: No output matched the script_pubkey and value in the FundingGenerationReady event".to_string();
let temp_err = "No output matched the script_pubkey and value in the FundingGenerationReady event".to_string();
let post_funding_chan_id_a = ChannelId::v1_from_funding_txid(tx.txid().as_ref(), 0);
let post_funding_chan_id_a = ChannelId::v1_from_funding_txid(tx.compute_txid().as_ref(), 0);
let close = [
ExpectedCloseEvent::from_id_reason(post_funding_chan_id_a, true, ClosureReason::ProcessingError { err: err.clone() }),
ExpectedCloseEvent::from_id_reason(temp_chan_id_b, false, ClosureReason::ProcessingError { err: temp_err }),

View file

@ -106,7 +106,7 @@ use bitcoin::{WitnessProgram, Network, WitnessVersion};
use bitcoin::constants::ChainHash;
use bitcoin::secp256k1::{Keypair, PublicKey, Secp256k1, self};
use bitcoin::secp256k1::schnorr::Signature;
use bitcoin::address::{Address, Payload};
use bitcoin::address::Address;
use core::time::Duration;
use core::hash::{Hash, Hasher};
use crate::io;
@ -128,7 +128,7 @@ use crate::offers::parse::{Bolt12ParseError, Bolt12SemanticError, ParsedMessage}
use crate::offers::payer::{PAYER_METADATA_TYPE, PayerTlvStream, PayerTlvStreamRef};
use crate::offers::refund::{IV_BYTES_WITH_METADATA as REFUND_IV_BYTES_WITH_METADATA, IV_BYTES_WITHOUT_METADATA as REFUND_IV_BYTES_WITHOUT_METADATA, Refund, RefundContents};
use crate::offers::signer::{Metadata, self};
use crate::util::ser::{HighZeroBytesDroppedBigSize, Iterable, Readable, SeekReadable, WithoutLength, Writeable, Writer};
use crate::util::ser::{CursorReadable, HighZeroBytesDroppedBigSize, Iterable, Readable, WithoutLength, Writeable, Writer};
use crate::util::string::PrintableString;
#[allow(unused_imports)]
@ -1106,12 +1106,11 @@ pub(super) fn filter_fallbacks(
Err(_) => return None,
};
let program = address.program.clone();
let witness_program = match WitnessProgram::new(version, program) {
let witness_program = match WitnessProgram::new(version, &address.program) {
Ok(witness_program) => witness_program,
Err(_) => return None,
};
Some(Address::new(network, Payload::WitnessProgram(witness_program)))
Some(Address::from_witness_program(witness_program, network))
};
fallbacks.iter().filter_map(to_valid_address).collect()
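
`WitnessProgram::new` now borrows the program bytes, and the `Payload::WitnessProgram` wrapper is replaced by `Address::from_witness_program`, as in the closure above. The same construction in isolation (a hedged sketch; the 32-byte program is arbitrary):

use bitcoin::{Address, Network, WitnessProgram, WitnessVersion};

fn main() {
    // Program bytes are borrowed; no owned Vec is required any more.
    let program = [0u8; 32];
    let witness_program = WitnessProgram::new(WitnessVersion::V1, &program).unwrap();
    // `Address::new(network, Payload::WitnessProgram(..))` becomes:
    let address = Address::from_witness_program(witness_program, Network::Bitcoin);
    assert!(address.script_pubkey().is_witness_program());
}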
@ -1274,13 +1273,13 @@ type FullInvoiceTlvStreamRef<'a> = (
SignatureTlvStreamRef<'a>,
);
impl SeekReadable for FullInvoiceTlvStream {
fn read<R: io::Read + io::Seek>(r: &mut R) -> Result<Self, DecodeError> {
let payer = SeekReadable::read(r)?;
let offer = SeekReadable::read(r)?;
let invoice_request = SeekReadable::read(r)?;
let invoice = SeekReadable::read(r)?;
let signature = SeekReadable::read(r)?;
impl CursorReadable for FullInvoiceTlvStream {
fn read<R: AsRef<[u8]>>(r: &mut io::Cursor<R>) -> Result<Self, DecodeError> {
let payer = CursorReadable::read(r)?;
let offer = CursorReadable::read(r)?;
let invoice_request = CursorReadable::read(r)?;
let invoice = CursorReadable::read(r)?;
let signature = CursorReadable::read(r)?;
Ok((payer, offer, invoice_request, invoice, signature))
}
@ -1296,12 +1295,12 @@ type PartialInvoiceTlvStreamRef<'a> = (
InvoiceTlvStreamRef<'a>,
);
impl SeekReadable for PartialInvoiceTlvStream {
fn read<R: io::Read + io::Seek>(r: &mut R) -> Result<Self, DecodeError> {
let payer = SeekReadable::read(r)?;
let offer = SeekReadable::read(r)?;
let invoice_request = SeekReadable::read(r)?;
let invoice = SeekReadable::read(r)?;
impl CursorReadable for PartialInvoiceTlvStream {
fn read<R: AsRef<[u8]>>(r: &mut io::Cursor<R>) -> Result<Self, DecodeError> {
let payer = CursorReadable::read(r)?;
let offer = CursorReadable::read(r)?;
let invoice_request = CursorReadable::read(r)?;
let invoice = CursorReadable::read(r)?;
Ok((payer, offer, invoice_request, invoice))
}
@ -1430,13 +1429,13 @@ pub(super) fn check_invoice_signing_pubkey(
mod tests {
use super::{Bolt12Invoice, DEFAULT_RELATIVE_EXPIRY, FallbackAddress, FullInvoiceTlvStreamRef, InvoiceTlvStreamRef, SIGNATURE_TAG, UnsignedBolt12Invoice};
use bitcoin::{WitnessProgram, WitnessVersion};
use bitcoin::{CompressedPublicKey, WitnessProgram, WitnessVersion};
use bitcoin::constants::ChainHash;
use bitcoin::script::ScriptBuf;
use bitcoin::hashes::Hash;
use bitcoin::network::Network;
use bitcoin::secp256k1::{Keypair, Message, Secp256k1, SecretKey, XOnlyPublicKey, self};
use bitcoin::address::{Address, Payload};
use bitcoin::address::Address;
use bitcoin::key::TweakedPublicKey;
use core::time::Duration;
@ -2000,7 +1999,7 @@ mod tests {
invoice.fallbacks(),
vec![
Address::p2wsh(&script, Network::Bitcoin),
Address::p2wpkh(&pubkey, Network::Bitcoin).unwrap(),
Address::p2wpkh(&CompressedPublicKey(pubkey.inner), Network::Bitcoin),
Address::p2tr_tweaked(tweaked_pubkey, Network::Bitcoin),
],
);
@ -2070,7 +2069,7 @@ mod tests {
.sign(payer_sign)
{
Ok(_) => panic!("expected error"),
Err(e) => assert_eq!(e, SignError::Verification(secp256k1::Error::InvalidSignature)),
Err(e) => assert_eq!(e, SignError::Verification(secp256k1::Error::IncorrectSignature)),
}
}
@ -2308,16 +2307,16 @@ mod tests {
match Bolt12Invoice::try_from(buffer) {
Ok(invoice) => {
let v1_witness_program = WitnessProgram::new(WitnessVersion::V1, vec![0u8; 33]).unwrap();
let v2_witness_program = WitnessProgram::new(WitnessVersion::V2, vec![0u8; 40]).unwrap();
let v1_witness_program = WitnessProgram::new(WitnessVersion::V1, &[0u8; 33]).unwrap();
let v2_witness_program = WitnessProgram::new(WitnessVersion::V2, &[0u8; 40]).unwrap();
assert_eq!(
invoice.fallbacks(),
vec![
Address::p2wsh(&script, Network::Bitcoin),
Address::p2wpkh(&pubkey, Network::Bitcoin).unwrap(),
Address::p2wpkh(&CompressedPublicKey(pubkey.inner), Network::Bitcoin),
Address::p2tr_tweaked(tweaked_pubkey, Network::Bitcoin),
Address::new(Network::Bitcoin, Payload::WitnessProgram(v1_witness_program)),
Address::new(Network::Bitcoin, Payload::WitnessProgram(v2_witness_program)),
Address::from_witness_program(v1_witness_program, Network::Bitcoin),
Address::from_witness_program(v2_witness_program, Network::Bitcoin),
],
);
},
@ -2479,7 +2478,7 @@ mod tests {
match Bolt12Invoice::try_from(buffer) {
Ok(_) => panic!("expected error"),
Err(e) => {
assert_eq!(e, Bolt12ParseError::InvalidSignature(secp256k1::Error::InvalidSignature));
assert_eq!(e, Bolt12ParseError::InvalidSignature(secp256k1::Error::IncorrectSignature));
},
}
}

View file

@ -76,7 +76,7 @@ use crate::offers::offer::{Offer, OfferContents, OfferId, OfferTlvStream, OfferT
use crate::offers::parse::{Bolt12ParseError, ParsedMessage, Bolt12SemanticError};
use crate::offers::payer::{PayerContents, PayerTlvStream, PayerTlvStreamRef};
use crate::offers::signer::{Metadata, MetadataMaterial};
use crate::util::ser::{HighZeroBytesDroppedBigSize, Readable, SeekReadable, WithoutLength, Writeable, Writer};
use crate::util::ser::{CursorReadable, HighZeroBytesDroppedBigSize, Readable, WithoutLength, Writeable, Writer};
use crate::util::string::{PrintableString, UntrustedString};
#[cfg(not(c_bindings))]
@ -1071,12 +1071,12 @@ type FullInvoiceRequestTlvStreamRef<'a> = (
SignatureTlvStreamRef<'a>,
);
impl SeekReadable for FullInvoiceRequestTlvStream {
fn read<R: io::Read + io::Seek>(r: &mut R) -> Result<Self, DecodeError> {
let payer = SeekReadable::read(r)?;
let offer = SeekReadable::read(r)?;
let invoice_request = SeekReadable::read(r)?;
let signature = SeekReadable::read(r)?;
impl CursorReadable for FullInvoiceRequestTlvStream {
fn read<R: AsRef<[u8]>>(r: &mut io::Cursor<R>) -> Result<Self, DecodeError> {
let payer = CursorReadable::read(r)?;
let offer = CursorReadable::read(r)?;
let invoice_request = CursorReadable::read(r)?;
let signature = CursorReadable::read(r)?;
Ok((payer, offer, invoice_request, signature))
}
@ -1890,7 +1890,7 @@ mod tests {
.sign(recipient_sign)
{
Ok(_) => panic!("expected error"),
Err(e) => assert_eq!(e, SignError::Verification(secp256k1::Error::InvalidSignature)),
Err(e) => assert_eq!(e, SignError::Verification(secp256k1::Error::IncorrectSignature)),
}
}
@ -2277,7 +2277,7 @@ mod tests {
match InvoiceRequest::try_from(buffer) {
Ok(_) => panic!("expected error"),
Err(e) => {
assert_eq!(e, Bolt12ParseError::InvalidSignature(secp256k1::Error::InvalidSignature));
assert_eq!(e, Bolt12ParseError::InvalidSignature(secp256k1::Error::IncorrectSignature));
},
}
}

View file

@ -12,7 +12,7 @@
use bitcoin::secp256k1;
use crate::io;
use crate::ln::msgs::DecodeError;
use crate::util::ser::SeekReadable;
use crate::util::ser::CursorReadable;
#[allow(unused_imports)]
use crate::prelude::*;
@ -91,17 +91,17 @@ mod sealed {
/// A wrapper for reading a message as a TLV stream `T` from a byte sequence, while still
/// maintaining ownership of the bytes for later use.
pub(super) struct ParsedMessage<T: SeekReadable> {
pub(super) struct ParsedMessage<T: CursorReadable> {
pub bytes: Vec<u8>,
pub tlv_stream: T,
}
impl<T: SeekReadable> TryFrom<Vec<u8>> for ParsedMessage<T> {
impl<T: CursorReadable> TryFrom<Vec<u8>> for ParsedMessage<T> {
type Error = DecodeError;
fn try_from(bytes: Vec<u8>) -> Result<Self, Self::Error> {
let mut cursor = io::Cursor::new(bytes);
let tlv_stream: T = SeekReadable::read(&mut cursor)?;
let tlv_stream: T = CursorReadable::read(&mut cursor)?;
// Ensure that there are no more TLV records left to parse.
if cursor.position() < cursor.get_ref().len() as u64 {

View file

@ -105,7 +105,7 @@ use crate::offers::offer::{OfferTlvStream, OfferTlvStreamRef};
use crate::offers::parse::{Bech32Encode, Bolt12ParseError, Bolt12SemanticError, ParsedMessage};
use crate::offers::payer::{PayerContents, PayerTlvStream, PayerTlvStreamRef};
use crate::offers::signer::{Metadata, MetadataMaterial, self};
use crate::util::ser::{SeekReadable, Readable, WithoutLength, Writeable, Writer};
use crate::util::ser::{CursorReadable, Readable, WithoutLength, Writeable, Writer};
use crate::util::string::PrintableString;
#[cfg(not(c_bindings))]
@ -802,11 +802,11 @@ type RefundTlvStreamRef<'a> = (
InvoiceRequestTlvStreamRef<'a>,
);
impl SeekReadable for RefundTlvStream {
fn read<R: io::Read + io::Seek>(r: &mut R) -> Result<Self, DecodeError> {
let payer = SeekReadable::read(r)?;
let offer = SeekReadable::read(r)?;
let invoice_request = SeekReadable::read(r)?;
impl CursorReadable for RefundTlvStream {
fn read<R: AsRef<[u8]>>(r: &mut io::Cursor<R>) -> Result<Self, DecodeError> {
let payer = CursorReadable::read(r)?;
let offer = CursorReadable::read(r)?;
let invoice_request = CursorReadable::read(r)?;
Ok((payer, offer, invoice_request))
}

View file

@ -28,7 +28,7 @@ use crate::offers::offer::{
Amount, Offer, OfferContents, OfferTlvStream, OfferTlvStreamRef, Quantity,
};
use crate::offers::parse::{Bolt12ParseError, Bolt12SemanticError, ParsedMessage};
use crate::util::ser::{Iterable, SeekReadable, WithoutLength, Writeable, Writer};
use crate::util::ser::{CursorReadable, Iterable, WithoutLength, Writeable, Writer};
use crate::util::string::PrintableString;
use bitcoin::address::Address;
use bitcoin::constants::ChainHash;
@ -458,11 +458,11 @@ impl TryFrom<Vec<u8>> for StaticInvoice {
type FullInvoiceTlvStream = (OfferTlvStream, InvoiceTlvStream, SignatureTlvStream);
impl SeekReadable for FullInvoiceTlvStream {
fn read<R: io::Read + io::Seek>(r: &mut R) -> Result<Self, DecodeError> {
let offer = SeekReadable::read(r)?;
let invoice = SeekReadable::read(r)?;
let signature = SeekReadable::read(r)?;
impl CursorReadable for FullInvoiceTlvStream {
fn read<R: AsRef<[u8]>>(r: &mut io::Cursor<R>) -> Result<Self, DecodeError> {
let offer = CursorReadable::read(r)?;
let invoice = CursorReadable::read(r)?;
let signature = CursorReadable::read(r)?;
Ok((offer, invoice, signature))
}
@ -1130,7 +1130,7 @@ mod tests {
Err(e) => {
assert_eq!(
e,
Bolt12ParseError::InvalidSignature(secp256k1::Error::InvalidSignature)
Bolt12ParseError::InvalidSignature(secp256k1::Error::IncorrectSignature)
);
},
}

View file

@ -172,7 +172,7 @@ impl<L: Logger + ?Sized> ReadableArgs<(u64, &L)> for OffersMessage {
}
let mut bytes = Vec::new();
r.read_to_end(&mut bytes).unwrap();
r.read_to_limit(&mut bytes, u64::MAX).unwrap();
match Self::parse(tlv_type, bytes) {
Ok(message) => Ok(message),

View file

@ -3908,14 +3908,16 @@ pub mod benches {
let mut v = Vec::new();
d.read_to_end(&mut v).unwrap();
bench.bench_function("read_network_graph", |b| b.iter(||
NetworkGraph::read(&mut std::io::Cursor::new(black_box(&v)), &logger).unwrap()
NetworkGraph::read(&mut crate::io::Cursor::new(black_box(&v)), &logger).unwrap()
));
}
pub fn write_network_graph(bench: &mut Criterion) {
let logger = crate::util::test_utils::TestLogger::new();
let (mut d, _) = crate::routing::router::bench_utils::get_graph_scorer_file().unwrap();
let net_graph = NetworkGraph::read(&mut d, &logger).unwrap();
let mut graph_buffer = Vec::new();
d.read_to_end(&mut graph_buffer).unwrap();
let net_graph = NetworkGraph::read(&mut &graph_buffer[..], &logger).unwrap();
bench.bench_function("write_network_graph", |b| b.iter(||
black_box(&net_graph).encode()
));

View file

@ -8696,7 +8696,7 @@ mod tests {
pub(crate) mod bench_utils {
use super::*;
use std::fs::File;
use std::io::Read;
use bitcoin::hashes::Hash;
use bitcoin::secp256k1::SecretKey;
@ -8756,9 +8756,13 @@ pub(crate) mod bench_utils {
pub(crate) fn read_graph_scorer(logger: &TestLogger)
-> Result<(Arc<NetworkGraph<&TestLogger>>, ProbabilisticScorer<Arc<NetworkGraph<&TestLogger>>, &TestLogger>), &'static str> {
let (mut graph_file, mut scorer_file) = get_graph_scorer_file()?;
let graph = Arc::new(NetworkGraph::read(&mut graph_file, logger).unwrap());
let mut graph_buffer = Vec::new();
let mut scorer_buffer = Vec::new();
graph_file.read_to_end(&mut graph_buffer).unwrap();
scorer_file.read_to_end(&mut scorer_buffer).unwrap();
let graph = Arc::new(NetworkGraph::read(&mut &graph_buffer[..], logger).unwrap());
let scorer_args = (Default::default(), Arc::clone(&graph), logger);
let scorer = ProbabilisticScorer::read(&mut scorer_file, scorer_args).unwrap();
let scorer = ProbabilisticScorer::read(&mut &scorer_buffer[..], scorer_args).unwrap();
Ok((graph, scorer))
}
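
The helpers above stop handing a `File` straight to the decoders and instead buffer it fully, decoding from `&mut &buffer[..]`: a byte slice implements the reader trait the decoders take, and it advances as it is read. The slice-reborrow pattern in plain std terms (illustrative bytes only):

use std::io::Read;

fn main() {
    let buffer: Vec<u8> = vec![1, 2, 3, 4];
    // `&[u8]` implements `Read`; a `&mut &buffer[..]`-style reborrow gives a reader
    // that advances through the slice without copying it.
    let mut reader = &buffer[..];
    let mut first = [0u8; 2];
    reader.read_exact(&mut first).unwrap();
    assert_eq!(first, [1u8, 2]);
    assert_eq!(reader, &[3u8, 4][..]);
}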

View file

@ -1327,8 +1327,8 @@ impl InMemorySigner {
.unwrap()[..]
);
let local_delayedsig = EcdsaSignature {
sig: sign_with_aux_rand(secp_ctx, &sighash, &delayed_payment_key, &self),
hash_ty: EcdsaSighashType::All,
signature: sign_with_aux_rand(secp_ctx, &sighash, &delayed_payment_key, &self),
sighash_type: EcdsaSighashType::All,
};
let payment_script =
bitcoin::Address::p2wsh(&witness_script, Network::Bitcoin).script_pubkey();
@ -1699,6 +1699,7 @@ impl EcdsaChannelSigner for InMemorySigner {
}
#[cfg(taproot)]
#[allow(unused)]
impl TaprootChannelSigner for InMemorySigner {
fn generate_local_nonce_pair(
&self, commitment_number: u64, secp_ctx: &Secp256k1<All>,
@ -2106,13 +2107,12 @@ impl KeysManager {
};
let pubkey = Xpub::from_priv(&secp_ctx, &secret).to_pub();
if derivation_idx == 2 {
assert_eq!(pubkey.inner, self.shutdown_pubkey);
assert_eq!(pubkey.0, self.shutdown_pubkey);
}
let witness_script =
bitcoin::Address::p2pkh(&pubkey, Network::Testnet).script_pubkey();
let payment_script = bitcoin::Address::p2wpkh(&pubkey, Network::Testnet)
.expect("uncompressed key found")
.script_pubkey();
let payment_script =
bitcoin::Address::p2wpkh(&pubkey, Network::Testnet).script_pubkey();
if payment_script != output.script_pubkey {
return Err(());
@ -2131,8 +2131,7 @@ impl KeysManager {
let sig = sign_with_aux_rand(secp_ctx, &sighash, &secret.private_key, &self);
let mut sig_ser = sig.serialize_der().to_vec();
sig_ser.push(EcdsaSighashType::All as u8);
let witness =
Witness::from_slice(&[&sig_ser, &pubkey.inner.serialize().to_vec()]);
let witness = Witness::from_slice(&[&sig_ser, &pubkey.0.serialize().to_vec()]);
psbt.inputs[input_idx].final_script_witness = Some(witness);
},
}
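
The signer hunks above pick up the rust-bitcoin 0.32 field renames on `bitcoin::ecdsa::Signature` (`sig`/`hash_ty` become `signature`/`sighash_type`). A minimal sketch with a throwaway key rather than any LDK signer state:

use bitcoin::secp256k1::{Message, Secp256k1, SecretKey};
use bitcoin::sighash::EcdsaSighashType;

fn main() {
    let secp = Secp256k1::new();
    // Throwaway key and digest, for illustration only.
    let secret_key = SecretKey::from_slice(&[0x42; 32]).unwrap();
    let msg = Message::from_digest([0x01; 32]);
    let signature = secp.sign_ecdsa(&msg, &secret_key);
    // Renamed fields; the shorthand init works because the binding is already
    // named `signature`.
    let bitcoin_sig = bitcoin::ecdsa::Signature { signature, sighash_type: EcdsaSighashType::All };
    // DER signature plus the sighash byte, as pushed into witnesses above.
    let _encoded = bitcoin_sig.serialize();
}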

View file

@ -8,6 +8,7 @@ where
// in practice, this will only ever be an EcdsaChannelSigner (specifically, Writeable)
Ecdsa(<SP::Target as SignerProvider>::EcdsaSigner),
#[cfg(taproot)]
#[allow(unused)]
Taproot(<SP::Target as SignerProvider>::TaprootSigner),
}
@ -19,6 +20,7 @@ where
match self {
ChannelSignerType::Ecdsa(ecs) => ecs,
#[cfg(taproot)]
#[allow(unused)]
ChannelSignerType::Taproot(tcs) => tcs,
}
}
@ -27,6 +29,7 @@ where
match self {
ChannelSignerType::Ecdsa(ecs) => ecs,
#[cfg(taproot)]
#[allow(unused)]
ChannelSignerType::Taproot(tcs) => tcs,
}
}

View file

@ -107,7 +107,7 @@ impl<'a> core::fmt::Display for DebugTx<'a> {
debug_assert!(false, "We should never generate unknown transaction types");
write!(f, "unknown tx type ").unwrap();
}
write!(f, "with txid {}", self.0.txid())?;
write!(f, "with txid {}", self.0.compute_txid())?;
Ok(())
}
}

View file

@ -14,7 +14,7 @@
//! [`ChannelMonitor`]: crate::chain::channelmonitor::ChannelMonitor
use crate::prelude::*;
use crate::io::{self, Read, Seek, Write};
use crate::io::{self, BufRead, Read, Write};
use crate::io_extras::{copy, sink};
use core::hash::Hash;
use crate::sync::{Mutex, RwLock};
@ -66,6 +66,64 @@ impl<W: Write> Writer for W {
}
}
/// Wrap buffering support for implementations of Read.
/// A [`Read`]er which keeps an internal buffer to avoid hitting the underlying stream directly for
/// every read, implementing [`BufRead`].
///
/// In order to avoid reading bytes past the first object, and those bytes then ending up getting
/// dropped, this BufReader operates in one-byte-increments.
struct BufReader<'a, R: Read> {
inner: &'a mut R,
buf: [u8; 1],
is_consumed: bool
}
impl<'a, R: Read> BufReader<'a, R> {
/// Creates a [`BufReader`] which will read from the given `inner`.
pub fn new(inner: &'a mut R) -> Self {
BufReader {
inner,
buf: [0; 1],
is_consumed: true
}
}
}
impl<'a, R: Read> Read for BufReader<'a, R> {
#[inline]
fn read(&mut self, output: &mut [u8]) -> io::Result<usize> {
let input = self.fill_buf()?;
let count = cmp::min(input.len(), output.len());
output[..count].copy_from_slice(&input[..count]);
self.consume(count);
Ok(count)
}
}
impl<'a, R: Read> BufRead for BufReader<'a, R> {
#[inline]
fn fill_buf(&mut self) -> io::Result<&[u8]> {
if self.is_consumed {
let count = self.inner.read(&mut self.buf[..])?;
debug_assert!(count <= 1, "read gave us a garbage length");
// upon hitting EOF, assume the byte is already consumed
self.is_consumed = count == 0;
}
Ok(&self.buf[..])
}
#[inline]
fn consume(&mut self, amount: usize) {
if amount >= 1 {
debug_assert_eq!(amount, 1, "Can only consume one byte");
debug_assert!(!self.is_consumed, "Cannot consume more than had been read");
self.is_consumed = true;
}
}
}
pub(crate) struct WriterWriteAdaptor<'a, W: Writer + 'a>(pub &'a mut W);
impl<'a, W: Writer + 'a> Write for WriterWriteAdaptor<'a, W> {
#[inline]
@ -166,18 +224,18 @@ impl<'a, R: Read> LengthRead for FixedLengthReader<'a, R> {
/// between "EOF reached before we started" and "EOF reached mid-read".
///
/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
pub struct ReadTrackingReader<R: Read> {
read: R,
pub struct ReadTrackingReader<'a, R: Read> {
read: &'a mut R,
/// Returns whether we have read from this reader or not yet.
pub have_read: bool,
}
impl<R: Read> ReadTrackingReader<R> {
impl<'a, R: Read> ReadTrackingReader<'a, R> {
/// Returns a new [`ReadTrackingReader`].
pub fn new(read: R) -> Self {
pub fn new(read: &'a mut R) -> Self {
Self { read, have_read: false }
}
}
impl<R: Read> Read for ReadTrackingReader<R> {
impl<'a, R: Read> Read for ReadTrackingReader<'a, R> {
#[inline]
fn read(&mut self, dest: &mut [u8]) -> Result<usize, io::Error> {
match self.read.read(dest) {
@ -249,10 +307,10 @@ pub trait Readable
}
/// A trait that various LDK types implement allowing them to be read in from a
/// [`Read`]` + `[`Seek`].
pub(crate) trait SeekReadable where Self: Sized {
/// [`io::Cursor`].
pub(crate) trait CursorReadable where Self: Sized {
/// Reads a `Self` in from the given [`Read`].
fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, DecodeError>;
fn read<R: AsRef<[u8]>>(reader: &mut io::Cursor<R>) -> Result<Self, DecodeError>;
}
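
`SeekReadable` becomes `CursorReadable`: rather than requiring `Read + Seek`, implementors receive an `io::Cursor` over a byte slice, so rewinding is a plain `set_position` call (see the `tlv_stream!` macro further down). A standalone sketch of the pattern using `std::io::Cursor` and a hypothetical local trait; nothing here is LDK API:

use std::io::{Cursor, Read};

// Hypothetical stand-in for the crate-private trait above.
trait CursorReadableSketch: Sized {
    fn read<R: AsRef<[u8]>>(r: &mut Cursor<R>) -> std::io::Result<Self>;
}

struct Pair(u8, u8);

impl CursorReadableSketch for Pair {
    fn read<R: AsRef<[u8]>>(r: &mut Cursor<R>) -> std::io::Result<Self> {
        let mut buf = [0u8; 2];
        r.read_exact(&mut buf)?;
        // Rewinding needs no `Seek` bound: adjust the cursor position directly.
        r.set_position(r.position() - 1);
        let mut again = [0u8; 1];
        r.read_exact(&mut again)?;
        debug_assert_eq!(again[0], buf[1]);
        Ok(Pair(buf[0], buf[1]))
    }
}

fn main() {
    let mut cursor = Cursor::new(vec![7u8, 9, 42]);
    let pair = Pair::read(&mut cursor).unwrap();
    assert_eq!((pair.0, pair.1), (7, 9));
    // Bytes past the parsed object remain available to the caller.
    assert_eq!(cursor.position(), 2);
}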
/// A trait that various higher-level LDK types implement allowing them to be read in
@ -682,10 +740,10 @@ impl<S: AsWriteableSlice> Writeable for WithoutLength<S> {
impl<T: MaybeReadable> Readable for WithoutLength<Vec<T>> {
#[inline]
fn read<R: Read>(mut reader: &mut R) -> Result<Self, DecodeError> {
fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError> {
let mut values = Vec::new();
loop {
let mut track_read = ReadTrackingReader::new(&mut reader);
let mut track_read = ReadTrackingReader::new(reader);
match MaybeReadable::read(&mut track_read) {
Ok(Some(v)) => { values.push(v); },
Ok(None) => { },
@ -1278,10 +1336,11 @@ macro_rules! impl_consensus_ser {
impl Readable for $bitcoin_type {
fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
match consensus::encode::Decodable::consensus_decode(r) {
let mut reader = BufReader::<_>::new(r);
match consensus::encode::Decodable::consensus_decode(&mut reader) {
Ok(t) => Ok(t),
Err(consensus::encode::Error::Io(ref e)) if e.kind() == io::ErrorKind::UnexpectedEof => Err(DecodeError::ShortRead),
Err(consensus::encode::Error::Io(e)) => Err(DecodeError::Io(e.kind())),
Err(consensus::encode::Error::Io(e)) => Err(DecodeError::Io(e.kind().into())),
Err(_) => Err(DecodeError::InvalidValue),
}
}
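
rust-bitcoin 0.32 bounds `Decodable::consensus_decode` on its own `BufRead` trait, which is what the one-byte `BufReader` above supplies for streaming callers without over-reading past the decoded object. Callers that already hold a byte slice can keep using the crate helpers; a quick round-trip sketch, assuming the `bitcoin` 0.32 crate (the one-input skeleton is illustrative only):

use bitcoin::absolute::LockTime;
use bitcoin::consensus::encode::{deserialize, serialize};
use bitcoin::transaction::Version;
use bitcoin::{Amount, ScriptBuf, Transaction, TxIn, TxOut};

fn main() {
    // A one-in, one-out skeleton; enough for a codec round trip.
    let tx = Transaction {
        version: Version::TWO,
        lock_time: LockTime::ZERO,
        input: vec![TxIn::default()],
        output: vec![TxOut { value: Amount::from_sat(1_000), script_pubkey: ScriptBuf::new() }],
    };
    let bytes = serialize(&tx);
    // `deserialize` takes a plain byte slice, so no BufRead plumbing is needed here.
    let decoded: Transaction = deserialize(&bytes).unwrap();
    assert_eq!(decoded.compute_txid(), tx.compute_txid());
}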

View file

@ -456,7 +456,7 @@ macro_rules! _decode_tlv_stream_match_check {
/// For example,
/// ```
/// # use lightning::decode_tlv_stream;
/// # fn read<R: lightning::io::Read> (stream: R) -> Result<(), lightning::ln::msgs::DecodeError> {
/// # fn read<R: lightning::io::Read> (stream: &mut R) -> Result<(), lightning::ln::msgs::DecodeError> {
/// let mut required_value = 0u64;
/// let mut optional_value: Option<u64> = None;
/// decode_tlv_stream!(stream, {
@ -509,7 +509,7 @@ macro_rules! _decode_tlv_stream_range {
$(, $decode_custom_tlv: expr)?) => { {
use $crate::ln::msgs::DecodeError;
let mut last_seen_type: Option<u64> = None;
let mut stream_ref = $stream;
let stream_ref = $stream;
'tlv_read: loop {
use $crate::util::ser;
@ -519,7 +519,7 @@ macro_rules! _decode_tlv_stream_range {
// determine whether we should break or return ShortRead if we get an
// UnexpectedEof. This should in every case be largely cosmetic, but its nice to
// pass the TLV test vectors exactly, which require this distinction.
let mut tracking_reader = ser::ReadTrackingReader::new(&mut stream_ref);
let mut tracking_reader = ser::ReadTrackingReader::new(stream_ref);
match <$crate::util::ser::BigSize as $crate::util::ser::Readable>::read(&mut tracking_reader) {
Err(DecodeError::ShortRead) => {
if !tracking_reader.have_read {
@ -555,8 +555,8 @@ macro_rules! _decode_tlv_stream_range {
last_seen_type = Some(typ.0);
// Finally, read the length and value itself:
let length: ser::BigSize = $crate::util::ser::Readable::read(&mut stream_ref)?;
let mut s = ser::FixedLengthReader::new(&mut stream_ref, length.0);
let length: ser::BigSize = $crate::util::ser::Readable::read(stream_ref)?;
let mut s = ser::FixedLengthReader::new(stream_ref, length.0);
match typ.0 {
$(_t if $crate::_decode_tlv_stream_match_check!(_t, $type, $fieldty) => {
$crate::_decode_tlv!($stream, s, $field, $fieldty);
@ -952,13 +952,13 @@ macro_rules! tlv_stream {
}
}
impl $crate::util::ser::SeekReadable for $name {
fn read<R: $crate::io::Read + $crate::io::Seek>(reader: &mut R) -> Result<Self, $crate::ln::msgs::DecodeError> {
impl $crate::util::ser::CursorReadable for $name {
fn read<R: AsRef<[u8]>>(reader: &mut crate::io::Cursor<R>) -> Result<Self, $crate::ln::msgs::DecodeError> {
$(
_init_tlv_field_var!($field, option);
)*
let rewind = |cursor: &mut R, offset: usize| {
cursor.seek($crate::io::SeekFrom::Current(-(offset as i64))).expect("");
let rewind = |cursor: &mut crate::io::Cursor<R>, offset: usize| {
cursor.set_position(cursor.position().checked_sub(offset as u64).expect("Cannot rewind past 0."));
};
_decode_tlv_stream_range!(reader, $range, rewind, {
$(($type, $field, (option, encoding: $fieldty))),*
@ -1102,7 +1102,7 @@ macro_rules! impl_writeable_tlv_based_enum {
}),*
$($tuple_variant_id => {
let length: $crate::util::ser::BigSize = $crate::util::ser::Readable::read(reader)?;
let mut s = $crate::util::ser::FixedLengthReader::new(&mut reader, length.0);
let mut s = $crate::util::ser::FixedLengthReader::new(reader, length.0);
let res = $crate::util::ser::Readable::read(&mut s)?;
if s.bytes_remain() {
s.eat_remaining()?; // Return ShortRead if there's actually not enough bytes
@ -1214,7 +1214,7 @@ macro_rules! impl_writeable_tlv_based_enum_upgradable {
}),*
$($tuple_variant_id => {
let length: $crate::util::ser::BigSize = $crate::util::ser::Readable::read(reader)?;
let mut s = $crate::util::ser::FixedLengthReader::new(&mut reader, length.0);
let mut s = $crate::util::ser::FixedLengthReader::new(reader, length.0);
let res = $crate::util::ser::Readable::read(&mut s)?;
if s.bytes_remain() {
s.eat_remaining()?; // Return ShortRead if there's actually not enough bytes

View file

@ -500,7 +500,7 @@ where
log_debug!(
self.logger,
"Generating and broadcasting sweeping transaction {}",
spending_tx.txid()
spending_tx.compute_txid()
);
spending_tx
},
@ -534,7 +534,7 @@ where
if cur_height >= confirmation_height + ANTI_REORG_DELAY - 1 {
log_debug!(self.logger,
"Pruning swept output as sufficiently confirmed via spend in transaction {:?}. Pruned descriptor: {:?}",
o.status.latest_spending_tx().map(|t| t.txid()), o.descriptor
o.status.latest_spending_tx().map(|t| t.compute_txid()), o.descriptor
);
return false;
}
@ -697,7 +697,7 @@ where
let unconf_height = state_lock
.outputs
.iter()
.find(|o| o.status.latest_spending_tx().map(|tx| tx.txid()) == Some(*txid))
.find(|o| o.status.latest_spending_tx().map(|tx| tx.compute_txid()) == Some(*txid))
.and_then(|o| o.status.confirmation_height());
if let Some(unconf_height) = unconf_height {
@ -742,7 +742,11 @@ where
confirmation_height,
confirmation_hash,
..
} => Some((latest_spending_tx.txid(), confirmation_height, Some(confirmation_hash))),
} => Some((
latest_spending_tx.compute_txid(),
confirmation_height,
Some(confirmation_hash),
)),
_ => None,
})
.collect::<Vec<_>>()

View file

@ -33,7 +33,7 @@ use bitcoin::secp256k1::All;
use bitcoin::secp256k1::{SecretKey, PublicKey};
use bitcoin::secp256k1::{Secp256k1, ecdsa::Signature};
#[cfg(taproot)]
use musig2::types::{PartialSignature, PublicNonce, SecretNonce};
use musig2::types::{PartialSignature, PublicNonce};
use crate::sign::HTLCDescriptor;
use crate::util::ser::{Writeable, Writer};
use crate::io::Error;
@ -352,6 +352,7 @@ impl EcdsaChannelSigner for TestChannelSigner {
}
#[cfg(taproot)]
#[allow(unused)]
impl TaprootChannelSigner for TestChannelSigner {
fn generate_local_nonce_pair(&self, commitment_number: u64, secp_ctx: &Secp256k1<All>) -> PublicNonce {
todo!()

View file

@ -732,7 +732,7 @@ impl TestBroadcaster {
pub fn unique_txn_broadcast(&self) -> Vec<Transaction> {
let mut txn = self.txn_broadcasted.lock().unwrap().split_off(0);
let mut seen = new_hash_set();
txn.retain(|tx| seen.insert(tx.txid()));
txn.retain(|tx| seen.insert(tx.compute_txid()));
txn
}
}
@ -1591,8 +1591,8 @@ impl WalletSource for TestWalletSource {
let sighash = SighashCache::new(&tx)
.legacy_signature_hash(i, &utxo.output.script_pubkey, EcdsaSighashType::All as u32)
.map_err(|_| ())?;
let sig = self.secp.sign_ecdsa(&secp256k1::Message::from_digest(sighash.to_byte_array()), &self.secret_key);
let bitcoin_sig = bitcoin::ecdsa::Signature { sig, hash_ty: EcdsaSighashType::All };
let signature = self.secp.sign_ecdsa(&secp256k1::Message::from_digest(sighash.to_byte_array()), &self.secret_key);
let bitcoin_sig = bitcoin::ecdsa::Signature { signature, sighash_type: EcdsaSighashType::All };
tx.input[i].script_sig = Builder::new()
.push_slice(&bitcoin_sig.serialize())
.push_slice(&self.secret_key.public_key(&self.secp).serialize())

View file

@ -45,7 +45,7 @@ pub(crate) fn maybe_add_change_output(tx: &mut Transaction, input_value: Amount,
if output_value >= input_value { return Err(()); }
}
let dust_value = change_destination_script.dust_value();
let dust_value = change_destination_script.minimal_non_dust();
let mut change_output = TxOut {
script_pubkey: change_destination_script,
value: Amount::ZERO,
@ -227,27 +227,27 @@ mod tests {
fn test_tx_change_edge() {
// Check that we never add dust outputs
let mut tx = Transaction { version: Version::TWO, lock_time: LockTime::ZERO, input: Vec::new(), output: Vec::new() };
let orig_wtxid = tx.wtxid();
let orig_wtxid = tx.compute_wtxid();
let output_spk = ScriptBuf::new_p2pkh(&PubkeyHash::hash(&[0; 0]));
assert_eq!(output_spk.dust_value().to_sat(), 546);
assert_eq!(output_spk.minimal_non_dust().to_sat(), 546);
// base size = version size + varint[input count] + input size + varint[output count] + output size + lock time size
// total size = version size + marker + flag + varint[input count] + input size + varint[output count] + output size + lock time size
// weight = 3 * base size + total size = 3 * (4 + 1 + 0 + 1 + 0 + 4) + (4 + 1 + 1 + 1 + 0 + 1 + 0 + 4) = 3 * 10 + 12 = 42
assert_eq!(tx.weight().to_wu(), 42);
// 10 sats isn't enough to pay fee on a dummy transaction...
assert!(maybe_add_change_output(&mut tx, Amount::from_sat(10), 0, 250, output_spk.clone()).is_err());
assert_eq!(tx.wtxid(), orig_wtxid); // Failure doesn't change the transaction
assert_eq!(tx.compute_wtxid(), orig_wtxid); // Failure doesn't change the transaction
// but 11 (= ceil(42 * 250 / 1000)) is, just not enough to add a change output...
assert!(maybe_add_change_output(&mut tx, Amount::from_sat(11), 0, 250, output_spk.clone()).is_ok());
assert_eq!(tx.output.len(), 0);
assert_eq!(tx.wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
assert_eq!(tx.compute_wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
assert!(maybe_add_change_output(&mut tx, Amount::from_sat(549), 0, 250, output_spk.clone()).is_ok());
assert_eq!(tx.output.len(), 0);
assert_eq!(tx.wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
assert_eq!(tx.compute_wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
// 590 is also not enough
assert!(maybe_add_change_output(&mut tx, Amount::from_sat(590), 0, 250, output_spk.clone()).is_ok());
assert_eq!(tx.output.len(), 0);
assert_eq!(tx.wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
assert_eq!(tx.compute_wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
// at 591 we can afford the change output at the dust limit (546)
assert!(maybe_add_change_output(&mut tx, Amount::from_sat(591), 0, 250, output_spk.clone()).is_ok());
assert_eq!(tx.output.len(), 1);
@ -256,7 +256,7 @@ mod tests {
assert_eq!(tx.weight().to_wu() / 4, 590-546); // New weight is exactly the fee we wanted.
tx.output.pop();
assert_eq!(tx.wtxid(), orig_wtxid); // The only change is the addition of one output.
assert_eq!(tx.compute_wtxid(), orig_wtxid); // The only change is the addition of one output.
}
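
`dust_value()` is renamed to `minimal_non_dust()` in 0.32; the numbers asserted above are unchanged. The same checks outside the LDK test harness, assuming the `bitcoin` 0.32 crate:

use bitcoin::absolute::LockTime;
use bitcoin::hashes::Hash;
use bitcoin::transaction::Version;
use bitcoin::{PubkeyHash, ScriptBuf, Transaction};

fn main() {
    // Empty tx weight: 3 * base(10) + total(12) = 42 WU, as the comment above derives.
    let tx = Transaction {
        version: Version::TWO,
        lock_time: LockTime::ZERO,
        input: Vec::new(),
        output: Vec::new(),
    };
    assert_eq!(tx.weight().to_wu(), 42);

    // Renamed accessor; a P2PKH output's dust floor is still 546 sats.
    let spk = ScriptBuf::new_p2pkh(&PubkeyHash::hash(&[0u8; 0]));
    assert_eq!(spk.minimal_non_dust().to_sat(), 546);
}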
#[test]
@ -267,21 +267,21 @@ mod tests {
}], output: vec![TxOut {
script_pubkey: Builder::new().push_int(1).into_script(), value: Amount::from_sat(1000)
}] };
let orig_wtxid = tx.wtxid();
let orig_wtxid = tx.compute_wtxid();
let orig_weight = tx.weight().to_wu();
assert_eq!(orig_weight / 4, 61);
assert_eq!(Builder::new().push_int(2).into_script().dust_value().to_sat(), 474);
assert_eq!(Builder::new().push_int(2).into_script().minimal_non_dust().to_sat(), 474);
// Input value of the output value + fee - 1 should fail:
assert!(maybe_add_change_output(&mut tx, Amount::from_sat(1000 + 61 + 100 - 1), 400, 250, Builder::new().push_int(2).into_script()).is_err());
assert_eq!(tx.wtxid(), orig_wtxid); // Failure doesn't change the transaction
assert_eq!(tx.compute_wtxid(), orig_wtxid); // Failure doesn't change the transaction
// but one more input sat should succeed, without changing the transaction
assert!(maybe_add_change_output(&mut tx, Amount::from_sat(1000 + 61 + 100), 400, 250, Builder::new().push_int(2).into_script()).is_ok());
assert_eq!(tx.wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
assert_eq!(tx.compute_wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
// In order to get a change output, we need to add 474 plus the output's weight / 4 (10)...
assert!(maybe_add_change_output(&mut tx, Amount::from_sat(1000 + 61 + 100 + 474 + 9), 400, 250, Builder::new().push_int(2).into_script()).is_ok());
assert_eq!(tx.wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
assert_eq!(tx.compute_wtxid(), orig_wtxid); // If we don't add an output, we don't change the transaction
assert!(maybe_add_change_output(&mut tx, Amount::from_sat(1000 + 61 + 100 + 474 + 10), 400, 250, Builder::new().push_int(2).into_script()).is_ok());
assert_eq!(tx.output.len(), 2);
@ -289,6 +289,6 @@ mod tests {
assert_eq!(tx.output[1].script_pubkey, Builder::new().push_int(2).into_script());
assert_eq!(tx.weight().to_wu() - orig_weight, 40); // Weight difference matches what we had to add above
tx.output.pop();
assert_eq!(tx.wtxid(), orig_wtxid); // The only change is the addition of one output.
assert_eq!(tx.compute_wtxid(), orig_wtxid); // The only change is the addition of one output.
}
}