Mirror of https://github.com/romanz/electrs.git, synced 2025-02-24 23:08:39 +01:00
Remove unnecessary borrow
Clippy emits: "warning: this expression borrows a reference". As suggested, remove the unnecessary borrows.
This commit is contained in:
parent
9e33cb6d9b
commit
e94ff8b9a1
7 changed files with 29 additions and 31 deletions
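The pattern removed throughout this diff is the one Clippy's needless_borrow lint flags: borrowing a value that is already a reference, only for the compiler to strip the extra layer again at the call site. A minimal, self-contained sketch of the warning and the fix (illustrative only, not code from electrs; `parse` stands in for any function taking `&[u8]`):

// Illustrative sketch only -- not electrs code.
fn parse(bytes: &[u8]) -> usize {
    bytes.len()
}

fn main() {
    let data: Vec<u8> = vec![0, 1, 2, 3];
    let data_ref: &Vec<u8> = &data;

    // `data_ref` is already a reference, so the extra `&` below is redundant;
    // clippy::needless_borrow warns "this expression borrows a reference".
    let _with_borrow = parse(&data_ref);

    // The fix applied throughout this commit: pass the reference as-is and
    // let deref coercion turn `&Vec<u8>` into `&[u8]`.
    let _without_borrow = parse(data_ref);
}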
@@ -45,7 +45,7 @@ impl App {
         let mut tip = self.tip.lock().expect("failed to lock tip");
         let new_block = *tip != self.daemon().getbestblockhash()?;
         if new_block {
-            *tip = self.index().update(self.write_store(), &signal)?;
+            *tip = self.index().update(self.write_store(), signal)?;
         }
         Ok(new_block)
     }
@@ -129,7 +129,7 @@ impl TransactionCache {
         F: FnOnce() -> Result<Vec<u8>>,
     {
         if let Some(serialized_txn) = self.map.lock().unwrap().get(txid) {
-            return Ok(deserialize(&serialized_txn).chain_err(|| "failed to parse cached tx")?);
+            return Ok(deserialize(serialized_txn).chain_err(|| "failed to parse cached tx")?);
         }
         let serialized_txn = load_txn_func()?;
         let txn = deserialize(&serialized_txn).chain_err(|| "failed to parse serialized tx")?;
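The TransactionCache hunk above is typical: HashMap::get already returns a reference into the map, so borrowing the binding again only adds a second `&` that the compiler removes via deref coercion. A hedged sketch of that shape, with a hypothetical byte map and a deserialize stand-in (the real cache keys on Txid and sits behind a Mutex):

use std::collections::HashMap;

// Stand-in for a deserializer taking `&[u8]` (hypothetical, for illustration).
fn deserialize(bytes: &[u8]) -> usize {
    bytes.len()
}

// Mirrors the shape of the cached lookup in the hunk above.
fn cached_len(map: &HashMap<u64, Vec<u8>>, txid: &u64) -> Option<usize> {
    // `get` yields Option<&Vec<u8>>, so `serialized_txn` is already a reference.
    if let Some(serialized_txn) = map.get(txid) {
        // `deserialize(&serialized_txn)` would pass `&&Vec<u8>`; it compiles
        // thanks to deref coercion, but Clippy flags the needless borrow.
        return Some(deserialize(serialized_txn));
    }
    None
}

fn main() {
    let mut map = HashMap::new();
    map.insert(1u64, vec![0u8; 3]);
    assert_eq!(cached_len(&map, &1), Some(3));
}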
@@ -513,7 +513,7 @@ impl Daemon {
 
     pub fn getblocktxids(&self, blockhash: &BlockHash) -> Result<Vec<Txid>> {
         self.blocktxids_cache
-            .get_or_else(&blockhash, || self.load_blocktxids(blockhash))
+            .get_or_else(blockhash, || self.load_blocktxids(blockhash))
     }
 
     pub fn gettransaction(
@@ -545,7 +545,7 @@ impl Daemon {
         let txids: Value = self.request("getrawmempool", json!([/*verbose=*/ false]))?;
         let mut result = HashSet::new();
         for value in txids.as_array().chain_err(|| "non-array result")? {
-            result.insert(parse_hash(&value).chain_err(|| "invalid txid")?);
+            result.insert(parse_hash(value).chain_err(|| "invalid txid")?);
         }
         Ok(result)
     }
@@ -589,7 +589,7 @@ impl Daemon {
         let null_hash = BlockHash::default();
         for heights in all_heights.chunks(chunk_size) {
             trace!("downloading {} block headers", heights.len());
-            let mut headers = self.getblockheaders(&heights)?;
+            let mut headers = self.getblockheaders(heights)?;
             assert!(headers.len() == heights.len());
             result.append(&mut headers);
         }
@@ -176,13 +176,13 @@ pub fn index_transaction<'a>(
         if input.previous_output.txid == null_hash {
             None
         } else {
-            Some(TxInRow::new(&txid, &input).to_row())
+            Some(TxInRow::new(&txid, input).to_row())
         }
     });
     let outputs = txn
         .output
         .iter()
-        .map(move |output| TxOutRow::new(&txid, &output).to_row());
+        .map(move |output| TxOutRow::new(&txid, output).to_row());
 
     // Persist transaction ID and confirmed height
     inputs
@@ -204,7 +204,7 @@ pub fn index_block<'a>(block: &'a Block, height: usize) -> impl 'a + Iterator<It
     block
         .txdata
         .iter()
-        .flat_map(move |txn| index_transaction(&txn, height))
+        .flat_map(move |txn| index_transaction(txn, height))
         .chain(std::iter::once(row))
 }
 
@@ -75,7 +75,7 @@ impl Metrics {
                 self.addr, e
             )
         });
-        start_process_exporter(&self);
+        start_process_exporter(self);
         let reg = self.reg.clone();
         spawn_thread("metrics", move || loop {
             if let Err(e) = handle_request(&reg, server.recv()) {
src/query.rs (16 changed lines)
@@ -165,7 +165,7 @@ fn create_merkle_branch_and_root<T: Hash>(mut hashes: Vec<T>, mut index: usize)
 
 // TODO: the functions below can be part of ReadStore.
 fn txrow_by_txid(store: &dyn ReadStore, txid: &Txid) -> Option<TxRow> {
-    let key = TxRow::filter_full(&txid);
+    let key = TxRow::filter_full(txid);
     let value = store.get(&key)?;
     Some(TxRow::from_row(&Row { key, value }))
 }
@@ -192,7 +192,7 @@ fn txids_by_funding_output(
     output_index: usize,
 ) -> Vec<HashPrefix> {
     store
-        .scan(&TxInRow::filter(&txn_id, output_index))
+        .scan(&TxInRow::filter(txn_id, output_index))
         .iter()
         .map(|row| TxInRow::from_row(row).txid_prefix)
         .collect()
@@ -311,7 +311,7 @@ impl Query {
             funding.extend(self.find_funding_outputs(&t, script_hash));
         }
         for funding_output in &funding {
-            if let Some(spent) = self.find_spending_input(read_store, &funding_output)? {
+            if let Some(spent) = self.find_spending_input(read_store, funding_output)? {
                 spending.push(spent);
             }
         }
@@ -332,7 +332,7 @@ impl Query {
         }
         // // TODO: dedup outputs (somehow) both confirmed and in mempool (e.g. reorg?)
        for funding_output in funding.iter().chain(confirmed_funding.iter()) {
-            if let Some(spent) = self.find_spending_input(tracker.index(), &funding_output)? {
+            if let Some(spent) = self.find_spending_input(tracker.index(), funding_output)? {
                 spending.push(spent);
             }
         }
@@ -380,14 +380,14 @@ impl Query {
         tx_hash: &Txid,
         block_height: Option<u32>,
     ) -> Result<Option<BlockHash>> {
-        let blockhash = if self.tracker.read().unwrap().has_txn(&tx_hash) {
+        let blockhash = if self.tracker.read().unwrap().has_txn(tx_hash) {
             None // found in mempool (as unconfirmed transaction)
         } else {
             // Lookup in confirmed transactions' index
             let height = match block_height {
                 Some(height) => height,
                 None => {
-                    txrow_by_txid(self.app.read_store(), &tx_hash)
+                    txrow_by_txid(self.app.read_store(), tx_hash)
                         .chain_err(|| format!("not indexed tx {}", tx_hash))?
                         .height
                 }
@@ -405,7 +405,7 @@ impl Query {
     // Internal API for transaction retrieval
     fn load_txn(&self, txid: &Txid, block_height: Option<u32>) -> Result<Transaction> {
         let _timer = self.duration.with_label_values(&["load_txn"]).start_timer();
-        self.tx_cache.get_or_else(&txid, || {
+        self.tx_cache.get_or_else(txid, || {
             let blockhash = self.lookup_confirmed_blockhash(txid, block_height)?;
             let value: Value = self
                 .app
@@ -460,7 +460,7 @@ impl Query {
             .index()
             .get_header(height)
             .chain_err(|| format!("missing block #{}", height))?;
-        let txids = self.app.daemon().getblocktxids(&header_entry.hash())?;
+        let txids = self.app.daemon().getblocktxids(header_entry.hash())?;
         let pos = txids
             .iter()
             .position(|txid| txid == tx_hash)
src/rpc.rs (26 changed lines)
@@ -367,24 +367,22 @@ impl Connection {
             .with_label_values(&[method])
             .start_timer();
         let result = match method {
-            "blockchain.block.header" => self.blockchain_block_header(&params),
-            "blockchain.block.headers" => self.blockchain_block_headers(&params),
-            "blockchain.estimatefee" => self.blockchain_estimatefee(&params),
+            "blockchain.block.header" => self.blockchain_block_header(params),
+            "blockchain.block.headers" => self.blockchain_block_headers(params),
+            "blockchain.estimatefee" => self.blockchain_estimatefee(params),
             "blockchain.headers.subscribe" => self.blockchain_headers_subscribe(),
             "blockchain.relayfee" => self.blockchain_relayfee(),
-            "blockchain.scripthash.get_balance" => self.blockchain_scripthash_get_balance(&params),
-            "blockchain.scripthash.get_history" => self.blockchain_scripthash_get_history(&params),
-            "blockchain.scripthash.listunspent" => self.blockchain_scripthash_listunspent(&params),
-            "blockchain.scripthash.subscribe" => self.blockchain_scripthash_subscribe(&params),
-            "blockchain.transaction.broadcast" => self.blockchain_transaction_broadcast(&params),
-            "blockchain.transaction.get" => self.blockchain_transaction_get(&params),
-            "blockchain.transaction.get_merkle" => self.blockchain_transaction_get_merkle(&params),
+            "blockchain.scripthash.get_balance" => self.blockchain_scripthash_get_balance(params),
+            "blockchain.scripthash.get_history" => self.blockchain_scripthash_get_history(params),
+            "blockchain.scripthash.listunspent" => self.blockchain_scripthash_listunspent(params),
+            "blockchain.scripthash.subscribe" => self.blockchain_scripthash_subscribe(params),
+            "blockchain.transaction.broadcast" => self.blockchain_transaction_broadcast(params),
+            "blockchain.transaction.get" => self.blockchain_transaction_get(params),
+            "blockchain.transaction.get_merkle" => self.blockchain_transaction_get_merkle(params),
             "blockchain.transaction.get_confirmed_blockhash" => {
-                self.blockchain_transaction_get_confirmed_blockhash(&params)
-            }
-            "blockchain.transaction.id_from_pos" => {
-                self.blockchain_transaction_id_from_pos(&params)
+                self.blockchain_transaction_get_confirmed_blockhash(params)
             }
+            "blockchain.transaction.id_from_pos" => self.blockchain_transaction_id_from_pos(params),
             "mempool.get_fee_histogram" => self.mempool_get_fee_histogram(),
             "server.banner" => self.server_banner(),
             "server.donation_address" => self.server_donation_address(),
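All of the changes above are warning-level fixes, so a build still succeeds with the borrows in place. One common way to keep such warnings from creeping back in, assuming a toolchain with Clippy installed, is to promote the lint to a hard error at the crate root; this is a general Clippy mechanism, not something this commit adds:

// Hypothetical crate-root attribute (e.g. at the top of src/main.rs or src/lib.rs);
// not part of this commit. Under `cargo clippy` the lint below becomes an error,
// while a plain `cargo build` ignores the clippy:: tool lint.
#![deny(clippy::needless_borrow)]

fn takes_slice(bytes: &[u8]) -> usize {
    bytes.len()
}

fn main() {
    let v = vec![1u8, 2, 3];
    let r: &Vec<u8> = &v;
    let _ok = takes_slice(r); // fine: `r` is already a reference
    // let _err = takes_slice(&r); // rejected by `cargo clippy` with the attribute above
}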