mirror of https://github.com/romanz/electrs.git
Bunch of Vec<Row> translated to Iterator<Item=Row>
There were several places where a `Vec<Row>` was created only to be iterated once and then thrown away, allocating memory unnecessarily. This commit converts those `Vec`s into `Iterator`s to avoid the intermediate allocations.
This commit is contained in:
parent
9df66d97e3
commit
8199563e0f
5 changed files with 26 additions and 32 deletions
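The idea, shown as a rough standalone sketch (the `Row` shape and the numbers below are illustrative placeholders, not code from this repository): a function that used to build and return a `Vec` can instead return `impl Iterator`, so rows are produced lazily and consumed directly by the caller.

// Illustrative only: a stand-in Row, not the real electrs type.
#[derive(Debug)]
struct Row {
    key: Vec<u8>,
    value: Vec<u8>,
}

// Before: an intermediate Vec is allocated just to be iterated and dropped.
fn rows_as_vec(n: u32) -> Vec<Row> {
    let mut rows = vec![];
    for i in 0..n {
        rows.push(Row { key: i.to_le_bytes().to_vec(), value: vec![] });
    }
    rows
}

// After: the same rows, produced lazily with no intermediate allocation.
fn rows_as_iter(n: u32) -> impl Iterator<Item = Row> {
    (0..n).map(|i| Row { key: i.to_le_bytes().to_vec(), value: vec![] })
}

fn main() {
    // Both forms are consumed the same way by a for loop.
    for row in rows_as_iter(3) {
        println!("{:?}", row.key);
    }
    assert_eq!(rows_as_vec(3).len(), 3);
}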
@@ -27,7 +27,7 @@ impl App {
         }))
     }
 
-    fn write_store(&self) -> &store::WriteStore {
+    fn write_store(&self) -> &impl store::WriteStore {
         &self.store
     }
     // TODO: use index for queries.
@@ -13,7 +13,7 @@ impl ReadStore for FakeStore {
 }
 
 impl WriteStore for FakeStore {
-    fn write(&self, _rows: Vec<Row>) {}
+    fn write<I: IntoIterator<Item=Row>>(&self, _rows: I) {}
     fn flush(&self) {}
 }
 
44 src/index.rs
@@ -170,38 +170,35 @@ pub fn compute_script_hash(data: &[u8]) -> FullHash {
     hash
 }
 
-pub fn index_transaction(txn: &Transaction, height: usize, rows: &mut Vec<Row>) {
+pub fn index_transaction<'a>(txn: &'a Transaction, height: usize) -> impl 'a + Iterator<Item=Row> {
     let null_hash = Sha256dHash::default();
     let txid: Sha256dHash = txn.txid();
-    for input in &txn.input {
+
+    let inputs = txn.input.iter().filter_map(move |input| {
         if input.previous_output.txid == null_hash {
-            continue;
+            None
+        } else {
+            Some(TxInRow::new(&txid, &input).to_row())
         }
-        rows.push(TxInRow::new(&txid, &input).to_row());
-    }
-    for output in &txn.output {
-        rows.push(TxOutRow::new(&txid, &output).to_row());
-    }
+    });
+    let outputs = txn.output.iter().map(move |output| TxOutRow::new(&txid, &output).to_row());
     // Persist transaction ID and confirmed height
-    rows.push(TxRow::new(&txid, height as u32).to_row());
+    inputs.chain(outputs).chain(std::iter::once(TxRow::new(&txid, height as u32).to_row()))
 }
 
-pub fn index_block(block: &Block, height: usize) -> Vec<Row> {
-    let mut rows = vec![];
-    for txn in &block.txdata {
-        index_transaction(&txn, height, &mut rows);
-    }
+pub fn index_block<'a>(block: &'a Block, height: usize) -> impl 'a + Iterator<Item=Row> {
     let blockhash = block.bitcoin_hash();
     // Persist block hash and header
-    rows.push(Row {
+    let row = Row {
         key: bincode::serialize(&BlockKey {
             code: b'B',
             hash: full_hash(&blockhash[..]),
         })
         .unwrap(),
         value: serialize(&block.header),
-    });
-    rows
+    };
+    block.txdata.iter().flat_map(move |txn| index_transaction(&txn, height)).chain(std::iter::once(row))
 }
 
 pub fn last_indexed_block(blockhash: &Sha256dHash) -> Row {
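A minimal standalone sketch (toy types, not the real `Row`/`Transaction`) of the two Rust details the new signatures rely on: the returned iterator borrows its input, so the opaque return type carries an explicit `'a` bound, and the closures are `move` so they own their copy of locals such as `txid` rather than borrowing values that die when the function returns.

// Toy example: `offset` plays the role of `txid` (owned by the closure),
// `xs` plays the role of the borrowed transaction data.
fn scaled<'a>(xs: &'a [u32], offset: u32) -> impl 'a + Iterator<Item = u32> {
    // `move` transfers ownership of `offset` into the closure; without it,
    // the closure would borrow a local that is gone once this function returns.
    xs.iter().map(move |x| *x + offset)
}

fn main() {
    let data = [1, 2, 3];
    let out: Vec<u32> = scaled(&data, 10).collect();
    assert_eq!(out, vec![11, 12, 13]);
}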
@@ -361,7 +358,7 @@ impl Index {
             .cloned()
     }
 
-    pub fn update(&self, store: &WriteStore, waiter: &Waiter) -> Result<Sha256dHash> {
+    pub fn update(&self, store: &impl WriteStore, waiter: &Waiter) -> Result<Sha256dHash> {
         let daemon = self.daemon.reconnect()?;
         let tip = daemon.getbestblockhash()?;
         let new_headers: Vec<HeaderEntry> = {
@@ -401,20 +398,19 @@ impl Index {
                     break;
                 }
 
-            let mut rows = vec![];
-            for block in &batch {
+            let rows = batch.iter().flat_map(|block| {
                 let blockhash = block.bitcoin_hash();
                 let height = *height_map
                     .get(&blockhash)
                     .unwrap_or_else(|| panic!("missing header for block {}", blockhash));
 
                 let timer = self.stats.start_timer("index");
-                let mut block_rows = index_block(block, height);
-                block_rows.push(last_indexed_block(&blockhash));
-                rows.extend(block_rows);
+                let block_rows = index_block(block, height);
                 timer.observe_duration();
                 self.stats.update(block, height);
-            }
+                block_rows.chain(std::iter::once(last_indexed_block(&blockhash)))
+            });
 
             let timer = self.stats.start_timer("write");
             store.write(rows);
             timer.observe_duration();
@@ -29,8 +29,7 @@ impl MempoolStore {
     }
 
     fn add(&mut self, tx: &Transaction) {
-        let mut rows = vec![];
-        index_transaction(tx, 0, &mut rows);
+        let rows = index_transaction(tx, 0);
         for row in rows {
             let (key, value) = row.into_pair();
             self.map.entry(key).or_insert_with(|| vec![]).push(value);
@@ -38,8 +37,7 @@ impl MempoolStore {
     }
 
     fn remove(&mut self, tx: &Transaction) {
-        let mut rows = vec![];
-        index_transaction(tx, 0, &mut rows);
+        let rows = index_transaction(tx, 0);
         for row in rows {
             let (key, value) = row.into_pair();
             let no_values_left = {
@@ -21,7 +21,7 @@ pub trait ReadStore: Sync {
 }
 
 pub trait WriteStore: Sync {
-    fn write(&self, rows: Vec<Row>);
+    fn write<I: IntoIterator<Item=Row>>(&self, rows: I);
     fn flush(&self);
 }
 
@@ -148,7 +148,7 @@ impl ReadStore for DBStore {
 }
 
 impl WriteStore for DBStore {
-    fn write(&self, rows: Vec<Row>) {
+    fn write<I: IntoIterator<Item=Row>>(&self, rows: I) {
         let mut batch = rocksdb::WriteBatch::default();
         for row in rows {
             batch.put(row.key.as_slice(), row.value.as_slice()).unwrap();
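One consequence worth noting: adding a generic `write` method to `WriteStore` makes the trait unusable as a trait object, which is why the `Index::update` hunk above switches its parameter from `&WriteStore` to `&impl WriteStore`. A rough sketch of what the generic signature buys callers (toy types standing in for the real `Row` and `DBStore`):

// Toy types, not the real electrs definitions.
struct Row {
    key: Vec<u8>,
    value: Vec<u8>,
}

trait WriteStore {
    fn write<I: IntoIterator<Item = Row>>(&self, rows: I);
}

struct PrintStore;

impl WriteStore for PrintStore {
    fn write<I: IntoIterator<Item = Row>>(&self, rows: I) {
        // Rows are consumed one at a time; callers never need to collect first.
        for row in rows {
            println!("key={} bytes, value={} bytes", row.key.len(), row.value.len());
        }
    }
}

fn main() {
    let store = PrintStore;
    // A Vec<Row> still works, since it implements IntoIterator<Item = Row>...
    store.write(vec![Row { key: vec![1], value: vec![] }]);
    // ...and so does a lazy iterator, with no intermediate allocation.
    store.write((0u8..3).map(|i| Row { key: vec![i], value: vec![i, i] }));
}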