Mirror of https://github.com/romanz/electrs.git (synced 2024-11-19 01:43:29 +01:00)
Replace most of HTTP GETs with JSONRPC calls
parent 1b2a84c0ff
commit 039276bff2
@@ -11,7 +11,7 @@ bincode = "1.0.0"
bitcoin = "0.12"
crossbeam = "0.3.2"
error-chain = "0.11"
extfmt = "0.1"
hex = "0.3"
itertools = "0.7.8"
log = "0.4"
pbr = "1.0.0"
src/daemon.rs (113 changed lines)
@@ -1,9 +1,11 @@
use base64;
use bitcoin::blockdata::block::BlockHeader;
use bitcoin::blockdata::block::{Block, BlockHeader};
use bitcoin::blockdata::transaction::Transaction;
use bitcoin::network::encodable::ConsensusDecodable;
use bitcoin::network::serialize::BitcoinHash;
use bitcoin::network::serialize::deserialize;
use bitcoin::network::serialize::RawDecoder;
use hex;
use reqwest;
use serde_json::{from_str, Value};
use std::env::home_dir;
@@ -21,8 +23,9 @@ const HEADER_SIZE: usize = 80;
fn read_cookie() -> Vec<u8> {
    let mut path = home_dir().unwrap();
    path.push(".bitcoin");
    path.push("testnet3");
    path.push(".cookie");
    read_contents(&path).unwrap()
    read_contents(&path).expect("failed to read cookie")
}

pub struct Daemon {
@@ -39,7 +42,7 @@ impl Daemon {
    }

    // TODO: use error_chain for errors here.
    fn request(&self, resource: &str) -> Result<reqwest::Response> {
    fn call_http(&self, resource: &str) -> Result<reqwest::Response> {
        let url = format!("{}/rest/{}", self.url, resource);
        Ok(reqwest::get(&url)
            .chain_err(|| format!("failed to get {}", url))?
@@ -49,15 +52,15 @@ impl Daemon {

    pub fn get(&self, resource: &str) -> Result<Bytes> {
        let mut buf = Bytes::new();
        let mut resp = self.request(resource)?;
        let mut resp = self.call_http(resource)?;
        resp.copy_to(&mut buf)
            .chain_err(|| "failed to read response")?;
        Ok(buf)
    }

    pub fn call(&self, method: &str, params: Value) -> Result<Value> {
        let mut conn = TcpStream::connect("127.0.0.1:8332").chain_err(|| "failed to connect")?;
        let request = json!({"method": method, "params": params}).to_string();
    fn call_jsonrpc(&self, request: &Value) -> Result<Value> {
        let mut conn = TcpStream::connect("127.0.0.1:18332").chain_err(|| "failed to connect")?;
        let request = request.to_string();
        let msg = format!(
            "POST / HTTP/1.1\nAuthorization: Basic {}\nContent-Length: {}\n\n{}",
            self.cookie_b64,
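The call_jsonrpc hunk above hand-rolls the HTTP envelope around the JSON-RPC body instead of going through reqwest. A minimal standalone sketch of that framing (the format string is copied from the diff; the cookie value and the main() driver are made up for illustration):

```rust
// Sketch only: reproduces the request framing written to the TcpStream above.
// Note the bare "\n" separators, copied verbatim from the diff.
fn frame_jsonrpc_post(cookie_b64: &str, body: &str) -> String {
    format!(
        "POST / HTTP/1.1\nAuthorization: Basic {}\nContent-Length: {}\n\n{}",
        cookie_b64,
        body.len(),
        body
    )
}

fn main() {
    // Hypothetical base64("user:password") value; the real code derives it from the cookie file.
    let cookie_b64 = "dXNlcjpwYXNzd29yZA==";
    let body = r#"{"method":"getbestblockhash","params":[]}"#;
    print!("{}", frame_jsonrpc_post(cookie_b64, body));
}
```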
@@ -79,19 +82,91 @@ impl Daemon {
            }
        }
        let contents = contents.chain_err(|| "no reply")?;
        let mut reply: Value = from_str(&contents).chain_err(|| "invalid JSON")?;
        let reply: Value = from_str(&contents).chain_err(|| "invalid JSON")?;
        Ok(reply)
    }

    fn request(&self, method: &str, params: Value) -> Result<Value> {
        let req = json!({"method": method, "params": params});
        let mut reply = self.call_jsonrpc(&req)
            .chain_err(|| format!("RPC failed: {}", req))?;
        let err = reply["error"].take();
        if !err.is_null() {
            bail!("called failed: {}", err);
            bail!("{} RPC error: {}", method, err);
        }
        Ok(reply["result"].take())
    }

    fn requests(&self, method: &str, params_list: &[Value]) -> Result<Vec<Value>> {
        let reqs = params_list
            .iter()
            .map(|params| json!({"method": method, "params": params}))
            .collect();
        let mut result = Vec::new();
        for reply in self.call_jsonrpc(&reqs)
            .chain_err(|| format!("RPC failed: {}", reqs))?
            .as_array_mut()
            .chain_err(|| "non-array response")?
        {
            let err = reply["error"].take();
            if !err.is_null() {
                bail!("{} RPC error: {}", method, err);
            }
            result.push(reply["result"].take())
        }
        Ok(result)
    }

    // bitcoind JSONRPC API:

    pub fn getbestblockhash(&self) -> Result<Sha256dHash> {
        let reply = self.request("getbestblockhash", json!([]))?;
        Ok(
            Sha256dHash::from_hex(reply.as_str().chain_err(|| "non-string bestblockhash")?)
                .chain_err(|| "non-hex bestblockhash")?,
        )
    }

    pub fn getblockheader(&self, blockhash: &Sha256dHash) -> Result<BlockHeader> {
        let header_hex: Value = self.request(
            "getblockheader",
            json!([blockhash.be_hex_string(), /*verbose=*/ false]),
        )?;
        Ok(deserialize(
            &hex::decode(header_hex.as_str().chain_err(|| "non-string header")?)
                .chain_err(|| "non-hex header")?,
        ).chain_err(|| format!("failed to parse blockheader {}", blockhash))?)
    }

    pub fn getblock(&self, blockhash: &Sha256dHash) -> Result<Block> {
        let block_hex: Value = self.request(
            "getblock",
            json!([blockhash.be_hex_string(), /*verbose=*/ false]),
        )?;
        Ok(deserialize(
            &hex::decode(block_hex.as_str().chain_err(|| "non-string block")?)
                .chain_err(|| "non-hex block")?,
        ).chain_err(|| format!("failed to parse block {}", blockhash))?)
    }

    pub fn gettransaction(&self, txhash: &Sha256dHash) -> Result<Transaction> {
        let tx_hex: Value = self.request(
            "getrawtransaction",
            json!([txhash.be_hex_string(), /*verbose=*/ false]),
        )?;
        Ok(
            deserialize(&hex::decode(tx_hex.as_str().chain_err(|| "non-string tx")?)
                .chain_err(|| "non-hex tx")?)
                .chain_err(|| format!("failed to parse tx {}", txhash))?,
        )
    }

    fn get_all_headers(&self) -> Result<HeaderMap> {
        let mut headers = HeaderMap::new();
        let genesis_blockhash = self.request("getblockhash", json!([0]))?;
        let mut blockhash = Sha256dHash::from_hex(
            "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f",
        ).unwrap(); // genesis block hash
            genesis_blockhash.as_str().expect("non-string blockhash"),
        ).expect("non-hex blockhash");
        loop {
            let data = self.get(&format!("headers/2000/{}.bin", blockhash.be_hex_string()))
                .chain_err(|| "failed to get headers")?;
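The new requests() helper above batches several calls into one JSON array and then checks each reply's error field before collecting its result. A self-contained sketch of that reply validation using only serde_json (extract_results and the sample reply are hypothetical, not the crate's code):

```rust
// Sketch: validate a batched JSON-RPC reply the way `requests` does above.
use serde_json::{json, Value};

fn extract_results(mut replies: Value) -> Result<Vec<Value>, String> {
    let replies = replies
        .as_array_mut()
        .ok_or_else(|| "non-array response".to_string())?;
    let mut results = Vec::new();
    for reply in replies.iter_mut() {
        let err = reply["error"].take(); // non-null means this call failed
        if !err.is_null() {
            return Err(format!("RPC error: {}", err));
        }
        results.push(reply["result"].take()); // keep only the payload
    }
    Ok(results)
}

fn main() {
    // A fabricated two-element batch reply, shaped like bitcoind's output.
    let replies = json!([
        {"result": "00000000000000000000aabb", "error": null, "id": 0},
        {"result": 123456, "error": null, "id": 1}
    ]);
    println!("{:?}", extract_results(replies).unwrap());
}
```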
@@ -113,16 +188,13 @@ impl Daemon {

    fn add_missing_headers(&self, mut header_map: HeaderMap) -> Result<(HeaderMap, Sha256dHash)> {
        // Get current best blockhash (using JSONRPC API)
        let reply = self.call("getbestblockhash", json!([]))?;
        let bestblockhash =
            Sha256dHash::from_hex(reply.as_str().chain_err(|| "non-string bestblockhash")?)
                .chain_err(|| "non-hex bestblockhash")?;
        let bestblockhash = self.getbestblockhash()?;
        // Iterate back over headers until known blockhash is found:
        let mut blockhash = bestblockhash;
        while !header_map.contains_key(&blockhash) {
            let data = self.get(&format!("headers/1/{}.bin", blockhash.be_hex_string()))?;
            let header: BlockHeader = deserialize(&data)
                .chain_err(|| format!("failed to parse blockheader {}", blockhash))?;
        let nullhash = Sha256dHash::default();
        while !header_map.contains_key(&blockhash) && blockhash != nullhash {
            let header = self.getblockheader(&blockhash)
                .chain_err(|| "failed to get missing headers")?;
            header_map.insert(blockhash, header);
            blockhash = header.prev_blockhash;
        }
@@ -130,14 +202,17 @@ impl Daemon {
    }

    pub fn enumerate_headers(&self, indexed_headers: &HeaderList) -> Result<HeaderList> {
        info!("loading headers");
        let header_map = if indexed_headers.headers().is_empty() {
            self.get_all_headers()
                .chain_err(|| "failed to download all headers")?
        } else {
            indexed_headers.as_map()
        };
        info!("loaded headers");
        let (header_map, blockhash) = self.add_missing_headers(header_map)
            .chain_err(|| "failed to add missing headers")?;
        info!("added missing headers");
        Ok(HeaderList::build(header_map, blockhash))
    }
}
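The Basic credential sent by call_jsonrpc comes from bitcoind's cookie file, which read_cookie() loads and which the Daemon keeps base64-encoded as cookie_b64. A sketch of that derivation, assuming the base64 crate's top-level encode function; the helper name and the absolute path are illustrative only:

```rust
// Illustrative sketch: derive the value placed after "Authorization: Basic "
// from bitcoind's cookie file. `cookie_b64_from` is a hypothetical helper.
use std::fs;
use std::io;
use std::path::Path;

fn cookie_b64_from(cookie_path: &Path) -> io::Result<String> {
    let contents = fs::read(cookie_path)?; // raw "__cookie__:<secret>" bytes
    Ok(base64::encode(&contents))
}

fn main() -> io::Result<()> {
    // Hypothetical absolute path; read_cookie() above builds it from home_dir().
    let path = Path::new("/home/user/.bitcoin/testnet3/.cookie");
    println!("Basic {}", cookie_b64_from(path)?);
    Ok(())
}
```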
src/index.rs (23 changed lines)
@@ -16,7 +16,7 @@ use std::time::{Duration, Instant};
use time;

use store::{Row, Store};
use types::{Bytes, HeaderMap};
use types::HeaderMap;
use daemon::Daemon;

// TODO: consolidate serialization/deserialize code for bincode/bitcoin.
@@ -281,29 +281,22 @@ impl<'a> Iterator for Indexer<'a> {
        }
        let &entry = &self.headers[self.header_index];

        let &blockhash = entry.hash();
        let blockhash_hex = blockhash.be_hex_string();

        let buf: Bytes = self.daemon
            .get(&format!("block/{}.bin", blockhash_hex))
            .unwrap();

        let block: Block = deserialize(&buf).unwrap();
        assert_eq!(block.bitcoin_hash(), blockhash);
        let blockhash = entry.hash();
        let block = self.daemon.getblock(&blockhash).unwrap();
        assert_eq!(block.bitcoin_hash(), *blockhash);
        self.blocks_size += serialize(&block).unwrap().len();

        let rows = index_block(&block, entry.height());

        self.blocks_size += buf.len();
        self.num_of_rows += rows.len();
        for row in &rows {
            self.rows_size += row.key.len() + row.value.len();
        }
        self.num_of_rows += rows.len();
        self.header_index += 1;

        self.header_index += 1;
        if self.header_index % 1000 == 0 {
            info!(
                "{} @ {}: {:.3}/{:.3} MB, {} rows",
                blockhash_hex,
                blockhash,
                entry.height(),
                self.rows_size as f64 / 1e6_f64,
                self.blocks_size as f64 / 1e6_f64,
@@ -5,7 +5,7 @@ extern crate bincode;
extern crate bitcoin;
extern crate crossbeam;
extern crate crypto;
extern crate extfmt;
extern crate hex;
extern crate itertools;
extern crate pbr;
extern crate reqwest;
src/query.rs (18 changed lines)
@@ -9,7 +9,6 @@ use daemon::Daemon;
use index::{compute_script_hash, hash_prefix, HashPrefix, HeaderEntry, Index, TxInKey, TxInRow,
            TxKey, TxOutRow, HASH_PREFIX_LEN};
use store::Store;
use types::Bytes;

pub struct Query<'a> {
    store: &'a Store,
@@ -46,6 +45,7 @@ fn merklize(left: Sha256dHash, right: Sha256dHash) -> Sha256dHash {
    Sha256dHash::from_data(&data)
}

// TODO: return errors instead of panics
impl<'a> Query<'a> {
    pub fn new(store: &'a Store, daemon: &'a Daemon, index: &'a Index) -> Query<'a> {
        Query {
@@ -61,10 +61,7 @@ impl<'a> Query<'a> {
        for row in self.store.scan(&[b"T", &txid_prefix[..]].concat()) {
            let key: TxKey = bincode::deserialize(&row.key).unwrap();
            let txid: Sha256dHash = deserialize(&key.txid).unwrap();
            let txn_bytes = self.daemon
                .get(&format!("tx/{}.bin", txid.be_hex_string()))
                .unwrap();
            let txn: Transaction = deserialize(&txn_bytes).unwrap();
            let txn: Transaction = self.get_tx(&txid);
            let height: u32 = bincode::deserialize(&row.value).unwrap();
            txns.push(TxnHeight { txn, height })
        }
@@ -152,10 +149,10 @@ impl<'a> Query<'a> {
        status
    }

    pub fn get_tx(&self, tx_hash: &Sha256dHash) -> Bytes {
    pub fn get_tx(&self, tx_hash: &Sha256dHash) -> Transaction {
        self.daemon
            .get(&format!("tx/{}.bin", tx_hash.be_hex_string()))
            .unwrap()
            .gettransaction(tx_hash)
            .expect(&format!("failed to load tx {}", tx_hash))
    }

    pub fn get_headers(&self, heights: &[usize]) -> Vec<BlockHeader> {
@@ -185,10 +182,7 @@ impl<'a> Query<'a> {
    ) -> Option<(Vec<Sha256dHash>, usize)> {
        let header_list = self.index.headers_list();
        let blockhash = header_list.headers().get(height)?.hash();
        let buf = self.daemon
            .get(&format!("block/{}.bin", blockhash.be_hex_string()))
            .unwrap();
        let block: Block = deserialize(&buf).unwrap();
        let block: Block = self.daemon.getblock(&blockhash).unwrap();
        let mut txids: Vec<Sha256dHash> = block.txdata.iter().map(|tx| tx.txid()).collect();
        let pos = txids.iter().position(|txid| txid == tx_hash)?;
        let mut merkle = Vec::new();
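The last hunk above is the setup for blockchain.transaction.get_merkle: it fetches the block over JSONRPC, finds the transaction's position among the block's txids, and then builds the merkle branch (the branch loop itself is outside this diff). A generic, self-contained sketch of the usual shape of that loop, with the hash combiner left abstract and all names hypothetical:

```rust
// Generic sketch of a Bitcoin-style merkle branch: at each level record the
// sibling of the tracked node, pair an odd last element with itself, and halve
// the position. The `combine` closure stands in for the real double-SHA256.
fn merkle_branch<T: Copy>(
    mut level: Vec<T>,
    mut pos: usize,
    combine: impl Fn(T, T) -> T,
) -> Vec<T> {
    let mut branch = Vec::new();
    while level.len() > 1 {
        if level.len() % 2 != 0 {
            let last = *level.last().unwrap(); // duplicate the odd element
            level.push(last);
        }
        branch.push(level[pos ^ 1]); // sibling at the current level
        pos /= 2;
        let next: Vec<T> = level.chunks(2).map(|p| combine(p[0], p[1])).collect();
        level = next;
    }
    branch
}

fn main() {
    // Toy run over integers with a fake combiner, just to show the shape.
    let branch = merkle_branch(vec![1, 2, 3, 4, 5], 2, |a, b| a * 100 + b);
    println!("{:?}", branch); // siblings collected from leaf level upward
}
```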
src/rpc.rs (10 changed lines)
@@ -4,6 +4,7 @@ use bitcoin::util::hash::Sha256dHash;
use crossbeam;
use crypto::digest::Digest;
use crypto::sha2::Sha256;
use hex;
use itertools;
use serde_json::{from_str, Number, Value};
use std::collections::HashMap;
@@ -13,7 +14,6 @@ use std::sync::mpsc::{sync_channel, Receiver, SyncSender};

use query::{Query, Status};
use index::FullHash;
use util;

error_chain!{}

@@ -52,7 +52,7 @@ fn hash_from_status(status: &Status) -> Value {
        sha2.input(part.as_bytes());
    }
    sha2.result(&mut hash);
    Value::String(util::hexlify(&hash))
    Value::String(hex::encode(&hash))
}

fn jsonify_header(header: &BlockHeader, height: usize) -> Value {
@@ -119,7 +119,7 @@ impl<'a> Handler<'a> {
        let result = itertools::join(
            headers
                .into_iter()
                .map(|x| util::hexlify(&serialize(&x).unwrap())),
                .map(|x| hex::encode(&serialize(&x).unwrap())),
            "",
        );
        Ok(json!(result))
@@ -168,8 +168,8 @@ impl<'a> Handler<'a> {
    fn blockchain_transaction_get(&self, params: &[Value]) -> Result<Value> {
        // TODO: handle 'verbose' param
        let tx_hash = hash_from_value(params.get(0)).chain_err(|| "bad tx_hash")?;
        let tx_hex = util::hexlify(&self.query.get_tx(&tx_hash));
        Ok(json!(tx_hex))
        let tx = self.query.get_tx(&tx_hash);
        Ok(json!(hex::encode(&serialize(&tx).unwrap())))
    }

    fn blockchain_transaction_get_merkle(&self, params: &[Value]) -> Result<Value> {
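Throughout rpc.rs the hand-rolled util::hexlify helper is replaced by hex::encode from the hex crate added in Cargo.toml above. A tiny sketch of the call, assuming hex 0.3's top-level encode, which returns a lowercase hex String:

```rust
// Minimal sketch: hex::encode is what the diff swaps in for util::hexlify.
fn main() {
    let blob = [0xdeu8, 0xad, 0xbe, 0xef];
    let s = hex::encode(&blob); // lowercase hex string
    assert_eq!(s, "deadbeef");
    println!("{}", s);
}
```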
@@ -1,13 +1,8 @@
use extfmt::Hexlify;
use std::fs::File;
use std::io::Result;
use std::io::Read;
use std::path::Path;

pub fn hexlify(blob: &[u8]) -> String {
    format!("{}", Hexlify(blob))
}

pub fn read_contents<P: AsRef<Path>>(path: P) -> Result<Vec<u8>> {
    let mut buf = Vec::new();
    File::open(path)?.read_to_end(&mut buf)?;