diff --git a/backend/src/api/bitcoin/bitcoin-api.ts b/backend/src/api/bitcoin/bitcoin-api.ts
index 52e4cbec0..51ed99b6c 100644
--- a/backend/src/api/bitcoin/bitcoin-api.ts
+++ b/backend/src/api/bitcoin/bitcoin-api.ts
@@ -147,7 +147,7 @@ class BitcoinApi implements AbstractBitcoinApi {
scriptpubkey: vout.scriptPubKey.hex,
scriptpubkey_address: vout.scriptPubKey && vout.scriptPubKey.address ? vout.scriptPubKey.address
: vout.scriptPubKey.addresses ? vout.scriptPubKey.addresses[0] : '',
- scriptpubkey_asm: vout.scriptPubKey.asm ? this.convertScriptSigAsm(vout.scriptPubKey.asm) : '',
+ scriptpubkey_asm: vout.scriptPubKey.asm ? this.convertScriptSigAsm(vout.scriptPubKey.hex) : '',
scriptpubkey_type: this.translateScriptPubKeyType(vout.scriptPubKey.type),
};
});
@@ -157,7 +157,7 @@ class BitcoinApi implements AbstractBitcoinApi {
is_coinbase: !!vin.coinbase,
prevout: null,
scriptsig: vin.scriptSig && vin.scriptSig.hex || vin.coinbase || '',
- scriptsig_asm: vin.scriptSig && this.convertScriptSigAsm(vin.scriptSig.asm) || '',
+ scriptsig_asm: vin.scriptSig && this.convertScriptSigAsm(vin.scriptSig.hex) || '',
sequence: vin.sequence,
txid: vin.txid || '',
vout: vin.vout || 0,
@@ -290,38 +290,68 @@ class BitcoinApi implements AbstractBitcoinApi {
return transaction;
}
- private convertScriptSigAsm(str: string): string {
- const a = str.split(' ');
+ private convertScriptSigAsm(hex: string): string {
+ const buf = Buffer.from(hex, 'hex');
+
const b: string[] = [];
- a.forEach((chunk) => {
- if (chunk.substr(0, 3) === 'OP_') {
- chunk = chunk.replace(/^OP_(\d+)$/, 'OP_PUSHNUM_$1');
- chunk = chunk.replace('OP_CHECKSEQUENCEVERIFY', 'OP_CSV');
- chunk = chunk.replace('OP_CHECKLOCKTIMEVERIFY', 'OP_CLTV');
- b.push(chunk);
- } else {
- chunk = chunk.replace('[ALL]', '01');
- if (chunk === '0') {
- b.push('OP_0');
- } else if (chunk.match(/^[^0]\d*$/)) {
- const chunkInt = parseInt(chunk, 10);
- if (chunkInt < 0) {
- b.push('OP_PUSHNUM_NEG' + -chunkInt);
- } else {
- b.push('OP_PUSHNUM_' + chunk);
- }
+
+ let i = 0;
+ while (i < buf.length) {
+ const op = buf[i];
+ if (op >= 0x01 && op <= 0x4e) {
+ i++;
+ let push: number;
+ if (op === 0x4c) {
+ push = buf.readUInt8(i);
+ b.push('OP_PUSHDATA1');
+ i += 1;
+ } else if (op === 0x4d) {
+ push = buf.readUInt16LE(i);
+ b.push('OP_PUSHDATA2');
+ i += 2;
+ } else if (op === 0x4e) {
+ push = buf.readUInt32LE(i);
+ b.push('OP_PUSHDATA4');
+ i += 4;
} else {
- const dataLength = Math.round(chunk.length / 2);
- if (dataLength > 255) {
- b.push('OP_PUSHDATA2' + ' ' + chunk);
- } else if (dataLength > 75) {
- b.push('OP_PUSHDATA1' + ' ' + chunk);
+ push = op;
+ b.push('OP_PUSHBYTES_' + push);
+ }
+
+ const data = buf.slice(i, i + push);
+ if (data.length !== push) {
+ break;
+ }
+
+ b.push(data.toString('hex'));
+ i += data.length;
+ } else {
+ if (op === 0x00) {
+ b.push('OP_0');
+ } else if (op === 0x4f) {
+ b.push('OP_PUSHNUM_NEG1');
+ } else if (op === 0xb1) {
+ b.push('OP_CLTV');
+ } else if (op === 0xb2) {
+ b.push('OP_CSV');
+ } else if (op === 0xba) {
+ b.push('OP_CHECKSIGADD');
+ } else {
+ const opcode = bitcoinjs.script.toASM([ op ]);
+ if (opcode && op < 0xfd) {
+ if (/^OP_(\d+)$/.test(opcode)) {
+ b.push(opcode.replace(/^OP_(\d+)$/, 'OP_PUSHNUM_$1'));
+ } else {
+ b.push(opcode);
+ }
} else {
- b.push('OP_PUSHBYTES_' + dataLength + ' ' + chunk);
+ b.push('OP_RETURN_' + op);
}
}
+ i += 1;
}
- });
+ }
+
return b.join(' ');
}
@@ -332,21 +362,21 @@ class BitcoinApi implements AbstractBitcoinApi {
if (vin.prevout.scriptpubkey_type === 'p2sh') {
const redeemScript = vin.scriptsig_asm.split(' ').reverse()[0];
- vin.inner_redeemscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(redeemScript, 'hex')));
+ vin.inner_redeemscript_asm = this.convertScriptSigAsm(redeemScript);
if (vin.witness && vin.witness.length > 2) {
const witnessScript = vin.witness[vin.witness.length - 1];
- vin.inner_witnessscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(witnessScript, 'hex')));
+ vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
if (vin.prevout.scriptpubkey_type === 'v0_p2wsh' && vin.witness) {
const witnessScript = vin.witness[vin.witness.length - 1];
- vin.inner_witnessscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(witnessScript, 'hex')));
+ vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness && vin.witness.length > 1) {
const witnessScript = vin.witness[vin.witness.length - 2];
- vin.inner_witnessscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(witnessScript, 'hex')));
+ vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
diff --git a/backend/src/api/blocks.ts b/backend/src/api/blocks.ts
index 1024107d0..4402f0d37 100644
--- a/backend/src/api/blocks.ts
+++ b/backend/src/api/blocks.ts
@@ -75,9 +75,12 @@ class Blocks {
transactions.push(tx);
transactionsFetched++;
} catch (e) {
- logger.debug('Error fetching block tx: ' + (e instanceof Error ? e.message : e));
if (i === 0) {
- throw new Error('Failed to fetch Coinbase transaction: ' + txIds[i]);
+ const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e);
+ logger.err(msg);
+ throw new Error(msg);
+ } else {
+ logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
}
@@ -137,8 +140,8 @@ class Blocks {
pool = await poolsRepository.$getUnknownPool();
}
- if (!pool) { // Something is wrong with the pools table, ignore pool indexing
- logger.err('Unable to find pool, nor getting the unknown pool. Is the "pools" table empty?');
+    if (!pool) { // We should never have this situation in practice
+ logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. Check your "pools" table entries`);
return blockExtended;
}
@@ -214,11 +217,12 @@ class Blocks {
const lastBlockToIndex = Math.max(0, currentBlockHeight - indexingBlockAmount + 1);
- logger.info(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);
+ logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);
const chunkSize = 10000;
let totaIndexed = await blocksRepository.$blockCount(null, null);
let indexedThisRun = 0;
+ let newlyIndexed = 0;
const startedAt = new Date().getTime() / 1000;
let timer = new Date().getTime() / 1000;
@@ -228,12 +232,11 @@ class Blocks {
const missingBlockHeights: number[] = await blocksRepository.$getMissingBlocksBetweenHeights(
currentBlockHeight, endBlock);
if (missingBlockHeights.length <= 0) {
- logger.debug(`No missing blocks between #${currentBlockHeight} to #${endBlock}`);
currentBlockHeight -= chunkSize;
continue;
}
- logger.debug(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);
+ logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);
for (const blockHeight of missingBlockHeights) {
if (blockHeight < lastBlockToIndex) {
@@ -255,14 +258,16 @@ class Blocks {
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
+
+ newlyIndexed++;
await blocksRepository.$saveBlockInDatabase(blockExtended);
}
currentBlockHeight -= chunkSize;
}
- logger.info('Block indexing completed');
+ logger.info(`Indexed ${newlyIndexed} blocks`);
} catch (e) {
- logger.err('An error occured in $generateBlockDatabase(). Trying again later. ' + e);
+ logger.err('Block indexing failed. Trying again later. Reason: ' + (e instanceof Error ? e.message : e));
this.blockIndexingStarted = false;
return;
}
diff --git a/backend/src/api/database-migration.ts b/backend/src/api/database-migration.ts
index 3978a7d85..0e9a18220 100644
--- a/backend/src/api/database-migration.ts
+++ b/backend/src/api/database-migration.ts
@@ -1,6 +1,5 @@
-import { PoolConnection } from 'mysql2/promise';
import config from '../config';
-import { DB } from '../database';
+import DB from '../database';
import logger from '../logger';
const sleep = (ms: number) => new Promise(res => setTimeout(res, ms));
@@ -77,116 +76,112 @@ class DatabaseMigration {
await this.$setStatisticsAddedIndexedFlag(databaseSchemaVersion);
const isBitcoin = ['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK);
- const connection = await DB.getConnection();
try {
- await this.$executeQuery(connection, this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
- await this.$executeQuery(connection, this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
+ await this.$executeQuery(this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
+ await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
- await this.$executeQuery(connection, `CREATE INDEX added ON statistics (added);`);
+ await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
}
if (databaseSchemaVersion < 3) {
- await this.$executeQuery(connection, this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
+ await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
}
if (databaseSchemaVersion < 4) {
- await this.$executeQuery(connection, 'DROP table IF EXISTS blocks;');
- await this.$executeQuery(connection, this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
+ await this.$executeQuery('DROP table IF EXISTS blocks;');
+ await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
}
if (databaseSchemaVersion < 5 && isBitcoin === true) {
logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
- await this.$executeQuery(connection, 'TRUNCATE blocks;'); // Need to re-index
- await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
+ await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
+ await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
}
if (databaseSchemaVersion < 6 && isBitcoin === true) {
logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
- await this.$executeQuery(connection, 'TRUNCATE blocks;'); // Need to re-index
+ await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
// Cleanup original blocks fields type
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `tx_count` smallint unsigned NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `size` integer unsigned NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `weight` integer unsigned NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `difficulty` double NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `tx_count` smallint unsigned NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `size` integer unsigned NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `weight` integer unsigned NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` double NOT NULL DEFAULT "0"');
// We also fix the pools.id type so we need to drop/re-create the foreign key
- await this.$executeQuery(connection, 'ALTER TABLE blocks DROP FOREIGN KEY IF EXISTS `blocks_ibfk_1`');
- await this.$executeQuery(connection, 'ALTER TABLE pools MODIFY `id` smallint unsigned AUTO_INCREMENT');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `pool_id` smallint unsigned NULL');
- await this.$executeQuery(connection, 'ALTER TABLE blocks ADD FOREIGN KEY (`pool_id`) REFERENCES `pools` (`id`)');
+ await this.$executeQuery('ALTER TABLE blocks DROP FOREIGN KEY IF EXISTS `blocks_ibfk_1`');
+ await this.$executeQuery('ALTER TABLE pools MODIFY `id` smallint unsigned AUTO_INCREMENT');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `pool_id` smallint unsigned NULL');
+ await this.$executeQuery('ALTER TABLE blocks ADD FOREIGN KEY (`pool_id`) REFERENCES `pools` (`id`)');
// Add new block indexing fields
- await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `version` integer unsigned NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `bits` integer unsigned NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
- await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
+ await this.$executeQuery('ALTER TABLE blocks ADD `version` integer unsigned NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks ADD `bits` integer unsigned NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
+ await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
}
if (databaseSchemaVersion < 7 && isBitcoin === true) {
- await this.$executeQuery(connection, 'DROP table IF EXISTS hashrates;');
- await this.$executeQuery(connection, this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
+ await this.$executeQuery('DROP table IF EXISTS hashrates;');
+ await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
}
if (databaseSchemaVersion < 8 && isBitcoin === true) {
logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
- await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index
- await this.$executeQuery(connection, 'ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
- await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
- await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
+ await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
+ await this.$executeQuery('ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
+ await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
+ await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
}
if (databaseSchemaVersion < 9 && isBitcoin === true) {
logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
- await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index
- await this.$executeQuery(connection, 'ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
- await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
+ await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
+ await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
+ await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
}
if (databaseSchemaVersion < 10 && isBitcoin === true) {
- await this.$executeQuery(connection, 'ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
+ await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
}
if (databaseSchemaVersion < 11 && isBitcoin === true) {
logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
- await this.$executeQuery(connection, 'TRUNCATE blocks;'); // Need to re-index
- await this.$executeQuery(connection, `ALTER TABLE blocks
+ await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
+ await this.$executeQuery(`ALTER TABLE blocks
ADD avg_fee INT UNSIGNED NULL,
ADD avg_fee_rate INT UNSIGNED NULL
`);
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
}
if (databaseSchemaVersion < 12 && isBitcoin === true) {
// No need to re-index because the new data type can contain larger values
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
}
if (databaseSchemaVersion < 13 && isBitcoin === true) {
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `difficulty` DOUBLE UNSIGNED NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
- await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` DOUBLE UNSIGNED NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+ await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
}
if (databaseSchemaVersion < 14 && isBitcoin === true) {
logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
- await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index
- await this.$executeQuery(connection, 'ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
- await this.$executeQuery(connection, 'ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
+ await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
+ await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
+ await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
}
if (databaseSchemaVersion < 16 && isBitcoin === true) {
logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
- await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
+ await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
}
if (databaseSchemaVersion < 17 && isBitcoin === true) {
- await this.$executeQuery(connection, 'ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
+ await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
}
-
- connection.release();
} catch (e) {
- connection.release();
throw e;
}
}
@@ -203,13 +198,11 @@ class DatabaseMigration {
return;
}
- const connection = await DB.getConnection();
-
try {
// We don't use "CREATE INDEX IF NOT EXISTS" because it is not supported on old mariadb version 5.X
const query = `SELECT COUNT(1) hasIndex FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_schema=DATABASE() AND table_name='statistics' AND index_name='added';`;
- const [rows] = await this.$executeQuery(connection, query, true);
+ const [rows] = await this.$executeQuery(query, true);
if (rows[0].hasIndex === 0) {
logger.debug('MIGRATIONS: `statistics.added` is not indexed');
this.statisticsAddedIndexed = false;
@@ -223,28 +216,24 @@ class DatabaseMigration {
logger.err('MIGRATIONS: Unable to check if `statistics.added` INDEX exist or not.');
this.statisticsAddedIndexed = true;
}
-
- connection.release();
}
/**
* Small query execution wrapper to log all executed queries
*/
- private async $executeQuery(connection: PoolConnection, query: string, silent: boolean = false): Promise {
+ private async $executeQuery(query: string, silent: boolean = false): Promise {
if (!silent) {
logger.debug('MIGRATIONS: Execute query:\n' + query);
}
- return connection.query({ sql: query, timeout: this.queryTimeout });
+ return DB.query({ sql: query, timeout: this.queryTimeout });
}
/**
* Check if 'table' exists in the database
*/
private async $checkIfTableExists(table: string): Promise {
- const connection = await DB.getConnection();
const query = `SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = '${config.DATABASE.DATABASE}' AND TABLE_NAME = '${table}'`;
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return rows[0]['COUNT(*)'] === 1;
}
@@ -252,10 +241,8 @@ class DatabaseMigration {
* Get current database version
*/
private async $getSchemaVersionFromDatabase(): Promise {
- const connection = await DB.getConnection();
const query = `SELECT number FROM state WHERE name = 'schema_version';`;
- const [rows] = await this.$executeQuery(connection, query, true);
- connection.release();
+ const [rows] = await this.$executeQuery(query, true);
return rows[0]['number'];
}
@@ -263,8 +250,6 @@ class DatabaseMigration {
* Create the `state` table
*/
private async $createMigrationStateTable(): Promise {
- const connection = await DB.getConnection();
-
try {
const query = `CREATE TABLE IF NOT EXISTS state (
name varchar(25) NOT NULL,
@@ -272,15 +257,12 @@ class DatabaseMigration {
string varchar(100) NULL,
CONSTRAINT name_unique UNIQUE (name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
- await this.$executeQuery(connection, query);
+ await this.$executeQuery(query);
// Set initial values
- await this.$executeQuery(connection, `INSERT INTO state VALUES('schema_version', 0, NULL);`);
- await this.$executeQuery(connection, `INSERT INTO state VALUES('last_elements_block', 0, NULL);`);
-
- connection.release();
+ await this.$executeQuery(`INSERT INTO state VALUES('schema_version', 0, NULL);`);
+ await this.$executeQuery(`INSERT INTO state VALUES('last_elements_block', 0, NULL);`);
} catch (e) {
- connection.release();
throw e;
}
}
@@ -295,18 +277,14 @@ class DatabaseMigration {
}
transactionQueries.push(this.getUpdateToLatestSchemaVersionQuery());
- const connection = await DB.getConnection();
try {
- await this.$executeQuery(connection, 'START TRANSACTION;');
+ await this.$executeQuery('START TRANSACTION;');
for (const query of transactionQueries) {
- await this.$executeQuery(connection, query);
+ await this.$executeQuery(query);
}
- await this.$executeQuery(connection, 'COMMIT;');
-
- connection.release();
+ await this.$executeQuery('COMMIT;');
} catch (e) {
- await this.$executeQuery(connection, 'ROLLBACK;');
- connection.release();
+ await this.$executeQuery('ROLLBACK;');
throw e;
}
}
@@ -346,14 +324,12 @@ class DatabaseMigration {
* Print current database version
*/
private async $printDatabaseVersion() {
- const connection = await DB.getConnection();
try {
- const [rows] = await this.$executeQuery(connection, 'SELECT VERSION() as version;', true);
+ const [rows] = await this.$executeQuery('SELECT VERSION() as version;', true);
logger.debug(`MIGRATIONS: Database engine version '${rows[0].version}'`);
} catch (e) {
logger.debug(`MIGRATIONS: Could not fetch database engine version. ` + e);
}
- connection.release();
}
// Couple of wrappers to clean the main logic
@@ -490,24 +466,22 @@ class DatabaseMigration {
public async $truncateIndexedData(tables: string[]) {
const allowedTables = ['blocks', 'hashrates'];
- const connection = await DB.getConnection();
try {
for (const table of tables) {
if (!allowedTables.includes(table)) {
logger.debug(`Table ${table} cannot to be re-indexed (not allowed)`);
continue;
- };
+ }
- await this.$executeQuery(connection, `TRUNCATE ${table}`, true);
+ await this.$executeQuery(`TRUNCATE ${table}`, true);
if (table === 'hashrates') {
- await this.$executeQuery(connection, 'UPDATE state set number = 0 where name = "last_hashrates_indexing"', true);
+ await this.$executeQuery('UPDATE state set number = 0 where name = "last_hashrates_indexing"', true);
}
logger.notice(`Table ${table} has been truncated`);
}
} catch (e) {
logger.warn(`Unable to erase indexed data`);
}
- connection.release();
}
}
diff --git a/backend/src/api/liquid/elements-parser.ts b/backend/src/api/liquid/elements-parser.ts
index 24c7ab949..12439e037 100644
--- a/backend/src/api/liquid/elements-parser.ts
+++ b/backend/src/api/liquid/elements-parser.ts
@@ -2,7 +2,7 @@ import { IBitcoinApi } from '../bitcoin/bitcoin-api.interface';
import bitcoinClient from '../bitcoin/bitcoin-client';
import bitcoinSecondClient from '../bitcoin/bitcoin-second-client';
import { Common } from '../common';
-import { DB } from '../../database';
+import DB from '../../database';
import logger from '../../logger';
class ElementsParser {
@@ -33,10 +33,8 @@ class ElementsParser {
}
public async $getPegDataByMonth(): Promise {
- const connection = await DB.getConnection();
const query = `SELECT SUM(amount) AS amount, DATE_FORMAT(FROM_UNIXTIME(datetime), '%Y-%m-01') AS date FROM elements_pegs GROUP BY DATE_FORMAT(FROM_UNIXTIME(datetime), '%Y%m')`;
- const [rows] = await connection.query(query);
- connection.release();
+ const [rows] = await DB.query(query);
return rows;
}
@@ -79,7 +77,6 @@ class ElementsParser {
protected async $savePegToDatabase(height: number, blockTime: number, amount: number, txid: string,
txindex: number, bitcoinaddress: string, bitcointxid: string, bitcoinindex: number, final_tx: number): Promise {
- const connection = await DB.getConnection();
const query = `INSERT INTO elements_pegs(
block, datetime, amount, txid, txindex, bitcoinaddress, bitcointxid, bitcoinindex, final_tx
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`;
@@ -87,24 +84,19 @@ class ElementsParser {
const params: (string | number)[] = [
height, blockTime, amount, txid, txindex, bitcoinaddress, bitcointxid, bitcoinindex, final_tx
];
- await connection.query(query, params);
- connection.release();
+ await DB.query(query, params);
logger.debug(`Saved L-BTC peg from block height #${height} with TXID ${txid}.`);
}
protected async $getLatestBlockHeightFromDatabase(): Promise {
- const connection = await DB.getConnection();
const query = `SELECT number FROM state WHERE name = 'last_elements_block'`;
- const [rows] = await connection.query(query);
- connection.release();
+ const [rows] = await DB.query(query);
return rows[0]['number'];
}
protected async $saveLatestBlockToDatabase(blockHeight: number) {
- const connection = await DB.getConnection();
const query = `UPDATE state SET number = ? WHERE name = 'last_elements_block'`;
- await connection.query(query, [blockHeight]);
- connection.release();
+ await DB.query(query, [blockHeight]);
}
}
diff --git a/backend/src/api/mining.ts b/backend/src/api/mining.ts
index 6b7d2e01d..201813899 100644
--- a/backend/src/api/mining.ts
+++ b/backend/src/api/mining.ts
@@ -142,8 +142,6 @@ class Mining {
}
try {
- logger.info(`Indexing mining pools weekly hashrates`);
-
const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
const hashrates: any[] = [];
const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
@@ -155,6 +153,7 @@ class Mining {
const totalWeekIndexed = (await BlocksRepository.$blockCount(null, null)) / 1008;
let indexedThisRun = 0;
let totalIndexed = 0;
+ let newlyIndexed = 0;
let startedAt = new Date().getTime();
while (toTimestamp > genesisTimestamp) {
@@ -198,6 +197,7 @@ class Mining {
});
}
+ newlyIndexed += hashrates.length;
await HashratesRepository.$saveHashrates(hashrates);
hashrates.length = 0;
@@ -217,7 +217,9 @@ class Mining {
}
this.weeklyHashrateIndexingStarted = false;
await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing');
- logger.info(`Weekly pools hashrate indexing completed`);
+ if (newlyIndexed > 0) {
+ logger.info(`Indexed ${newlyIndexed} pools weekly hashrate`);
+ }
} catch (e) {
this.weeklyHashrateIndexingStarted = false;
throw e;
@@ -249,8 +251,6 @@ class Mining {
}
try {
- logger.info(`Indexing network daily hashrate`);
-
const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
const lastMidnight = this.getDateMidnight(new Date());
@@ -260,6 +260,7 @@ class Mining {
const totalDayIndexed = (await BlocksRepository.$blockCount(null, null)) / 144;
let indexedThisRun = 0;
let totalIndexed = 0;
+ let newlyIndexed = 0;
let startedAt = new Date().getTime();
while (toTimestamp > genesisTimestamp) {
@@ -294,6 +295,7 @@ class Mining {
});
if (hashrates.length > 10) {
+ newlyIndexed += hashrates.length;
await HashratesRepository.$saveHashrates(hashrates);
hashrates.length = 0;
}
@@ -303,7 +305,8 @@ class Mining {
const daysPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
const formattedDate = new Date(fromTimestamp).toUTCString();
const daysLeft = Math.round(totalDayIndexed - totalIndexed);
- logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ~${daysLeft} days left to index`);
+ logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ` +
+ `~${daysLeft} days left to index`);
startedAt = new Date().getTime();
indexedThisRun = 0;
}
@@ -323,11 +326,14 @@ class Mining {
});
}
+ newlyIndexed += hashrates.length;
await HashratesRepository.$saveHashrates(hashrates);
await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing');
this.hashrateIndexingStarted = false;
- logger.info(`Daily network hashrate indexing completed`);
+ if (newlyIndexed > 0) {
+      logger.info(`Indexed ${newlyIndexed} days of network hashrate`);
+ }
} catch (e) {
this.hashrateIndexingStarted = false;
throw e;
diff --git a/backend/src/api/pools-parser.ts b/backend/src/api/pools-parser.ts
index dee95912a..202fca1bd 100644
--- a/backend/src/api/pools-parser.ts
+++ b/backend/src/api/pools-parser.ts
@@ -1,5 +1,4 @@
-import { readFileSync } from 'fs';
-import { DB } from '../database';
+import DB from '../database';
import logger from '../logger';
import config from '../config';
@@ -59,13 +58,11 @@ class PoolsParser {
logger.debug(`Found ${poolNames.length} unique mining pools`);
// Get existing pools from the db
- const connection = await DB.getConnection();
let existingPools;
try {
- [existingPools] = await connection.query({ sql: 'SELECT * FROM pools;', timeout: 120000 });
+ [existingPools] = await DB.query({ sql: 'SELECT * FROM pools;', timeout: 120000 });
} catch (e) {
- logger.err('Unable to get existing pools from the database, skipping pools.json import');
- connection.release();
+ logger.err('Cannot get existing pools from the database, skipping pools.json import');
return;
}
@@ -97,7 +94,7 @@ class PoolsParser {
if (slug === undefined) {
// Only keep alphanumerical
slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
- logger.debug(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
+ logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
}
if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) {
@@ -145,17 +142,15 @@ class PoolsParser {
try {
if (finalPoolDataAdd.length > 0) {
- await connection.query({ sql: queryAdd, timeout: 120000 });
+ await DB.query({ sql: queryAdd, timeout: 120000 });
}
for (const query of updateQueries) {
- await connection.query({ sql: query, timeout: 120000 });
+ await DB.query({ sql: query, timeout: 120000 });
}
await this.insertUnknownPool();
- connection.release();
logger.info('Mining pools.json import completed');
} catch (e) {
- connection.release();
- logger.err(`Unable to import pools in the database!`);
+ logger.err(`Cannot import pools in the database`);
throw e;
}
}
@@ -164,16 +159,15 @@ class PoolsParser {
* Manually add the 'unknown pool'
*/
private async insertUnknownPool() {
- const connection = await DB.getConnection();
try {
- const [rows]: any[] = await connection.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
+ const [rows]: any[] = await DB.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
if (rows.length === 0) {
- await connection.query({
+ await DB.query({
sql: `INSERT INTO pools(name, link, regexes, addresses, slug)
VALUES("Unknown", "https://learnmeabitcoin.com/technical/coinbase-transaction", "[]", "[]", "unknown");
`});
} else {
- await connection.query(`UPDATE pools
+ await DB.query(`UPDATE pools
SET name='Unknown', link='https://learnmeabitcoin.com/technical/coinbase-transaction',
regexes='[]', addresses='[]',
slug='unknown'
@@ -183,8 +177,6 @@ class PoolsParser {
} catch (e) {
logger.err('Unable to insert "Unknown" mining pool');
}
-
- connection.release();
}
}
diff --git a/backend/src/api/statistics.ts b/backend/src/api/statistics.ts
index 3d99adcb7..bd93b4c6e 100644
--- a/backend/src/api/statistics.ts
+++ b/backend/src/api/statistics.ts
@@ -1,5 +1,5 @@
import memPool from './mempool';
-import { DB } from '../database';
+import DB from '../database';
import logger from '../logger';
import { Statistic, TransactionExtended, OptimizedStatistic } from '../mempool.interfaces';
@@ -155,7 +155,6 @@ class Statistics {
}
private async $createZeroedStatistic(): Promise {
- const connection = await DB.getConnection();
try {
const query = `INSERT INTO statistics(
added,
@@ -206,17 +205,14 @@ class Statistics {
)
VALUES (NOW(), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)`;
- const [result]: any = await connection.query(query);
- connection.release();
+ const [result]: any = await DB.query(query);
return result.insertId;
} catch (e) {
- connection.release();
logger.err('$create() error' + (e instanceof Error ? e.message : e));
}
}
private async $create(statistics: Statistic): Promise {
- const connection = await DB.getConnection();
try {
const query = `INSERT INTO statistics(
added,
@@ -314,11 +310,9 @@ class Statistics {
statistics.vsize_1800,
statistics.vsize_2000,
];
- const [result]: any = await connection.query(query, params);
- connection.release();
+ const [result]: any = await DB.query(query, params);
return result.insertId;
} catch (e) {
- connection.release();
logger.err('$create() error' + (e instanceof Error ? e.message : e));
}
}
@@ -421,10 +415,8 @@ class Statistics {
private async $get(id: number): Promise {
try {
- const connection = await DB.getConnection();
const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics WHERE id = ?`;
- const [rows] = await connection.query(query, [id]);
- connection.release();
+ const [rows] = await DB.query(query, [id]);
if (rows[0]) {
return this.mapStatisticToOptimizedStatistic([rows[0]])[0];
}
@@ -435,11 +427,9 @@ class Statistics {
public async $list2H(): Promise {
try {
- const connection = await DB.getConnection();
const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics ORDER BY statistics.added DESC LIMIT 120`;
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
- return this.mapStatisticToOptimizedStatistic(rows);
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+ return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list2H() error' + (e instanceof Error ? e.message : e));
return [];
@@ -448,11 +438,9 @@ class Statistics {
public async $list24H(): Promise {
try {
- const connection = await DB.getConnection();
const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics ORDER BY statistics.added DESC LIMIT 1440`;
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
- return this.mapStatisticToOptimizedStatistic(rows);
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+ return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list24h() error' + (e instanceof Error ? e.message : e));
return [];
@@ -461,11 +449,9 @@ class Statistics {
public async $list1W(): Promise {
try {
- const connection = await DB.getConnection();
const query = this.getQueryForDaysAvg(300, '1 WEEK'); // 5m interval
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
- return this.mapStatisticToOptimizedStatistic(rows);
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+ return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list1W() error' + (e instanceof Error ? e.message : e));
return [];
@@ -474,11 +460,9 @@ class Statistics {
public async $list1M(): Promise {
try {
- const connection = await DB.getConnection();
const query = this.getQueryForDaysAvg(1800, '1 MONTH'); // 30m interval
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
- return this.mapStatisticToOptimizedStatistic(rows);
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+ return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list1M() error' + (e instanceof Error ? e.message : e));
return [];
@@ -487,11 +471,9 @@ class Statistics {
public async $list3M(): Promise {
try {
- const connection = await DB.getConnection();
const query = this.getQueryForDaysAvg(7200, '3 MONTH'); // 2h interval
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
- return this.mapStatisticToOptimizedStatistic(rows);
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+ return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list3M() error' + (e instanceof Error ? e.message : e));
return [];
@@ -500,11 +482,9 @@ class Statistics {
public async $list6M(): Promise {
try {
- const connection = await DB.getConnection();
- const query = this.getQueryForDaysAvg(10800, '6 MONTH'); // 3h interval
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
- return this.mapStatisticToOptimizedStatistic(rows);
+ const query = this.getQueryForDaysAvg(10800, '6 MONTH'); // 3h interval
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+ return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list6M() error' + (e instanceof Error ? e.message : e));
return [];
@@ -513,11 +493,9 @@ class Statistics {
public async $list1Y(): Promise {
try {
- const connection = await DB.getConnection();
const query = this.getQueryForDays(28800, '1 YEAR'); // 8h interval
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
- return this.mapStatisticToOptimizedStatistic(rows);
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+ return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list1Y() error' + (e instanceof Error ? e.message : e));
return [];
@@ -526,11 +504,9 @@ class Statistics {
public async $list2Y(): Promise {
try {
- const connection = await DB.getConnection();
- const query = this.getQueryForDays(28800, "2 YEAR"); // 8h interval
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
- return this.mapStatisticToOptimizedStatistic(rows);
+ const query = this.getQueryForDays(28800, '2 YEAR'); // 8h interval
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+ return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list2Y() error' + (e instanceof Error ? e.message : e));
return [];
@@ -539,11 +515,9 @@ class Statistics {
public async $list3Y(): Promise {
try {
- const connection = await DB.getConnection();
- const query = this.getQueryForDays(43200, "3 YEAR"); // 12h interval
- const [rows] = await connection.query({ sql: query, timeout: this.queryTimeout });
- connection.release();
- return this.mapStatisticToOptimizedStatistic(rows);
+ const query = this.getQueryForDays(43200, '3 YEAR'); // 12h interval
+ const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+ return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list3Y() error' + (e instanceof Error ? e.message : e));
return [];
diff --git a/backend/src/database.ts b/backend/src/database.ts
index 55be0ffc3..3816154cd 100644
--- a/backend/src/database.ts
+++ b/backend/src/database.ts
@@ -1,51 +1,51 @@
import config from './config';
-import { createPool, PoolConnection } from 'mysql2/promise';
+import { createPool, Pool, PoolConnection } from 'mysql2/promise';
import logger from './logger';
import { PoolOptions } from 'mysql2/typings/mysql';
-export class DB {
- static poolConfig = ():PoolOptions => {
- let poolConfig:PoolOptions = {
- port: config.DATABASE.PORT,
- database: config.DATABASE.DATABASE,
- user: config.DATABASE.USERNAME,
- password: config.DATABASE.PASSWORD,
- connectionLimit: 10,
- supportBigNumbers: true,
- timezone: '+00:00',
- }
-
- if (config.DATABASE.SOCKET !== "") {
- poolConfig.socketPath = config.DATABASE.SOCKET;
+ class DB {
+ constructor() {
+ if (config.DATABASE.SOCKET !== '') {
+ this.poolConfig.socketPath = config.DATABASE.SOCKET;
} else {
- poolConfig.host = config.DATABASE.HOST;
+ this.poolConfig.host = config.DATABASE.HOST;
}
-
- return poolConfig;
}
-
- static pool = createPool(DB.poolConfig());
+ private pool: Pool | null = null;
+ private poolConfig: PoolOptions = {
+ port: config.DATABASE.PORT,
+ database: config.DATABASE.DATABASE,
+ user: config.DATABASE.USERNAME,
+ password: config.DATABASE.PASSWORD,
+ connectionLimit: 10,
+ supportBigNumbers: true,
+ timezone: '+00:00',
+ };
- static connectionsReady: number[] = [];
+ public async query(query, params?) {
+ const pool = await this.getPool();
+ return pool.query(query, params);
+ }
- static async getConnection() {
- const connection: PoolConnection = await DB.pool.getConnection();
- const connectionId = connection['connection'].connectionId;
- if (!DB.connectionsReady.includes(connectionId)) {
- await connection.query(`SET time_zone='+00:00';`);
- this.connectionsReady.push(connectionId);
+ public async checkDbConnection() {
+ try {
+ await this.query('SELECT ?', [1]);
+ logger.info('Database connection established.');
+ } catch (e) {
+ logger.err('Could not connect to database: ' + (e instanceof Error ? e.message : e));
+ process.exit(1);
}
- return connection;
+ }
+
+ private async getPool(): Promise {
+ if (this.pool === null) {
+ this.pool = createPool(this.poolConfig);
+ this.pool.on('connection', function (newConnection: PoolConnection) {
+ newConnection.query(`SET time_zone='+00:00'`);
+ });
+ }
+ return this.pool;
}
}
-export async function checkDbConnection() {
- try {
- const connection = await DB.getConnection();
- logger.info('Database connection established.');
- connection.release();
- } catch (e) {
- logger.err('Could not connect to database: ' + (e instanceof Error ? e.message : e));
- process.exit(1);
- }
-}
+export default new DB();
diff --git a/backend/src/index.ts b/backend/src/index.ts
index c8e98c0b7..c2f111c65 100644
--- a/backend/src/index.ts
+++ b/backend/src/index.ts
@@ -5,7 +5,7 @@ import * as WebSocket from 'ws';
import * as cluster from 'cluster';
import axios from 'axios';
-import { checkDbConnection, DB } from './database';
+import DB from './database';
import config from './config';
import routes from './routes';
import blocks from './api/blocks';
@@ -89,11 +89,11 @@ class Server {
diskCache.loadMempoolCache();
if (config.DATABASE.ENABLED) {
- await checkDbConnection();
+ await DB.checkDbConnection();
try {
if (process.env.npm_config_reindex != undefined) { // Re-index requests
const tables = process.env.npm_config_reindex.split(',');
- logger.warn(`Indexed data for "${process.env.npm_config_reindex}" tables will be erased in 5 seconds from now (using '--reindex') ...`);
+ logger.warn(`Indexed data for "${process.env.npm_config_reindex}" tables will be erased in 5 seconds (using '--reindex')`);
await Common.sleep(5000);
await databaseMigration.$truncateIndexedData(tables);
}
@@ -169,8 +169,12 @@ class Server {
}
async $resetHashratesIndexingState() {
- await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing', 0);
- await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
+ try {
+ await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing', 0);
+ await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
+ } catch (e) {
+ logger.err(`Cannot reset hashrate indexing timestamps. Reason: ` + (e instanceof Error ? e.message : e));
+ }
}
async $runIndexingWhenReady() {
@@ -184,11 +188,11 @@ class Server {
await BlocksRepository.$deleteBlocks(10);
await HashratesRepository.$deleteLastEntries();
}
- blocks.$generateBlockDatabase();
+ await blocks.$generateBlockDatabase();
await mining.$generateNetworkHashrateHistory();
await mining.$generatePoolHashrateHistory();
} catch (e) {
- logger.err(`Unable to run indexing right now, trying again later. ` + e);
+ logger.err(`Indexing failed, trying again later. Reason: ` + (e instanceof Error ? e.message : e));
}
}
diff --git a/backend/src/repositories/BlocksRepository.ts b/backend/src/repositories/BlocksRepository.ts
index 2ec97ce88..8e96a0c38 100644
--- a/backend/src/repositories/BlocksRepository.ts
+++ b/backend/src/repositories/BlocksRepository.ts
@@ -1,5 +1,5 @@
-import { BlockExtended, PoolTag } from '../mempool.interfaces';
-import { DB } from '../database';
+import { BlockExtended } from '../mempool.interfaces';
+import DB from '../database';
import logger from '../logger';
import { Common } from '../api/common';
import { prepareBlock } from '../utils/blocks-utils';
@@ -10,11 +10,7 @@ class BlocksRepository {
* Save indexed block data in the database
*/
public async $saveBlockInDatabase(block: BlockExtended) {
- let connection;
-
try {
- connection = await DB.getConnection();
-
const query = `INSERT INTO blocks(
height, hash, blockTimestamp, size,
weight, tx_count, coinbase_raw, difficulty,
@@ -52,15 +48,12 @@ class BlocksRepository {
block.extras.avgFeeRate,
];
- await connection.query(query, params);
- connection.release();
+ await DB.query(query, params);
} catch (e: any) {
- connection.release();
- if (e.errno === 1062) { // ER_DUP_ENTRY
+ if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`$saveBlockInDatabase() - Block ${block.height} has already been indexed, ignoring`);
} else {
- connection.release();
- logger.err('$saveBlockInDatabase() error: ' + (e instanceof Error ? e.message : e));
+ logger.err('Cannot save indexed block into db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
@@ -74,16 +67,13 @@ class BlocksRepository {
return [];
}
- let connection;
try {
- connection = await DB.getConnection();
- const [rows]: any[] = await connection.query(`
+ const [rows]: any[] = await DB.query(`
SELECT height
FROM blocks
WHERE height <= ? AND height >= ?
ORDER BY height DESC;
`, [startHeight, endHeight]);
- connection.release();
const indexedBlockHeights: number[] = [];
rows.forEach((row: any) => { indexedBlockHeights.push(row.height); });
@@ -92,8 +82,7 @@ class BlocksRepository {
return missingBlocksHeights;
} catch (e) {
- connection.release();
- logger.err('$getMissingBlocksBetweenHeights() error' + (e instanceof Error ? e.message : e));
+ logger.err('Cannot retrieve blocks list to index. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
@@ -121,16 +110,11 @@ class BlocksRepository {
query += ` GROUP by pools.id`;
- let connection;
try {
- connection = await DB.getConnection();
- const [rows] = await connection.query(query, params);
- connection.release();
-
+ const [rows] = await DB.query(query, params);
return rows;
} catch (e) {
- connection.release();
- logger.err('$getEmptyBlocks() error' + (e instanceof Error ? e.message : e));
+ logger.err('Cannot count empty blocks. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
@@ -159,16 +143,11 @@ class BlocksRepository {
query += ` blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
}
- let connection;
try {
- connection = await DB.getConnection();
- const [rows] = await connection.query(query, params);
- connection.release();
-
+ const [rows] = await DB.query(query, params);
return rows[0].blockCount;
} catch (e) {
- connection.release();
- logger.err('$blockCount() error' + (e instanceof Error ? e.message : e));
+ logger.err(`Cannot count blocks for this pool (using offset). Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
@@ -199,16 +178,11 @@ class BlocksRepository {
}
query += ` blockTimestamp BETWEEN FROM_UNIXTIME('${from}') AND FROM_UNIXTIME('${to}')`;
- let connection;
try {
- connection = await DB.getConnection();
- const [rows] = await connection.query(query, params);
- connection.release();
-
+ const [rows] = await DB.query(query, params);
return rows[0];
} catch (e) {
- connection.release();
- logger.err('$blockCountBetweenTimestamp() error' + (e instanceof Error ? e.message : e));
+ logger.err(`Cannot count blocks for this pool (using timestamps). Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
@@ -222,11 +196,8 @@ class BlocksRepository {
ORDER BY height
LIMIT 1;`;
- let connection;
try {
- connection = await DB.getConnection();
- const [rows]: any[] = await connection.query(query);
- connection.release();
+ const [rows]: any[] = await DB.query(query);
if (rows.length <= 0) {
return -1;
@@ -234,8 +205,7 @@ class BlocksRepository {
return rows[0].blockTimestamp;
} catch (e) {
- connection.release();
- logger.err('$oldestBlockTimestamp() error' + (e instanceof Error ? e.message : e));
+ logger.err('Cannot get oldest indexed block timestamp. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
@@ -243,7 +213,7 @@ class BlocksRepository {
/**
* Get blocks mined by a specific mining pool
*/
- public async $getBlocksByPool(slug: string, startHeight: number | undefined = undefined): Promise
The first miner on the network to find a suitable block earns all the transaction fees from the transactions in that block. As a result, miners tend to prioritize transactions with higher transaction fees.
"
+ },
+ {
+ type: "endpoint",
+ category: "basics",
+ showConditions: bitcoinNetworks,
+ fragment: "what-are-mining-pools",
+ title: "What are mining pools?",
+ answer: "Mining pools are groups of miners that combine their computational power in order to increase the probability of finding new blocks."
+ },
+ {
+ type: "category",
+ category: "help",
+ fragment: "help-stuck-transaction",
+ title: "Help! My transaction is stuck",
+ showConditions: bitcoinNetworks
+ },
+ {
+ type: "endpoint",
+ category: "help",
+ showConditions: bitcoinNetworks,
+ fragment: "why-is-transaction-stuck-in-mempool",
+ title: "Why is my transaction stuck in the mempool?",
+ answer: "
Miners decide which transactions are included in the blocks they mine, so they usually prioritize transactions which pay them the highest transaction fees (transaction fees are measured in sats per virtual byte, or sat/vB). If it's been a while and your transaction hasn't been confirmed, your transaction probably has a lower transaction fee relative to other transactions currently in the mempool.
"
+ },
+ {
+ type: "endpoint",
+ category: "help",
+ showConditions: bitcoinNetworks,
+ fragment: "how-to-get-transaction-confirmed-quickly",
+ title: "How can I get my transaction confirmed more quickly?",
+ answer: "
If your wallet supports RBF, and if your transaction was created with RBF enabled, you can bump the fee higher.
Otherwise, if your wallet does not support RBF, you can increase the effective fee rate of your transaction by spending its change output using a higher fee. This is called CPFP.
"
+ },
+ {
+ type: "endpoint",
+ category: "help",
+ showConditions: bitcoinNetworks,
+ fragment: "how-prevent-stuck-transaction",
+ title: "How can I prevent a transaction from getting stuck in the future?",
+ answer: "
You must use an adequate transaction fee commensurate with how quickly you need the transaction to be confirmed. Also consider using RBF if your wallet supports it so that you can bump the fee rate if needed.
"
+ },
+ {
+ type: "category",
+ category: "using",
+ fragment: "using-this-website",
+ title: "Using this website",
+ showConditions: bitcoinNetworks
+ },
+ {
+ type: "endpoint",
+ category: "how-to",
+ showConditions: bitcoinNetworks,
+ fragment: "looking-up-transactions",
+ title: "How can I look up a transaction?",
+ answer: "Search for the transaction ID in the search box at the top-right of this website."
+ },
+ {
+ type: "endpoint",
+ category: "how-to",
+ showConditions: bitcoinNetworks,
+ fragment: "looking-up-addresses",
+ title: "How can I look up an address?",
+ answer: "Search for the address in the search box at the top-right of this website."
+ },
+ {
+ type: "endpoint",
+ category: "how-to",
+ showConditions: bitcoinNetworks,
+ fragment: "looking-up-blocks",
+ title: "How can I look up a block?",
+ answer: "Search for the block number (or block hash) in the search box at the top-right of this website."
+ },
+ {
+ type: "endpoint",
+ category: "how-to",
+ showConditions: bitcoinNetworks,
+ fragment: "looking-up-fee-estimates",
+ title: "How can I look up fee estimates?",
+ answer: "
Low priority is suggested for confirmation within 6 blocks (~1 hour), Medium priority is suggested for confirmation within 3 blocks (~30 minutes), and High priority is suggested for confirmation in the next block (~10 minutes).
"
+ },
+ {
+ type: "endpoint",
+ category: "how-to",
+ showConditions: bitcoinNetworks,
+ fragment: "looking-up-historical-trends",
+ title: "How can I explore historical trends?",
+ answer: "See the graphs page for aggregate trends over time: mempool size over time and incoming transaction velocity over time."
+ },
+ {
+ type: "category",
+ category: "advanced",
+ fragment: "advanced",
+ title: "Advanced",
+ showConditions: bitcoinNetworks
+ },
+ {
+ type: "endpoint",
+ category: "advanced",
+ showConditions: bitcoinNetworks,
+ fragment: "who-runs-this-website",
+ title: "Who runs this website?",
+ answer: "The official mempool.space website is operated by The Mempool Open Source Project. See more information on our About page. There are also many unofficial instances of this website operated by individual members of the Bitcoin community."
+ },
+ {
+ type: "endpoint",
+ category: "advanced",
+ showConditions: bitcoinNetworks,
+ fragment: "host-my-own-instance-raspberry-pi",
+ title: "How can I host my own instance on a Raspberry Pi?",
+ answer: "We support one-click installation on a number of Raspberry Pi full-node distros including Umbrel, RaspiBlitz, MyNode, and RoninDojo."
+ },
+ {
+ type: "endpoint",
+ category: "advanced",
+ showConditions: bitcoinNetworks,
+ fragment: "host-my-own-instance-linux-server",
+ title: "How can I host my own instance on a Linux server?",
+ answer: "You can manually install mempool on your own Linux server, but this requires advanced sysadmin skills since you will be manually configuring everything. We do not provide support for manual deployments."
+ },
+ {
+ type: "endpoint",
+ category: "advanced",
+ showConditions: bitcoinNetworks,
+ fragment: "install-mempool-with-docker",
+ title: "Can I install Mempool using Docker?",
+ answer: "Yes, we publish Docker images (or you can build your own), and provide an example docker-compose template."
+ }
+];
diff --git a/frontend/src/app/components/docs/api-docs-nav.component.html b/frontend/src/app/components/docs/api-docs-nav.component.html
index 83eaaf9e8..c8460ada4 100644
--- a/frontend/src/app/components/docs/api-docs-nav.component.html
+++ b/frontend/src/app/components/docs/api-docs-nav.component.html
@@ -1,4 +1,4 @@
-