Mirror of https://github.com/mempool/mempool.git (synced 2024-11-20 10:21:52 +01:00)

Commit 417542a217: Merge branch 'master' into simon/remove-locutus-lib

@@ -147,7 +147,7 @@ class BitcoinApi implements AbstractBitcoinApi {
         scriptpubkey: vout.scriptPubKey.hex,
         scriptpubkey_address: vout.scriptPubKey && vout.scriptPubKey.address ? vout.scriptPubKey.address
           : vout.scriptPubKey.addresses ? vout.scriptPubKey.addresses[0] : '',
-        scriptpubkey_asm: vout.scriptPubKey.asm ? this.convertScriptSigAsm(vout.scriptPubKey.asm) : '',
+        scriptpubkey_asm: vout.scriptPubKey.asm ? this.convertScriptSigAsm(vout.scriptPubKey.hex) : '',
         scriptpubkey_type: this.translateScriptPubKeyType(vout.scriptPubKey.type),
       };
     });
@@ -157,7 +157,7 @@ class BitcoinApi implements AbstractBitcoinApi {
       is_coinbase: !!vin.coinbase,
       prevout: null,
       scriptsig: vin.scriptSig && vin.scriptSig.hex || vin.coinbase || '',
-      scriptsig_asm: vin.scriptSig && this.convertScriptSigAsm(vin.scriptSig.asm) || '',
+      scriptsig_asm: vin.scriptSig && this.convertScriptSigAsm(vin.scriptSig.hex) || '',
       sequence: vin.sequence,
       txid: vin.txid || '',
       vout: vin.vout || 0,
@@ -290,38 +290,68 @@ class BitcoinApi implements AbstractBitcoinApi {
     return transaction;
   }
 
-  private convertScriptSigAsm(str: string): string {
-    const a = str.split(' ');
+  private convertScriptSigAsm(hex: string): string {
+    const buf = Buffer.from(hex, 'hex');
 
     const b: string[] = [];
-    a.forEach((chunk) => {
-      if (chunk.substr(0, 3) === 'OP_') {
-        chunk = chunk.replace(/^OP_(\d+)$/, 'OP_PUSHNUM_$1');
-        chunk = chunk.replace('OP_CHECKSEQUENCEVERIFY', 'OP_CSV');
-        chunk = chunk.replace('OP_CHECKLOCKTIMEVERIFY', 'OP_CLTV');
-        b.push(chunk);
-      } else {
-        chunk = chunk.replace('[ALL]', '01');
-        if (chunk === '0') {
-          b.push('OP_0');
-        } else if (chunk.match(/^[^0]\d*$/)) {
-          const chunkInt = parseInt(chunk, 10);
-          if (chunkInt < 0) {
-            b.push('OP_PUSHNUM_NEG' + -chunkInt);
-          } else {
-            b.push('OP_PUSHNUM_' + chunk);
-          }
+
+    let i = 0;
+    while (i < buf.length) {
+      const op = buf[i];
+      if (op >= 0x01 && op <= 0x4e) {
+        i++;
+        let push: number;
+        if (op === 0x4c) {
+          push = buf.readUInt8(i);
+          b.push('OP_PUSHDATA1');
+          i += 1;
+        } else if (op === 0x4d) {
+          push = buf.readUInt16LE(i);
+          b.push('OP_PUSHDATA2');
+          i += 2;
+        } else if (op === 0x4e) {
+          push = buf.readUInt32LE(i);
+          b.push('OP_PUSHDATA4');
+          i += 4;
         } else {
-          const dataLength = Math.round(chunk.length / 2);
-          if (dataLength > 255) {
-            b.push('OP_PUSHDATA2' + ' ' + chunk);
-          } else if (dataLength > 75) {
-            b.push('OP_PUSHDATA1' + ' ' + chunk);
+          push = op;
+          b.push('OP_PUSHBYTES_' + push);
+        }
+
+        const data = buf.slice(i, i + push);
+        if (data.length !== push) {
+          break;
+        }
+
+        b.push(data.toString('hex'));
+        i += data.length;
+      } else {
+        if (op === 0x00) {
+          b.push('OP_0');
+        } else if (op === 0x4f) {
+          b.push('OP_PUSHNUM_NEG1');
+        } else if (op === 0xb1) {
+          b.push('OP_CLTV');
+        } else if (op === 0xb2) {
+          b.push('OP_CSV');
+        } else if (op === 0xba) {
+          b.push('OP_CHECKSIGADD');
+        } else {
+          const opcode = bitcoinjs.script.toASM([ op ]);
+          if (opcode && op < 0xfd) {
+            if (/^OP_(\d+)$/.test(opcode)) {
+              b.push(opcode.replace(/^OP_(\d+)$/, 'OP_PUSHNUM_$1'));
+            } else {
+              b.push(opcode);
+            }
           } else {
-            b.push('OP_PUSHBYTES_' + dataLength + ' ' + chunk);
+            b.push('OP_RETURN_' + op);
           }
         }
+        i += 1;
       }
-    });
+    }
 
     return b.join(' ');
   }

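Note on the rewrite above: the new convertScriptSigAsm() walks the raw script bytes instead of re-tokenizing bitcoind's ASM string, so pushes come out in esplora-style notation (OP_PUSHBYTES_N, OP_PUSHDATA1/2/4) followed by the pushed data in hex. A minimal standalone sketch of the same byte walk (simplified: named non-push opcodes become OP_UNKNOWN_<n> here, whereas the real method resolves them through bitcoinjs.script.toASM):

    function scriptHexToAsm(hex: string): string {
      const buf = Buffer.from(hex, 'hex');
      const out: string[] = [];
      let i = 0;
      while (i < buf.length) {
        const op = buf[i];
        if (op >= 0x01 && op <= 0x4e) { // push opcodes
          i++;
          let push: number;
          if (op === 0x4c) { push = buf.readUInt8(i); out.push('OP_PUSHDATA1'); i += 1; }
          else if (op === 0x4d) { push = buf.readUInt16LE(i); out.push('OP_PUSHDATA2'); i += 2; }
          else if (op === 0x4e) { push = buf.readUInt32LE(i); out.push('OP_PUSHDATA4'); i += 4; }
          else { push = op; out.push('OP_PUSHBYTES_' + push); }
          const data = buf.slice(i, i + push);
          if (data.length !== push) { break; } // truncated push: stop decoding
          out.push(data.toString('hex'));
          i += push;
        } else {
          out.push(op === 0x00 ? 'OP_0' : 'OP_UNKNOWN_' + op);
          i += 1;
        }
      }
      return out.join(' ');
    }

    // A standard P2PKH scriptPubKey (OP_DUP OP_HASH160 <20 bytes> OP_EQUALVERIFY OP_CHECKSIG)
    // decodes here to 'OP_UNKNOWN_118 OP_UNKNOWN_169 OP_PUSHBYTES_20 <hex> OP_UNKNOWN_136 OP_UNKNOWN_172';
    // the real method maps 0x76/0xa9/0x88/0xac to their names via bitcoinjs.
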
@@ -332,21 +362,21 @@ class BitcoinApi implements AbstractBitcoinApi {
 
     if (vin.prevout.scriptpubkey_type === 'p2sh') {
       const redeemScript = vin.scriptsig_asm.split(' ').reverse()[0];
-      vin.inner_redeemscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(redeemScript, 'hex')));
+      vin.inner_redeemscript_asm = this.convertScriptSigAsm(redeemScript);
       if (vin.witness && vin.witness.length > 2) {
         const witnessScript = vin.witness[vin.witness.length - 1];
-        vin.inner_witnessscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(witnessScript, 'hex')));
+        vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
       }
     }
 
     if (vin.prevout.scriptpubkey_type === 'v0_p2wsh' && vin.witness) {
       const witnessScript = vin.witness[vin.witness.length - 1];
-      vin.inner_witnessscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(witnessScript, 'hex')));
+      vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
     }
 
     if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness && vin.witness.length > 1) {
       const witnessScript = vin.witness[vin.witness.length - 2];
-      vin.inner_witnessscript_asm = this.convertScriptSigAsm(bitcoinjs.script.toASM(Buffer.from(witnessScript, 'hex')));
+      vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
     }
   }

@@ -75,9 +75,12 @@ class Blocks {
         transactions.push(tx);
         transactionsFetched++;
       } catch (e) {
-        logger.debug('Error fetching block tx: ' + (e instanceof Error ? e.message : e));
         if (i === 0) {
-          throw new Error('Failed to fetch Coinbase transaction: ' + txIds[i]);
+          const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e);
+          logger.err(msg);
+          throw new Error(msg);
+        } else {
+          logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e));
         }
       }
     }

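The hunk above makes transaction fetching fail fast only for the coinbase (index 0), which the rest of the indexing pipeline depends on, while any other missing transaction is logged and skipped. A minimal sketch of the pattern, with hypothetical fetchTx/logger stand-ins:

    declare function fetchTx(txid: string): Promise<object>;   // stand-in for the backend's tx fetcher
    declare const logger: { err(msg: string): void };          // stand-in for the project logger

    async function fetchBlockTxs(txIds: string[]): Promise<object[]> {
      const transactions: object[] = [];
      for (let i = 0; i < txIds.length; i++) {
        try {
          transactions.push(await fetchTx(txIds[i]));
        } catch (e) {
          if (i === 0) {
            // Without the coinbase the block cannot be indexed: abort the whole block.
            const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e);
            logger.err(msg);
            throw new Error(msg);
          }
          // Other transactions are non-critical: log and continue.
          logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e));
        }
      }
      return transactions;
    }
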
@@ -137,8 +140,8 @@ class Blocks {
       pool = await poolsRepository.$getUnknownPool();
     }
 
-    if (!pool) { // Something is wrong with the pools table, ignore pool indexing
-      logger.err('Unable to find pool, nor getting the unknown pool. Is the "pools" table empty?');
+    if (!pool) { // We should never have this situation in practice
+      logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. Check your "pools" table entries`);
       return blockExtended;
     }
@@ -214,11 +217,12 @@ class Blocks {
 
     const lastBlockToIndex = Math.max(0, currentBlockHeight - indexingBlockAmount + 1);
 
-    logger.info(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);
+    logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);
 
     const chunkSize = 10000;
     let totaIndexed = await blocksRepository.$blockCount(null, null);
+    let indexedThisRun = 0;
+    let newlyIndexed = 0;
     const startedAt = new Date().getTime() / 1000;
     let timer = new Date().getTime() / 1000;
 
@@ -228,12 +232,11 @@ class Blocks {
       const missingBlockHeights: number[] = await blocksRepository.$getMissingBlocksBetweenHeights(
         currentBlockHeight, endBlock);
       if (missingBlockHeights.length <= 0) {
-        logger.debug(`No missing blocks between #${currentBlockHeight} to #${endBlock}`);
         currentBlockHeight -= chunkSize;
         continue;
       }
 
-      logger.debug(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);
+      logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);
 
       for (const blockHeight of missingBlockHeights) {
         if (blockHeight < lastBlockToIndex) {
@@ -255,14 +258,16 @@ class Blocks {
         const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
         const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
         const blockExtended = await this.$getBlockExtended(block, transactions);
+
+        newlyIndexed++;
         await blocksRepository.$saveBlockInDatabase(blockExtended);
       }
 
       currentBlockHeight -= chunkSize;
     }
-    logger.info('Block indexing completed');
+    logger.info(`Indexed ${newlyIndexed} blocks`);
   } catch (e) {
-    logger.err('An error occured in $generateBlockDatabase(). Trying again later. ' + e);
+    logger.err('Block indexing failed. Trying again later. Reason: ' + (e instanceof Error ? e.message : e));
     this.blockIndexingStarted = false;
     return;
   }

@@ -1,6 +1,5 @@
-import { PoolConnection } from 'mysql2/promise';
 import config from '../config';
-import { DB } from '../database';
+import DB from '../database';
 import logger from '../logger';
 
 const sleep = (ms: number) => new Promise(res => setTimeout(res, ms));
@@ -77,116 +76,112 @@ class DatabaseMigration {
     await this.$setStatisticsAddedIndexedFlag(databaseSchemaVersion);
 
     const isBitcoin = ['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK);
-    const connection = await DB.getConnection();
     try {
-      await this.$executeQuery(connection, this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
-      await this.$executeQuery(connection, this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
+      await this.$executeQuery(this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
+      await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
       if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
-        await this.$executeQuery(connection, `CREATE INDEX added ON statistics (added);`);
+        await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
       }
       if (databaseSchemaVersion < 3) {
-        await this.$executeQuery(connection, this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
+        await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
       }
       if (databaseSchemaVersion < 4) {
-        await this.$executeQuery(connection, 'DROP table IF EXISTS blocks;');
-        await this.$executeQuery(connection, this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
+        await this.$executeQuery('DROP table IF EXISTS blocks;');
+        await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
       }
       if (databaseSchemaVersion < 5 && isBitcoin === true) {
         logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
-        await this.$executeQuery(connection, 'TRUNCATE blocks;'); // Need to re-index
-        await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
+        await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
+        await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
       }
 
       if (databaseSchemaVersion < 6 && isBitcoin === true) {
         logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
-        await this.$executeQuery(connection, 'TRUNCATE blocks;'); // Need to re-index
+        await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
         // Cleanup original blocks fields type
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `tx_count` smallint unsigned NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `size` integer unsigned NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `weight` integer unsigned NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `difficulty` double NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `tx_count` smallint unsigned NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `size` integer unsigned NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `weight` integer unsigned NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` double NOT NULL DEFAULT "0"');
         // We also fix the pools.id type so we need to drop/re-create the foreign key
-        await this.$executeQuery(connection, 'ALTER TABLE blocks DROP FOREIGN KEY IF EXISTS `blocks_ibfk_1`');
-        await this.$executeQuery(connection, 'ALTER TABLE pools MODIFY `id` smallint unsigned AUTO_INCREMENT');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `pool_id` smallint unsigned NULL');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks ADD FOREIGN KEY (`pool_id`) REFERENCES `pools` (`id`)');
+        await this.$executeQuery('ALTER TABLE blocks DROP FOREIGN KEY IF EXISTS `blocks_ibfk_1`');
+        await this.$executeQuery('ALTER TABLE pools MODIFY `id` smallint unsigned AUTO_INCREMENT');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `pool_id` smallint unsigned NULL');
+        await this.$executeQuery('ALTER TABLE blocks ADD FOREIGN KEY (`pool_id`) REFERENCES `pools` (`id`)');
         // Add new block indexing fields
-        await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `version` integer unsigned NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `bits` integer unsigned NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
+        await this.$executeQuery('ALTER TABLE blocks ADD `version` integer unsigned NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks ADD `bits` integer unsigned NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
+        await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
       }
 
       if (databaseSchemaVersion < 7 && isBitcoin === true) {
-        await this.$executeQuery(connection, 'DROP table IF EXISTS hashrates;');
-        await this.$executeQuery(connection, this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
+        await this.$executeQuery('DROP table IF EXISTS hashrates;');
+        await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
      }
 
      if (databaseSchemaVersion < 8 && isBitcoin === true) {
        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
-        await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index
-        await this.$executeQuery(connection, 'ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
-        await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
-        await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
+        await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
+        await this.$executeQuery('ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
+        await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
+        await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
      }
 
      if (databaseSchemaVersion < 9 && isBitcoin === true) {
        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
-        await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index
-        await this.$executeQuery(connection, 'ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
-        await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
+        await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
+        await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
+        await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
      }
 
      if (databaseSchemaVersion < 10 && isBitcoin === true) {
-        await this.$executeQuery(connection, 'ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
+        await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
      }
 
      if (databaseSchemaVersion < 11 && isBitcoin === true) {
        logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
-        await this.$executeQuery(connection, 'TRUNCATE blocks;'); // Need to re-index
-        await this.$executeQuery(connection, `ALTER TABLE blocks
+        await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
+        await this.$executeQuery(`ALTER TABLE blocks
          ADD avg_fee INT UNSIGNED NULL,
          ADD avg_fee_rate INT UNSIGNED NULL
        `);
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
      }
 
      if (databaseSchemaVersion < 12 && isBitcoin === true) {
        // No need to re-index because the new data type can contain larger values
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
      }
 
      if (databaseSchemaVersion < 13 && isBitcoin === true) {
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `difficulty` DOUBLE UNSIGNED NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
-        await this.$executeQuery(connection, 'ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` DOUBLE UNSIGNED NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
+        await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
      }
 
      if (databaseSchemaVersion < 14 && isBitcoin === true) {
        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
-        await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index
-        await this.$executeQuery(connection, 'ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
-        await this.$executeQuery(connection, 'ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
+        await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
+        await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
+        await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
      }
 
      if (databaseSchemaVersion < 16 && isBitcoin === true) {
        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
-        await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
+        await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
      }
 
      if (databaseSchemaVersion < 17 && isBitcoin === true) {
-        await this.$executeQuery(connection, 'ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
+        await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
      }
 
-      connection.release();
    } catch (e) {
-      connection.release();
      throw e;
    }
  }

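The migration body above is one gate per schema version: each `if (databaseSchemaVersion < N)` block replays only on databases that have not yet reached version N, so a fresh database runs every step and an up-to-date one runs none. A hedged sketch of the general pattern (hypothetical names, not this file's API):

    type MigrationStep = { version: number; queries: string[] };

    async function migrate(
      currentVersion: number,
      steps: MigrationStep[],
      exec: (sql: string) => Promise<void>,
    ): Promise<void> {
      // Apply steps in ascending version order, skipping anything already applied.
      for (const step of [...steps].sort((a, b) => a.version - b.version)) {
        if (currentVersion < step.version) {
          for (const sql of step.queries) {
            await exec(sql);
          }
        }
      }
    }
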
@@ -203,13 +198,11 @@ class DatabaseMigration {
       return;
     }
 
-    const connection = await DB.getConnection();
-
     try {
       // We don't use "CREATE INDEX IF NOT EXISTS" because it is not supported on old mariadb version 5.X
       const query = `SELECT COUNT(1) hasIndex FROM INFORMATION_SCHEMA.STATISTICS
         WHERE table_schema=DATABASE() AND table_name='statistics' AND index_name='added';`;
-      const [rows] = await this.$executeQuery(connection, query, true);
+      const [rows] = await this.$executeQuery(query, true);
       if (rows[0].hasIndex === 0) {
         logger.debug('MIGRATIONS: `statistics.added` is not indexed');
         this.statisticsAddedIndexed = false;
@@ -223,28 +216,24 @@ class DatabaseMigration {
       logger.err('MIGRATIONS: Unable to check if `statistics.added` INDEX exists or not.');
       this.statisticsAddedIndexed = true;
     }
-
-    connection.release();
   }
 
   /**
    * Small query execution wrapper to log all executed queries
    */
-  private async $executeQuery(connection: PoolConnection, query: string, silent: boolean = false): Promise<any> {
+  private async $executeQuery(query: string, silent: boolean = false): Promise<any> {
     if (!silent) {
       logger.debug('MIGRATIONS: Execute query:\n' + query);
     }
-    return connection.query<any>({ sql: query, timeout: this.queryTimeout });
+    return DB.query({ sql: query, timeout: this.queryTimeout });
   }
 
   /**
    * Check if 'table' exists in the database
    */
   private async $checkIfTableExists(table: string): Promise<boolean> {
-    const connection = await DB.getConnection();
     const query = `SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = '${config.DATABASE.DATABASE}' AND TABLE_NAME = '${table}'`;
-    const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-    connection.release();
+    const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
     return rows[0]['COUNT(*)'] === 1;
   }

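After this hunk, migration call sites pass only the SQL: the pooled-connection lifecycle is hidden behind the DB singleton. An illustrative sketch of the wrapper's new shape (the timeout value and console.debug stand-in are assumptions, not the file's exact code):

    import DB from '../database';

    // No connection parameter: DB.query() acquires and releases pooled
    // connections internally.
    async function executeQuery(query: string, silent = false): Promise<any> {
      if (!silent) {
        console.debug('Execute query:\n' + query); // logger.debug in the real file
      }
      return DB.query({ sql: query, timeout: 180000 }); // timeout is illustrative
    }
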
@@ -252,10 +241,8 @@ class DatabaseMigration {
   * Get current database version
   */
   private async $getSchemaVersionFromDatabase(): Promise<number> {
-    const connection = await DB.getConnection();
     const query = `SELECT number FROM state WHERE name = 'schema_version';`;
-    const [rows] = await this.$executeQuery(connection, query, true);
-    connection.release();
+    const [rows] = await this.$executeQuery(query, true);
     return rows[0]['number'];
   }
@@ -263,8 +250,6 @@ class DatabaseMigration {
   * Create the `state` table
   */
   private async $createMigrationStateTable(): Promise<void> {
-    const connection = await DB.getConnection();
-
     try {
       const query = `CREATE TABLE IF NOT EXISTS state (
         name varchar(25) NOT NULL,
@@ -272,15 +257,12 @@ class DatabaseMigration {
         string varchar(100) NULL,
         CONSTRAINT name_unique UNIQUE (name)
       ) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
-      await this.$executeQuery(connection, query);
+      await this.$executeQuery(query);
 
       // Set initial values
-      await this.$executeQuery(connection, `INSERT INTO state VALUES('schema_version', 0, NULL);`);
-      await this.$executeQuery(connection, `INSERT INTO state VALUES('last_elements_block', 0, NULL);`);
-
-      connection.release();
+      await this.$executeQuery(`INSERT INTO state VALUES('schema_version', 0, NULL);`);
+      await this.$executeQuery(`INSERT INTO state VALUES('last_elements_block', 0, NULL);`);
     } catch (e) {
-      connection.release();
       throw e;
     }
   }
@@ -295,18 +277,14 @@ class DatabaseMigration {
     }
     transactionQueries.push(this.getUpdateToLatestSchemaVersionQuery());
 
-    const connection = await DB.getConnection();
     try {
-      await this.$executeQuery(connection, 'START TRANSACTION;');
+      await this.$executeQuery('START TRANSACTION;');
       for (const query of transactionQueries) {
-        await this.$executeQuery(connection, query);
+        await this.$executeQuery(query);
       }
-      await this.$executeQuery(connection, 'COMMIT;');
-
-      connection.release();
+      await this.$executeQuery('COMMIT;');
     } catch (e) {
-      await this.$executeQuery(connection, 'ROLLBACK;');
-      connection.release();
+      await this.$executeQuery('ROLLBACK;');
       throw e;
     }
   }
@@ -346,14 +324,12 @@ class DatabaseMigration {
   * Print current database version
   */
   private async $printDatabaseVersion() {
-    const connection = await DB.getConnection();
     try {
-      const [rows] = await this.$executeQuery(connection, 'SELECT VERSION() as version;', true);
+      const [rows] = await this.$executeQuery('SELECT VERSION() as version;', true);
       logger.debug(`MIGRATIONS: Database engine version '${rows[0].version}'`);
     } catch (e) {
       logger.debug(`MIGRATIONS: Could not fetch database engine version. ` + e);
     }
-    connection.release();
   }
 
   // Couple of wrappers to clean the main logic
@@ -490,24 +466,22 @@ class DatabaseMigration {
   public async $truncateIndexedData(tables: string[]) {
     const allowedTables = ['blocks', 'hashrates'];
 
-    const connection = await DB.getConnection();
     try {
       for (const table of tables) {
         if (!allowedTables.includes(table)) {
           logger.debug(`Table ${table} cannot be re-indexed (not allowed)`);
           continue;
-        };
+        }
 
-        await this.$executeQuery(connection, `TRUNCATE ${table}`, true);
+        await this.$executeQuery(`TRUNCATE ${table}`, true);
         if (table === 'hashrates') {
-          await this.$executeQuery(connection, 'UPDATE state set number = 0 where name = "last_hashrates_indexing"', true);
+          await this.$executeQuery('UPDATE state set number = 0 where name = "last_hashrates_indexing"', true);
         }
         logger.notice(`Table ${table} has been truncated`);
       }
     } catch (e) {
       logger.warn(`Unable to erase indexed data`);
     }
-    connection.release();
   }
 }

@@ -2,7 +2,7 @@ import { IBitcoinApi } from '../bitcoin/bitcoin-api.interface';
 import bitcoinClient from '../bitcoin/bitcoin-client';
 import bitcoinSecondClient from '../bitcoin/bitcoin-second-client';
 import { Common } from '../common';
-import { DB } from '../../database';
+import DB from '../../database';
 import logger from '../../logger';
 
 class ElementsParser {
@@ -33,10 +33,8 @@ class ElementsParser {
   }
 
   public async $getPegDataByMonth(): Promise<any> {
-    const connection = await DB.getConnection();
     const query = `SELECT SUM(amount) AS amount, DATE_FORMAT(FROM_UNIXTIME(datetime), '%Y-%m-01') AS date FROM elements_pegs GROUP BY DATE_FORMAT(FROM_UNIXTIME(datetime), '%Y%m')`;
-    const [rows] = await connection.query<any>(query);
-    connection.release();
+    const [rows] = await DB.query(query);
     return rows;
   }
@@ -79,7 +77,6 @@ class ElementsParser {
 
   protected async $savePegToDatabase(height: number, blockTime: number, amount: number, txid: string,
     txindex: number, bitcoinaddress: string, bitcointxid: string, bitcoinindex: number, final_tx: number): Promise<void> {
-    const connection = await DB.getConnection();
     const query = `INSERT INTO elements_pegs(
       block, datetime, amount, txid, txindex, bitcoinaddress, bitcointxid, bitcoinindex, final_tx
     ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`;
@@ -87,24 +84,19 @@ class ElementsParser {
     const params: (string | number)[] = [
       height, blockTime, amount, txid, txindex, bitcoinaddress, bitcointxid, bitcoinindex, final_tx
     ];
-    await connection.query(query, params);
-    connection.release();
+    await DB.query(query, params);
     logger.debug(`Saved L-BTC peg from block height #${height} with TXID ${txid}.`);
   }
 
   protected async $getLatestBlockHeightFromDatabase(): Promise<number> {
-    const connection = await DB.getConnection();
     const query = `SELECT number FROM state WHERE name = 'last_elements_block'`;
-    const [rows] = await connection.query<any>(query);
-    connection.release();
+    const [rows] = await DB.query(query);
     return rows[0]['number'];
   }
 
   protected async $saveLatestBlockToDatabase(blockHeight: number) {
-    const connection = await DB.getConnection();
     const query = `UPDATE state SET number = ? WHERE name = 'last_elements_block'`;
-    await connection.query<any>(query, [blockHeight]);
-    connection.release();
+    await DB.query(query, [blockHeight]);
   }
 }

@@ -142,8 +142,6 @@ class Mining {
     }
 
     try {
-      logger.info(`Indexing mining pools weekly hashrates`);
-
       const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
       const hashrates: any[] = [];
       const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
@@ -155,6 +153,7 @@ class Mining {
       const totalWeekIndexed = (await BlocksRepository.$blockCount(null, null)) / 1008;
       let indexedThisRun = 0;
       let totalIndexed = 0;
+      let newlyIndexed = 0;
       let startedAt = new Date().getTime();
 
       while (toTimestamp > genesisTimestamp) {
@@ -198,6 +197,7 @@ class Mining {
         });
       }
 
+      newlyIndexed += hashrates.length;
       await HashratesRepository.$saveHashrates(hashrates);
       hashrates.length = 0;
 
@@ -217,7 +217,9 @@ class Mining {
       }
       this.weeklyHashrateIndexingStarted = false;
       await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing');
-      logger.info(`Weekly pools hashrate indexing completed`);
+      if (newlyIndexed > 0) {
+        logger.info(`Indexed ${newlyIndexed} pools weekly hashrate`);
+      }
     } catch (e) {
       this.weeklyHashrateIndexingStarted = false;
       throw e;
@@ -249,8 +251,6 @@ class Mining {
     }
 
     try {
-      logger.info(`Indexing network daily hashrate`);
-
       const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
       const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
       const lastMidnight = this.getDateMidnight(new Date());
@@ -260,6 +260,7 @@ class Mining {
       const totalDayIndexed = (await BlocksRepository.$blockCount(null, null)) / 144;
       let indexedThisRun = 0;
       let totalIndexed = 0;
+      let newlyIndexed = 0;
       let startedAt = new Date().getTime();
 
       while (toTimestamp > genesisTimestamp) {
@@ -294,6 +295,7 @@ class Mining {
         });
 
         if (hashrates.length > 10) {
+          newlyIndexed += hashrates.length;
           await HashratesRepository.$saveHashrates(hashrates);
           hashrates.length = 0;
         }
@@ -303,7 +305,8 @@ class Mining {
         const daysPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
         const formattedDate = new Date(fromTimestamp).toUTCString();
         const daysLeft = Math.round(totalDayIndexed - totalIndexed);
-        logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ~${daysLeft} days left to index`);
+        logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ` +
+          `~${daysLeft} days left to index`);
         startedAt = new Date().getTime();
         indexedThisRun = 0;
       }
@@ -323,11 +326,14 @@ class Mining {
         });
       }
 
+      newlyIndexed += hashrates.length;
       await HashratesRepository.$saveHashrates(hashrates);
 
       await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing');
       this.hashrateIndexingStarted = false;
-      logger.info(`Daily network hashrate indexing completed`);
+      if (newlyIndexed > 0) {
+        logger.info(`Indexed ${newlyIndexed} day of network hashrate`);
+      }
     } catch (e) {
       this.hashrateIndexingStarted = false;
       throw e;

@@ -1,5 +1,4 @@
 import { readFileSync } from 'fs';
-import { DB } from '../database';
+import DB from '../database';
 import logger from '../logger';
 import config from '../config';
@@ -59,13 +58,11 @@ class PoolsParser {
     logger.debug(`Found ${poolNames.length} unique mining pools`);
 
     // Get existing pools from the db
-    const connection = await DB.getConnection();
     let existingPools;
     try {
-      [existingPools] = await connection.query<any>({ sql: 'SELECT * FROM pools;', timeout: 120000 });
+      [existingPools] = await DB.query({ sql: 'SELECT * FROM pools;', timeout: 120000 });
     } catch (e) {
-      logger.err('Unable to get existing pools from the database, skipping pools.json import');
-      connection.release();
+      logger.err('Cannot get existing pools from the database, skipping pools.json import');
       return;
     }

@@ -97,7 +94,7 @@ class PoolsParser {
       if (slug === undefined) {
         // Only keep alphanumerical
         slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
-        logger.debug(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
+        logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
       }
 
       if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) {
@@ -145,17 +142,15 @@ class PoolsParser {
 
     try {
       if (finalPoolDataAdd.length > 0) {
-        await connection.query<any>({ sql: queryAdd, timeout: 120000 });
+        await DB.query({ sql: queryAdd, timeout: 120000 });
       }
       for (const query of updateQueries) {
-        await connection.query<any>({ sql: query, timeout: 120000 });
+        await DB.query({ sql: query, timeout: 120000 });
       }
       await this.insertUnknownPool();
-      connection.release();
       logger.info('Mining pools.json import completed');
     } catch (e) {
-      connection.release();
-      logger.err(`Unable to import pools in the database!`);
+      logger.err(`Cannot import pools in the database`);
       throw e;
     }
   }
@@ -164,16 +159,15 @@ class PoolsParser {
   * Manually add the 'unknown pool'
   */
   private async insertUnknownPool() {
-    const connection = await DB.getConnection();
     try {
-      const [rows]: any[] = await connection.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
+      const [rows]: any[] = await DB.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
       if (rows.length === 0) {
-        await connection.query({
+        await DB.query({
           sql: `INSERT INTO pools(name, link, regexes, addresses, slug)
           VALUES("Unknown", "https://learnmeabitcoin.com/technical/coinbase-transaction", "[]", "[]", "unknown");
         `});
       } else {
-        await connection.query(`UPDATE pools
+        await DB.query(`UPDATE pools
           SET name='Unknown', link='https://learnmeabitcoin.com/technical/coinbase-transaction',
           regexes='[]', addresses='[]',
           slug='unknown'
@@ -183,8 +177,6 @@ class PoolsParser {
     } catch (e) {
       logger.err('Unable to insert "Unknown" mining pool');
     }
-
-    connection.release();
   }
 }

@@ -1,5 +1,5 @@
 import memPool from './mempool';
-import { DB } from '../database';
+import DB from '../database';
 import logger from '../logger';
 
 import { Statistic, TransactionExtended, OptimizedStatistic } from '../mempool.interfaces';
@@ -155,7 +155,6 @@ class Statistics {
   }
 
   private async $createZeroedStatistic(): Promise<number | undefined> {
-    const connection = await DB.getConnection();
     try {
       const query = `INSERT INTO statistics(
         added,
@@ -206,17 +205,14 @@ class Statistics {
       )
       VALUES (NOW(), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
         0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)`;
-      const [result]: any = await connection.query(query);
-      connection.release();
+      const [result]: any = await DB.query(query);
       return result.insertId;
     } catch (e) {
-      connection.release();
       logger.err('$create() error' + (e instanceof Error ? e.message : e));
     }
   }
 
   private async $create(statistics: Statistic): Promise<number | undefined> {
-    const connection = await DB.getConnection();
     try {
       const query = `INSERT INTO statistics(
         added,
@@ -314,11 +310,9 @@ class Statistics {
       statistics.vsize_1800,
       statistics.vsize_2000,
     ];
-      const [result]: any = await connection.query(query, params);
-      connection.release();
+      const [result]: any = await DB.query(query, params);
       return result.insertId;
     } catch (e) {
-      connection.release();
       logger.err('$create() error' + (e instanceof Error ? e.message : e));
     }
   }
@@ -421,10 +415,8 @@ class Statistics {
 
   private async $get(id: number): Promise<OptimizedStatistic | undefined> {
     try {
-      const connection = await DB.getConnection();
       const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics WHERE id = ?`;
-      const [rows] = await connection.query<any>(query, [id]);
-      connection.release();
+      const [rows] = await DB.query(query, [id]);
       if (rows[0]) {
         return this.mapStatisticToOptimizedStatistic([rows[0]])[0];
       }
@@ -435,11 +427,9 @@ class Statistics {
 
   public async $list2H(): Promise<OptimizedStatistic[]> {
     try {
-      const connection = await DB.getConnection();
       const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics ORDER BY statistics.added DESC LIMIT 120`;
-      const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-      connection.release();
-      return this.mapStatisticToOptimizedStatistic(rows);
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
     } catch (e) {
       logger.err('$list2H() error' + (e instanceof Error ? e.message : e));
       return [];
@@ -448,11 +438,9 @@ class Statistics {
 
   public async $list24H(): Promise<OptimizedStatistic[]> {
     try {
-      const connection = await DB.getConnection();
       const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics ORDER BY statistics.added DESC LIMIT 1440`;
-      const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-      connection.release();
-      return this.mapStatisticToOptimizedStatistic(rows);
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
     } catch (e) {
       logger.err('$list24h() error' + (e instanceof Error ? e.message : e));
       return [];
@@ -461,11 +449,9 @@ class Statistics {
 
   public async $list1W(): Promise<OptimizedStatistic[]> {
     try {
-      const connection = await DB.getConnection();
       const query = this.getQueryForDaysAvg(300, '1 WEEK'); // 5m interval
-      const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-      connection.release();
-      return this.mapStatisticToOptimizedStatistic(rows);
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
     } catch (e) {
       logger.err('$list1W() error' + (e instanceof Error ? e.message : e));
       return [];
@@ -474,11 +460,9 @@ class Statistics {
 
   public async $list1M(): Promise<OptimizedStatistic[]> {
     try {
-      const connection = await DB.getConnection();
       const query = this.getQueryForDaysAvg(1800, '1 MONTH'); // 30m interval
-      const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-      connection.release();
-      return this.mapStatisticToOptimizedStatistic(rows);
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
     } catch (e) {
       logger.err('$list1M() error' + (e instanceof Error ? e.message : e));
       return [];
@@ -487,11 +471,9 @@ class Statistics {
 
   public async $list3M(): Promise<OptimizedStatistic[]> {
     try {
-      const connection = await DB.getConnection();
       const query = this.getQueryForDaysAvg(7200, '3 MONTH'); // 2h interval
-      const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-      connection.release();
-      return this.mapStatisticToOptimizedStatistic(rows);
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
     } catch (e) {
       logger.err('$list3M() error' + (e instanceof Error ? e.message : e));
       return [];
@@ -500,11 +482,9 @@ class Statistics {
 
   public async $list6M(): Promise<OptimizedStatistic[]> {
     try {
-      const connection = await DB.getConnection();
-      const query = this.getQueryForDaysAvg(10800, '6 MONTH'); // 3h interval
-      const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-      connection.release();
-      return this.mapStatisticToOptimizedStatistic(rows);
+      const query = this.getQueryForDaysAvg(10800, '6 MONTH'); // 3h interval
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
     } catch (e) {
       logger.err('$list6M() error' + (e instanceof Error ? e.message : e));
       return [];
@@ -513,11 +493,9 @@ class Statistics {
 
   public async $list1Y(): Promise<OptimizedStatistic[]> {
     try {
-      const connection = await DB.getConnection();
       const query = this.getQueryForDays(28800, '1 YEAR'); // 8h interval
-      const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-      connection.release();
-      return this.mapStatisticToOptimizedStatistic(rows);
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
     } catch (e) {
       logger.err('$list1Y() error' + (e instanceof Error ? e.message : e));
       return [];
@@ -526,11 +504,9 @@ class Statistics {
 
   public async $list2Y(): Promise<OptimizedStatistic[]> {
     try {
-      const connection = await DB.getConnection();
-      const query = this.getQueryForDays(28800, "2 YEAR"); // 8h interval
-      const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-      connection.release();
-      return this.mapStatisticToOptimizedStatistic(rows);
+      const query = this.getQueryForDays(28800, '2 YEAR'); // 8h interval
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
     } catch (e) {
       logger.err('$list2Y() error' + (e instanceof Error ? e.message : e));
       return [];
@@ -539,11 +515,9 @@ class Statistics {
 
   public async $list3Y(): Promise<OptimizedStatistic[]> {
     try {
-      const connection = await DB.getConnection();
-      const query = this.getQueryForDays(43200, "3 YEAR"); // 12h interval
-      const [rows] = await connection.query<any>({ sql: query, timeout: this.queryTimeout });
-      connection.release();
-      return this.mapStatisticToOptimizedStatistic(rows);
+      const query = this.getQueryForDays(43200, '3 YEAR'); // 12h interval
+      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
+      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
     } catch (e) {
       logger.err('$list3Y() error' + (e instanceof Error ? e.message : e));
       return [];

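Every $list* method above now repeats the same three lines: build a query, run it through DB.query with this.queryTimeout, and cast the untyped rows with `rows as Statistic[]`. A hedged sketch of a generic helper that would centralize that cast (hypothetical, not part of this commit):

    import DB from '../database';

    // Single place where the unchecked cast from the driver's untyped rows happens.
    async function typedQuery<T>(sql: string, timeout: number): Promise<T[]> {
      const [rows] = await DB.query({ sql, timeout });
      return rows as T[];
    }

    // e.g. inside Statistics: const stats = await typedQuery<Statistic>(query, this.queryTimeout);
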
@@ -1,51 +1,51 @@
 import config from './config';
-import { createPool, PoolConnection } from 'mysql2/promise';
+import { createPool, Pool, PoolConnection } from 'mysql2/promise';
 import logger from './logger';
 import { PoolOptions } from 'mysql2/typings/mysql';
 
-export class DB {
-  static poolConfig = ():PoolOptions => {
-    let poolConfig:PoolOptions = {
-      port: config.DATABASE.PORT,
-      database: config.DATABASE.DATABASE,
-      user: config.DATABASE.USERNAME,
-      password: config.DATABASE.PASSWORD,
-      connectionLimit: 10,
-      supportBigNumbers: true,
-      timezone: '+00:00',
-    }
-
-    if (config.DATABASE.SOCKET !== "") {
-      poolConfig.socketPath = config.DATABASE.SOCKET;
+class DB {
+  constructor() {
+    if (config.DATABASE.SOCKET !== '') {
+      this.poolConfig.socketPath = config.DATABASE.SOCKET;
     } else {
-      poolConfig.host = config.DATABASE.HOST;
+      this.poolConfig.host = config.DATABASE.HOST;
     }
+  }
 
-    return poolConfig;
-  }
-
-  static pool = createPool(DB.poolConfig());
+  private pool: Pool | null = null;
+  private poolConfig: PoolOptions = {
+    port: config.DATABASE.PORT,
+    database: config.DATABASE.DATABASE,
+    user: config.DATABASE.USERNAME,
+    password: config.DATABASE.PASSWORD,
+    connectionLimit: 10,
+    supportBigNumbers: true,
+    timezone: '+00:00',
+  };
 
-  static connectionsReady: number[] = [];
+  public async query(query, params?) {
+    const pool = await this.getPool();
+    return pool.query(query, params);
+  }
 
-  static async getConnection() {
-    const connection: PoolConnection = await DB.pool.getConnection();
-    const connectionId = connection['connection'].connectionId;
-    if (!DB.connectionsReady.includes(connectionId)) {
-      await connection.query(`SET time_zone='+00:00';`);
-      this.connectionsReady.push(connectionId);
+  public async checkDbConnection() {
+    try {
+      await this.query('SELECT ?', [1]);
+      logger.info('Database connection established.');
+    } catch (e) {
+      logger.err('Could not connect to database: ' + (e instanceof Error ? e.message : e));
+      process.exit(1);
     }
-    return connection;
+  }
+
+  private async getPool(): Promise<Pool> {
+    if (this.pool === null) {
+      this.pool = createPool(this.poolConfig);
+      this.pool.on('connection', function (newConnection: PoolConnection) {
+        newConnection.query(`SET time_zone='+00:00'`);
+      });
+    }
+    return this.pool;
   }
 }
 
-export async function checkDbConnection() {
-  try {
-    const connection = await DB.getConnection();
-    logger.info('Database connection established.');
-    connection.release();
-  } catch (e) {
-    logger.err('Could not connect to database: ' + (e instanceof Error ? e.message : e));
-    process.exit(1);
-  }
-}
+export default new DB();

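The rewritten module above replaces the exported class and the free checkDbConnection() function with a default-exported singleton: the pool is created lazily on first query, and every new pooled connection is pinned to UTC through the pool's 'connection' event instead of the old per-connection bookkeeping. A usage sketch against the new API (import path as used elsewhere in this diff):

    import DB from './database';

    async function startup(): Promise<void> {
      // One-time readiness probe; exits the process if the database is unreachable.
      await DB.checkDbConnection();

      // Parameterized query; the pool is created on first use, and each fresh
      // connection has already run SET time_zone='+00:00'.
      const [rows] = await DB.query('SELECT number FROM state WHERE name = ?', ['schema_version']);
      console.log(rows);
    }
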
@@ -5,7 +5,7 @@ import * as WebSocket from 'ws';
 import * as cluster from 'cluster';
 import axios from 'axios';
 
-import { checkDbConnection, DB } from './database';
+import DB from './database';
 import config from './config';
 import routes from './routes';
 import blocks from './api/blocks';
@@ -89,11 +89,11 @@ class Server {
     diskCache.loadMempoolCache();
 
     if (config.DATABASE.ENABLED) {
-      await checkDbConnection();
+      await DB.checkDbConnection();
       try {
         if (process.env.npm_config_reindex != undefined) { // Re-index requests
           const tables = process.env.npm_config_reindex.split(',');
-          logger.warn(`Indexed data for "${process.env.npm_config_reindex}" tables will be erased in 5 seconds from now (using '--reindex') ...`);
+          logger.warn(`Indexed data for "${process.env.npm_config_reindex}" tables will be erased in 5 seconds (using '--reindex')`);
           await Common.sleep(5000);
           await databaseMigration.$truncateIndexedData(tables);
         }
@@ -169,8 +169,12 @@ class Server {
   }
 
   async $resetHashratesIndexingState() {
-    await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing', 0);
-    await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
+    try {
+      await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing', 0);
+      await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
+    } catch (e) {
+      logger.err(`Cannot reset hashrate indexing timestamps. Reason: ` + (e instanceof Error ? e.message : e));
+    }
   }
 
   async $runIndexingWhenReady() {
@@ -184,11 +188,11 @@ class Server {
       await BlocksRepository.$deleteBlocks(10);
       await HashratesRepository.$deleteLastEntries();
     }
-    blocks.$generateBlockDatabase();
+    await blocks.$generateBlockDatabase();
     await mining.$generateNetworkHashrateHistory();
     await mining.$generatePoolHashrateHistory();
   } catch (e) {
-    logger.err(`Unable to run indexing right now, trying again later. ` + e);
+    logger.err(`Indexing failed, trying again later. Reason: ` + (e instanceof Error ? e.message : e));
   }
 }

@@ -1,5 +1,5 @@
-import { BlockExtended, PoolTag } from '../mempool.interfaces';
-import { DB } from '../database';
+import { BlockExtended } from '../mempool.interfaces';
+import DB from '../database';
 import logger from '../logger';
 import { Common } from '../api/common';
 import { prepareBlock } from '../utils/blocks-utils';
@@ -10,11 +10,7 @@ class BlocksRepository {
   * Save indexed block data in the database
   */
   public async $saveBlockInDatabase(block: BlockExtended) {
-    let connection;
-
     try {
-      connection = await DB.getConnection();
-
       const query = `INSERT INTO blocks(
         height, hash, blockTimestamp, size,
         weight, tx_count, coinbase_raw, difficulty,
@@ -52,15 +48,12 @@ class BlocksRepository {
         block.extras.avgFeeRate,
       ];
 
-      await connection.query(query, params);
-      connection.release();
+      await DB.query(query, params);
     } catch (e: any) {
-      connection.release();
-      if (e.errno === 1062) { // ER_DUP_ENTRY
+      if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
         logger.debug(`$saveBlockInDatabase() - Block ${block.height} has already been indexed, ignoring`);
       } else {
-        connection.release();
-        logger.err('$saveBlockInDatabase() error: ' + (e instanceof Error ? e.message : e));
+        logger.err('Cannot save indexed block into db. Reason: ' + (e instanceof Error ? e.message : e));
         throw e;
       }
     }

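The catch block above treats MySQL error 1062 (ER_DUP_ENTRY) as benign: after a backend restart the same block can legitimately be saved twice, so the duplicate insert is ignored and anything else is re-thrown. A minimal sketch of that idempotent-insert pattern:

    import DB from '../database';

    async function insertIgnoringDuplicates(sql: string, params: unknown[]): Promise<void> {
      try {
        await DB.query(sql, params);
      } catch (e: any) {
        if (e.errno === 1062) { // ER_DUP_ENTRY: row already present, expected after restart
          return;
        }
        throw e; // anything else is a real failure
      }
    }
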
@@ -74,16 +67,13 @@ class BlocksRepository {
       return [];
     }
 
-    let connection;
     try {
-      connection = await DB.getConnection();
-      const [rows]: any[] = await connection.query(`
+      const [rows]: any[] = await DB.query(`
         SELECT height
         FROM blocks
         WHERE height <= ? AND height >= ?
         ORDER BY height DESC;
       `, [startHeight, endHeight]);
-      connection.release();
 
       const indexedBlockHeights: number[] = [];
       rows.forEach((row: any) => { indexedBlockHeights.push(row.height); });
@@ -92,8 +82,7 @@ class BlocksRepository {
 
       return missingBlocksHeights;
     } catch (e) {
-      connection.release();
-      logger.err('$getMissingBlocksBetweenHeights() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot retrieve blocks list to index. Reason: ' + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -121,16 +110,11 @@ class BlocksRepository {
 
     query += ` GROUP by pools.id`;
 
-    let connection;
     try {
-      connection = await DB.getConnection();
-      const [rows] = await connection.query(query, params);
-      connection.release();
-
+      const [rows] = await DB.query(query, params);
       return rows;
     } catch (e) {
-      connection.release();
-      logger.err('$getEmptyBlocks() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot count empty blocks. Reason: ' + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -159,16 +143,11 @@ class BlocksRepository {
       query += ` blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
     }
 
-    let connection;
     try {
-      connection = await DB.getConnection();
-      const [rows] = await connection.query(query, params);
-      connection.release();
-
+      const [rows] = await DB.query(query, params);
       return <number>rows[0].blockCount;
     } catch (e) {
-      connection.release();
-      logger.err('$blockCount() error' + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot count blocks for this pool (using offset). Reason: ` + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -199,16 +178,11 @@ class BlocksRepository {
     }
     query += ` blockTimestamp BETWEEN FROM_UNIXTIME('${from}') AND FROM_UNIXTIME('${to}')`;
 
-    let connection;
     try {
-      connection = await DB.getConnection();
-      const [rows] = await connection.query(query, params);
-      connection.release();
-
+      const [rows] = await DB.query(query, params);
       return <number>rows[0];
     } catch (e) {
-      connection.release();
-      logger.err('$blockCountBetweenTimestamp() error' + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot count blocks for this pool (using timestamps). Reason: ` + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@ -222,11 +196,8 @@ class BlocksRepository {
|
||||
ORDER BY height
|
||||
LIMIT 1;`;
|
||||
|
||||
let connection;
|
||||
try {
|
||||
connection = await DB.getConnection();
|
||||
const [rows]: any[] = await connection.query(query);
|
||||
connection.release();
|
||||
const [rows]: any[] = await DB.query(query);
|
||||
|
||||
if (rows.length <= 0) {
|
||||
return -1;
|
||||
@ -234,8 +205,7 @@ class BlocksRepository {
|
||||
|
||||
return <number>rows[0].blockTimestamp;
|
||||
} catch (e) {
|
||||
connection.release();
|
||||
logger.err('$oldestBlockTimestamp() error' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot get oldest indexed block timestamp. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@ -243,7 +213,7 @@ class BlocksRepository {
|
||||
/**
|
||||
* Get blocks mined by a specific mining pool
|
||||
*/
|
||||
public async $getBlocksByPool(slug: string, startHeight: number | undefined = undefined): Promise<object[]> {
|
||||
public async $getBlocksByPool(slug: string, startHeight?: number): Promise<object[]> {
|
||||
const pool = await PoolsRepository.$getPool(slug);
|
||||
if (!pool) {
|
||||
throw new Error(`This mining pool does not exist`);
|
||||
@ -264,21 +234,17 @@ class BlocksRepository {
|
||||
query += ` ORDER BY height DESC
|
||||
LIMIT 10`;
|
||||
|
||||
let connection;
|
||||
try {
|
||||
connection = await DB.getConnection();
|
||||
const [rows] = await connection.query(query, params);
|
||||
connection.release();
|
||||
const [rows] = await DB.query(query, params);
|
||||
|
||||
const blocks: BlockExtended[] = [];
|
||||
for (let block of <object[]>rows) {
|
||||
for (const block of <object[]>rows) {
|
||||
blocks.push(prepareBlock(block));
|
||||
}
|
||||
|
||||
return blocks;
|
||||
} catch (e) {
|
||||
connection.release();
|
||||
logger.err('$getBlocksByPool() error' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot get blocks for this pool. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@ -287,10 +253,8 @@ class BlocksRepository {
|
||||
* Get one block by height
|
||||
*/
|
||||
public async $getBlockByHeight(height: number): Promise<object | null> {
|
||||
let connection;
|
||||
try {
|
||||
connection = await DB.getConnection();
|
||||
const [rows]: any[] = await connection.query(`
|
||||
const [rows]: any[] = await DB.query(`
|
||||
SELECT *, UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
|
||||
pools.id as pool_id, pools.name as pool_name, pools.link as pool_link, pools.slug as pool_slug,
|
||||
pools.addresses as pool_addresses, pools.regexes as pool_regexes,
|
||||
@ -299,7 +263,6 @@ class BlocksRepository {
|
||||
JOIN pools ON blocks.pool_id = pools.id
|
||||
WHERE height = ${height};
|
||||
`);
|
||||
connection.release();
|
||||
|
||||
if (rows.length <= 0) {
|
||||
return null;
|
||||
@ -307,8 +270,7 @@ class BlocksRepository {
|
||||
|
||||
return rows[0];
|
||||
} catch (e) {
|
||||
connection.release();
|
||||
logger.err('$getBlockByHeight() error' + (e instanceof Error ? e.message : e));
|
||||
logger.err(`Cannot get indexed block ${height}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@ -351,21 +313,16 @@ class BlocksRepository {
|
||||
ORDER BY t.height
|
||||
`;
|
||||
|
||||
let connection;
|
||||
try {
|
||||
connection = await DB.getConnection();
|
||||
const [rows]: any[] = await connection.query(query);
|
||||
connection.release();
|
||||
const [rows]: any[] = await DB.query(query);
|
||||
|
||||
for (const row of rows) {
|
||||
delete row['rn'];
|
||||
}
|
||||
|
||||
connection.release();
|
||||
return rows;
|
||||
} catch (e) {
|
||||
connection.release();
|
||||
logger.err('$getBlocksDifficulty() error' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot generate difficulty history. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@ -374,10 +331,7 @@ class BlocksRepository {
|
||||
* Get general block stats
|
||||
*/
|
||||
public async $getBlockStats(blockCount: number): Promise<any> {
|
||||
let connection;
|
||||
try {
|
||||
connection = await DB.getConnection();
|
||||
|
||||
// We need to use a subquery
|
||||
const query = `
|
||||
SELECT MIN(height) as startBlock, MAX(height) as endBlock, SUM(reward) as totalReward, SUM(fees) as totalFee, SUM(tx_count) as totalTx
|
||||
@ -386,13 +340,11 @@ class BlocksRepository {
|
||||
ORDER by height DESC
|
||||
LIMIT ?) as sub`;
|
||||
|
||||
const [rows]: any = await connection.query(query, [blockCount]);
|
||||
connection.release();
|
||||
|
||||
const [rows]: any = await DB.query(query, [blockCount]);
|
||||
|
||||
return rows[0];
|
||||
} catch (e) {
|
||||
connection.release();
|
||||
logger.err('$getBlockStats() error: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot generate reward stats. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@ -401,24 +353,18 @@ class BlocksRepository {
|
||||
* Check if the last 10 blocks chain is valid
|
||||
*/
|
||||
public async $validateRecentBlocks(): Promise<boolean> {
|
||||
let connection;
|
||||
|
||||
try {
|
||||
connection = await DB.getConnection();
|
||||
const [lastBlocks] = await connection.query(`SELECT height, hash, previous_block_hash FROM blocks ORDER BY height DESC LIMIT 10`);
|
||||
connection.release();
|
||||
const [lastBlocks]: any[] = await DB.query(`SELECT height, hash, previous_block_hash FROM blocks ORDER BY height DESC LIMIT 10`);
|
||||
|
||||
for (let i = 0; i < lastBlocks.length - 1; ++i) {
|
||||
if (lastBlocks[i].previous_block_hash !== lastBlocks[i + 1].hash) {
|
||||
logger.notice(`Chain divergence detected at block ${lastBlocks[i].height}, re-indexing most recent data`);
|
||||
logger.warn(`Chain divergence detected at block ${lastBlocks[i].height}, re-indexing most recent data`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
connection.release();
|
||||
|
||||
return true; // Don't do anything if there is a db error
|
||||
}
|
||||
}
|
||||
@ -427,27 +373,20 @@ class BlocksRepository {
|
||||
* Delete $count blocks from the database
|
||||
*/
|
||||
public async $deleteBlocks(count: number) {
|
||||
let connection;
|
||||
logger.info(`Delete ${count} most recent indexed blocks from the database`);
|
||||
|
||||
try {
|
||||
connection = await DB.getConnection();
|
||||
logger.debug(`Delete ${count} most recent indexed blocks from the database`);
|
||||
await connection.query(`DELETE FROM blocks ORDER BY height DESC LIMIT ${count};`);
|
||||
await DB.query(`DELETE FROM blocks ORDER BY height DESC LIMIT ${count};`);
|
||||
} catch (e) {
|
||||
logger.err('$deleteBlocks() error' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot delete recent indexed blocks. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
||||
connection.release();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the historical averaged block reward and total fees
|
||||
* Get the historical averaged block fees
|
||||
*/
|
||||
public async $getHistoricalBlockFees(div: number, interval: string | null): Promise<any> {
|
||||
let connection;
|
||||
try {
|
||||
connection = await DB.getConnection();
|
||||
|
||||
let query = `SELECT CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
|
||||
CAST(AVG(fees) as INT) as avg_fees
|
||||
FROM blocks`;
|
||||
@ -458,13 +397,10 @@ class BlocksRepository {
|
||||
|
||||
query += ` GROUP BY UNIX_TIMESTAMP(blockTimestamp) DIV ${div}`;
|
||||
|
||||
const [rows]: any = await connection.query(query);
|
||||
connection.release();
|
||||
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
connection.release();
|
||||
logger.err('$getHistoricalBlockFees() error: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot generate block fees history. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@ -473,10 +409,7 @@ class BlocksRepository {
|
||||
* Get the historical averaged block rewards
|
||||
*/
|
||||
public async $getHistoricalBlockRewards(div: number, interval: string | null): Promise<any> {
|
||||
let connection;
|
||||
try {
|
||||
connection = await DB.getConnection();
|
||||
|
||||
let query = `SELECT CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
|
||||
CAST(AVG(reward) as INT) as avg_rewards
|
||||
FROM blocks`;
|
||||
@ -487,13 +420,10 @@ class BlocksRepository {
|
||||
|
||||
query += ` GROUP BY UNIX_TIMESTAMP(blockTimestamp) DIV ${div}`;
|
||||
|
||||
const [rows]: any = await connection.query(query);
|
||||
connection.release();
|
||||
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
connection.release();
|
||||
logger.err('$getHistoricalBlockRewards() error: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot generate block rewards history. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
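
Every hunk in the repository above follows the same refactor: the manual DB.getConnection() / connection.query() / connection.release() sequence collapses into a single DB.query() call. The wrapper itself is not part of this diff; a minimal sketch of what such a helper can look like, assuming a mysql2/promise pool (host, credentials, and pool size here are illustrative only):

    import { createPool, Pool, PoolConnection } from 'mysql2/promise';

    // Hypothetical pool setup; the real values come from the app config.
    const pool: Pool = createPool({
      host: '127.0.0.1',
      user: 'mempool',
      database: 'mempool',
      connectionLimit: 10,
    });

    export default class DB {
      // Acquire a pooled connection, run one query, and always release the
      // connection again - even when the query throws. Centralizing this is
      // what lets every caller above drop its release() bookkeeping.
      static async query(query: string, params?: (string | number)[]): Promise<any> {
        const connection: PoolConnection = await pool.getConnection();
        try {
          return await connection.query(query, params);
        } finally {
          connection.release();
        }
      }
    }

mysql2's own pool.query() provides the same acquire/run/release behavior in one call; funneling everything through a single static method additionally gives one place to hang logging or retry logic.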
@@ -1,5 +1,5 @@
 import { Common } from '../api/common';
-import { DB } from '../database';
+import DB from '../database';
 import logger from '../logger';
 import PoolsRepository from './PoolsRepository';

@@ -20,14 +20,10 @@ class HashratesRepository {
     }
     query = query.slice(0, -1);

-    let connection;
     try {
-      connection = await DB.getConnection();
-      await connection.query(query);
-      connection.release();
+      await DB.query(query);
     } catch (e: any) {
-      connection.release();
-      logger.err('$saveHashrateInDatabase() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -35,8 +31,6 @@ class HashratesRepository {
   public async $getNetworkDailyHashrate(interval: string | null): Promise<any[]> {
     interval = Common.getSqlInterval(interval);

-    const connection = await DB.getConnection();
-
     let query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp, avg_hashrate as avgHashrate
       FROM hashrates`;

@@ -50,33 +44,25 @@ class HashratesRepository {
     query += ` ORDER by hashrate_timestamp`;

     try {
-      const [rows]: any[] = await connection.query(query);
-      connection.release();
-
+      const [rows]: any[] = await DB.query(query);
       return rows;
     } catch (e) {
-      connection.release();
-      logger.err('$getNetworkDailyHashrate() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
       throw e;
     }
   }

   public async $getWeeklyHashrateTimestamps(): Promise<number[]> {
-    const connection = await DB.getConnection();
-
     const query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp
       FROM hashrates
       WHERE type = 'weekly'
       GROUP BY hashrate_timestamp`;

     try {
-      const [rows]: any[] = await connection.query(query);
-      connection.release();
-
+      const [rows]: any[] = await DB.query(query);
       return rows.map(row => row.timestamp);
     } catch (e) {
-      connection.release();
-      logger.err('$getWeeklyHashrateTimestamps() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot retrieve indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -87,7 +73,6 @@ class HashratesRepository {
   public async $getPoolsWeeklyHashrate(interval: string | null): Promise<any[]> {
     interval = Common.getSqlInterval(interval);

-    const connection = await DB.getConnection();
     const topPoolsId = (await PoolsRepository.$getPoolsInfo('1w')).map((pool) => pool.poolId);

     let query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp, avg_hashrate as avgHashrate, share, pools.name as poolName
@@ -106,13 +91,10 @@ class HashratesRepository {
     query += ` ORDER by hashrate_timestamp, FIELD(pool_id, ${topPoolsId})`;

     try {
-      const [rows]: any[] = await connection.query(query);
-      connection.release();
-
+      const [rows]: any[] = await DB.query(query);
       return rows;
     } catch (e) {
-      connection.release();
-      logger.err('$getPoolsWeeklyHashrate() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -128,8 +110,8 @@ class HashratesRepository {

     // Find hashrate boundaries
     let query = `SELECT MIN(hashrate_timestamp) as firstTimestamp, MAX(hashrate_timestamp) as lastTimestamp
       FROM hashrates
       JOIN pools on pools.id = pool_id
       WHERE hashrates.type = 'weekly' AND pool_id = ? AND avg_hashrate != 0
       ORDER by hashrate_timestamp LIMIT 1`;

@@ -138,15 +120,11 @@ class HashratesRepository {
       lastTimestamp: '9999-01-01'
     };

-    let connection;
     try {
-      connection = await DB.getConnection();
-      const [rows]: any[] = await connection.query(query, [pool.id]);
+      const [rows]: any[] = await DB.query(query, [pool.id]);
       boundaries = rows[0];
-      connection.release();
     } catch (e) {
-      connection.release();
-      logger.err('$getPoolWeeklyHashrate() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e));
     }

     // Get hashrates entries between boundaries
@@ -158,13 +136,10 @@ class HashratesRepository {
       ORDER by hashrate_timestamp`;

     try {
-      const [rows]: any[] = await connection.query(query, [boundaries.firstTimestamp, boundaries.lastTimestamp, pool.id]);
-      connection.release();
-
+      const [rows]: any[] = await DB.query(query, [boundaries.firstTimestamp, boundaries.lastTimestamp, pool.id]);
       return rows;
     } catch (e) {
-      connection.release();
-      logger.err('$getPoolWeeklyHashrate() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -173,14 +148,13 @@ class HashratesRepository {
    * Set latest run timestamp
    */
   public async $setLatestRunTimestamp(key: string, val: any = null) {
-    const connection = await DB.getConnection();
     const query = `UPDATE state SET number = ? WHERE name = ?`;

     try {
-      await connection.query<any>(query, (val === null) ? [Math.round(new Date().getTime() / 1000), key] : [val, key]);
-      connection.release();
+      await DB.query(query, (val === null) ? [Math.round(new Date().getTime() / 1000), key] : [val, key]);
     } catch (e) {
-      connection.release();
       logger.err(`Cannot set last indexing timestamp for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
       throw e;
     }
   }

@@ -188,20 +162,17 @@ class HashratesRepository {
    * Get latest run timestamp
    */
   public async $getLatestRunTimestamp(key: string): Promise<number> {
-    const connection = await DB.getConnection();
     const query = `SELECT number FROM state WHERE name = ?`;

     try {
-      const [rows] = await connection.query<any>(query, [key]);
-      connection.release();
+      const [rows]: any[] = await DB.query(query, [key]);

       if (rows.length === 0) {
         return 0;
       }
       return rows[0]['number'];
     } catch (e) {
-      connection.release();
-      logger.err('$setLatestRunTimestamp() error' + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot retrieve last indexing timestamp for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -210,23 +181,19 @@ class HashratesRepository {
    * Delete most recent data points for re-indexing
    */
   public async $deleteLastEntries() {
-    logger.debug(`Delete latest hashrates data points from the database`);
+    logger.info(`Delete latest hashrates data points from the database`);

-    let connection;
     try {
-      connection = await DB.getConnection();
-      const [rows] = await connection.query(`SELECT MAX(hashrate_timestamp) as timestamp FROM hashrates GROUP BY type`);
+      const [rows]: any[] = await DB.query(`SELECT MAX(hashrate_timestamp) as timestamp FROM hashrates GROUP BY type`);
       for (const row of rows) {
-        await connection.query(`DELETE FROM hashrates WHERE hashrate_timestamp = ?`, [row.timestamp]);
+        await DB.query(`DELETE FROM hashrates WHERE hashrate_timestamp = ?`, [row.timestamp]);
       }
       // Re-run the hashrate indexing to fill up missing data
       await this.$setLatestRunTimestamp('last_hashrates_indexing', 0);
       await this.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
     } catch (e) {
-      logger.err('$deleteLastEntries() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
     }
-
-    connection.release();
   }
 }
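
One detail of $deleteLastEntries() above is easy to miss: after deleting the newest data points it resets the 'last_hashrates_indexing' and 'last_weekly_hashrates_indexing' timestamps in the state table to 0, which forces the next indexing pass to backfill the gap. A hypothetical sketch of how such a timestamp can gate the indexer (the actual scheduling code is not part of this diff and may differ):

    // A zeroed timestamp makes the elapsed time effectively infinite,
    // so the indexer re-runs on its next wake-up and refills the data.
    async function shouldRunHashrateIndexing(
      repo: { $getLatestRunTimestamp(key: string): Promise<number> },
      nowSeconds: number,
    ): Promise<boolean> {
      const lastRun = await repo.$getLatestRunTimestamp('last_hashrates_indexing');
      const oneDay = 86400;
      return nowSeconds - lastRun >= oneDay;
    }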
@@ -1,6 +1,6 @@
 import { Common } from '../api/common';
 import config from '../config';
-import { DB } from '../database';
+import DB from '../database';
 import logger from '../logger';
 import { PoolInfo, PoolTag } from '../mempool.interfaces';

@@ -9,9 +9,7 @@ class PoolsRepository {
    * Get all pools tagging info
    */
   public async $getPools(): Promise<PoolTag[]> {
-    const connection = await DB.getConnection();
-    const [rows] = await connection.query('SELECT id, name, addresses, regexes, slug FROM pools;');
-    connection.release();
+    const [rows] = await DB.query('SELECT id, name, addresses, regexes, slug FROM pools;');
     return <PoolTag[]>rows;
   }

@@ -19,9 +17,7 @@ class PoolsRepository {
    * Get unknown pool tagging info
    */
   public async $getUnknownPool(): Promise<PoolTag> {
-    const connection = await DB.getConnection();
-    const [rows] = await connection.query('SELECT id, name, slug FROM pools where name = "Unknown"');
-    connection.release();
+    const [rows] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
     return <PoolTag>rows[0];
   }

@@ -42,16 +38,11 @@ class PoolsRepository {
     query += ` GROUP BY pool_id
       ORDER BY COUNT(height) DESC`;

     // logger.debug(query);
-    const connection = await DB.getConnection();
     try {
-      const [rows] = await connection.query(query);
-      connection.release();
-
+      const [rows] = await DB.query(query);
       return <PoolInfo[]>rows;
     } catch (e) {
-      connection.release();
-      logger.err('$getPoolsInfo() error' + (e instanceof Error ? e.message : e));
+      logger.err(`Cannot generate pools stats. Reason: ` + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -65,15 +56,11 @@ class PoolsRepository {
       LEFT JOIN blocks on pools.id = blocks.pool_id AND blocks.blockTimestamp BETWEEN FROM_UNIXTIME(?) AND FROM_UNIXTIME(?)
       GROUP BY pools.id`;

-    const connection = await DB.getConnection();
     try {
-      const [rows] = await connection.query(query, [from, to]);
-      connection.release();
-
+      const [rows] = await DB.query(query, [from, to]);
       return <PoolInfo[]>rows;
     } catch (e) {
-      connection.release();
-      logger.err('$getPoolsInfoBetween() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot generate pools blocks count. Reason: ' + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
@@ -87,15 +74,11 @@ class PoolsRepository {
       FROM pools
       WHERE pools.slug = ?`;

-    let connection;
     try {
-      connection = await DB.getConnection();
-
-      const [rows] = await connection.query(query, [slug]);
-      connection.release();
+      const [rows]: any[] = await DB.query(query, [slug]);

       if (rows.length < 1) {
-        logger.debug(`$getPool(): slug does not match any known pool`);
+        logger.debug(`This slug does not match any known pool`);
         return null;
       }

@@ -108,8 +91,7 @@ class PoolsRepository {

       return rows[0];
     } catch (e) {
-      connection.release();
-      logger.err('$getPool() error' + (e instanceof Error ? e.message : e));
+      logger.err('Cannot get pool from db. Reason: ' + (e instanceof Error ? e.message : e));
       throw e;
     }
   }
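
$getPool() above resolves a slug to a pool row, or null when the slug is unknown. A hypothetical call site (the route handler is not part of this diff) maps that null to a not-found response:

    import PoolsRepository from './PoolsRepository';

    // Illustrative only: slugs typically arrive from a /mining/pool/:slug
    // route, and a null result becomes a 404 for the client.
    async function loadPoolOr404(slug: string) {
      const pool = await PoolsRepository.$getPool(slug);
      if (pool === null) {
        throw new Error('404: pool not found');
      }
      return pool;
    }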
@@ -1,8 +1,8 @@
 const https = require('https');
-import poolsParser from "../api/pools-parser";
-import config from "../config";
-import { DB } from "../database";
-import logger from "../logger";
+import poolsParser from '../api/pools-parser';
+import config from '../config';
+import DB from '../database';
+import logger from '../logger';

 /**
  * Maintain the most recent version of pools.json
@@ -48,7 +48,7 @@ class PoolsUpdater {

     } catch (e) {
       this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
-      logger.err('PoolsUpdater failed. Will try again in 24h. Error: ' + e);
+      logger.err('PoolsUpdater failed. Will try again in 24h. Reason: ' + (e instanceof Error ? e.message : e));
     }
   }

@@ -64,15 +64,11 @@ class PoolsUpdater {
    * Fetch our latest pools.json sha from the db
    */
   private async updateDBSha(githubSha: string) {
-    let connection;
     try {
-      connection = await DB.getConnection();
-      await connection.query('DELETE FROM state where name="pools_json_sha"');
-      await connection.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
-      connection.release();
+      await DB.query('DELETE FROM state where name="pools_json_sha"');
+      await DB.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
     } catch (e) {
-      logger.err('Unable save github pools.json sha into the DB, error: ' + e);
-      connection.release();
+      logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e));
       return undefined;
     }
   }
@@ -81,15 +77,11 @@ class PoolsUpdater {
    * Fetch our latest pools.json sha from the db
    */
   private async getShaFromDb(): Promise<string | undefined> {
-    let connection;
     try {
-      connection = await DB.getConnection();
-      const [rows] = await connection.query('SELECT string FROM state WHERE name="pools_json_sha"');
-      connection.release();
+      const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
       return (rows.length > 0 ? rows[0].string : undefined);
     } catch (e) {
-      logger.err('Unable fetch pools.json sha from DB, error: ' + e);
-      connection.release();
+      logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e));
       return undefined;
     }
   }
@@ -106,7 +98,7 @@ class PoolsUpdater {
       }
     }

-    logger.err('Unable to find latest pools.json sha from github');
+    logger.err('Cannot find latest pools.json sha from github api response');
     return undefined;
   }

@@ -138,9 +130,9 @@ class PoolsUpdater {
       });

       request.on('error', (error) => {
-        logger.err('Query failed with error: ' + error);
+        logger.err('Github API query failed. Reason: ' + error);
         reject(error);
       })
     });
   });
 }
 }
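
Taken together, the PoolsUpdater methods above form a simple cache-validation loop. A condensed, hypothetical restatement of that flow - fetchGithubSha and importPoolsJson stand in for the fetching and parsing code that is not shown in this diff:

    // Compare the latest pools.json sha on GitHub with the sha recorded in
    // the `state` table; only re-import the file when they differ.
    async function runPoolsUpdate(updater: {
      fetchGithubSha(): Promise<string | undefined>;
      getShaFromDb(): Promise<string | undefined>;
      importPoolsJson(): Promise<void>;
      updateDBSha(sha: string): Promise<void>;
    }): Promise<void> {
      const githubSha = await updater.fetchGithubSha();
      if (githubSha === undefined) {
        return; // GitHub response could not be parsed; retry on the next run
      }
      if (githubSha === await updater.getShaFromDb()) {
        return; // pools.json is already up to date
      }
      await updater.importPoolsJson();
      await updater.updateDBSha(githubSha);
    }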
@@ -137,13 +137,17 @@ let routes: Routes = [
     path: 'docs/api/:type',
     component: DocsComponent
   },
+  {
+    path: 'docs/faq',
+    component: DocsComponent
+  },
   {
     path: 'docs/api',
     redirectTo: 'docs/api/rest'
   },
   {
     path: 'docs',
-    redirectTo: 'docs/api/rest'
+    redirectTo: 'docs/faq'
   },
   {
     path: 'api',
@@ -276,13 +280,17 @@ let routes: Routes = [
     path: 'docs/api/:type',
     component: DocsComponent
   },
+  {
+    path: 'docs/faq',
+    component: DocsComponent
+  },
   {
     path: 'docs/api',
     redirectTo: 'docs/api/rest'
   },
   {
     path: 'docs',
-    redirectTo: 'docs/api/rest'
+    redirectTo: 'docs/faq'
   },
   {
     path: 'api',
@@ -408,13 +416,17 @@ let routes: Routes = [
     path: 'docs/api/:type',
     component: DocsComponent
   },
+  {
+    path: 'docs/faq',
+    component: DocsComponent
+  },
   {
     path: 'docs/api',
     redirectTo: 'docs/api/rest'
   },
   {
     path: 'docs',
-    redirectTo: 'docs/api/rest'
+    redirectTo: 'docs/faq'
   },
   {
     path: 'api',
@@ -56,6 +56,7 @@ import { faFilter, faAngleDown, faAngleUp, faAngleRight, faAngleLeft, faBolt, fa
 import { ApiDocsComponent } from './components/docs/api-docs.component';
 import { DocsComponent } from './components/docs/docs.component';
 import { ApiDocsNavComponent } from './components/docs/api-docs-nav.component';
+import { NoSanitizePipe } from './shared/pipes/no-sanitize.pipe';
 import { CodeTemplateComponent } from './components/docs/code-template.component';
 import { TermsOfServiceComponent } from './components/terms-of-service/terms-of-service.component';
 import { PrivacyPolicyComponent } from './components/privacy-policy/privacy-policy.component';
@@ -121,6 +122,7 @@ import { BlockRewardsGraphComponent } from './components/block-rewards-graph/blo
     DashboardComponent,
     DifficultyComponent,
     ApiDocsComponent,
+    NoSanitizePipe,
     CodeTemplateComponent,
     TermsOfServiceComponent,
     PrivacyPolicyComponent,
@@ -4411,3 +4411,177 @@ export const restApiDocsData = [
   },
 ];

+export const faqData = [
+  {
+    type: "category",
+    category: "basics",
+    fragment: "basics",
+    title: "Basics",
+    showConditions: bitcoinNetworks
+  },
+  {
+    type: "endpoint",
+    category: "basics",
+    showConditions: bitcoinNetworks,
+    fragment: "what-is-a-mempool",
+    title: "What is a mempool?",
+    answer: "<p>A mempool (short for \"memory pool\") holds the queue of pending and unconfirmed transactions for a cryptocurrency network node. There is no one global mempool: every node on the network maintains its own mempool, so different nodes may hold different transactions in their mempools.</p>"
+  },
+  {
+    type: "endpoint",
+    category: "basics",
+    showConditions: bitcoinNetworks,
+    fragment: "what-is-a-mempool-explorer",
+    title: "What is a mempool explorer?",
+    answer: "<p>A mempool explorer is a tool that enables you to view real-time and historical information about a node's mempool, visualize its transactions, and search and view those transactions.</p><p>The mempool.space website invented the concept of visualizing a Bitcoin node's mempool as <b>projected blocks</b>. These blocks are the inspiration for our half-filled block logo.</p><p>Projected blocks are on the left of the dotted white line, and confirmed blocks are on the right.</p>"
+  },
+  {
+    type: "endpoint",
+    category: "basics",
+    showConditions: bitcoinNetworks,
+    fragment: "what-is-a-blockchain",
+    title: "What is a blockchain?",
+    answer: "<p>A blockchain is a distributed ledger that records the transactions for a cryptocurrency network. Miners amend the blockchain ledger by mining new blocks.</p>"
+  },
+  {
+    type: "endpoint",
+    category: "basics",
+    showConditions: bitcoinNetworks,
+    fragment: "what-is-a-block-explorer",
+    title: "What is a block explorer?",
+    answer: "<p>A block explorer is a tool that enables you to explore real-time and historical information about the blockchain of a cryptocurrency. This includes data related to blocks, transactions, addresses, and more.</p>"
+  },
+  {
+    type: "endpoint",
+    category: "basics",
+    showConditions: bitcoinNetworks,
+    fragment: "what-is-mining",
+    title: "What is mining?",
+    answer: "<p>Mining is the process by which unconfirmed transactions in a mempool are confirmed into a block on a blockchain. Miners select unconfirmed transactions from their mempools and arrange them into a block such that they solve a particular math problem.</p><p>The first miner on the network to find a suitable block earns all the transaction fees from the transactions in that block. As a result, miners tend to prioritize transactions with higher transaction fees.</p>"
+  },
+  {
+    type: "endpoint",
+    category: "basics",
+    showConditions: bitcoinNetworks,
+    fragment: "what-are-mining-pools",
+    title: "What are mining pools?",
+    answer: "Mining pools are groups of miners that combine their computational power in order to increase the probability of finding new blocks."
+  },
+  {
+    type: "category",
+    category: "help",
+    fragment: "help-stuck-transaction",
+    title: "Help! My transaction is stuck",
+    showConditions: bitcoinNetworks
+  },
+  {
+    type: "endpoint",
+    category: "help",
+    showConditions: bitcoinNetworks,
+    fragment: "why-is-transaction-stuck-in-mempool",
+    title: "Why is my transaction stuck in the mempool?",
+    answer: "<p>Miners decide which transactions are included in the blocks they mine, so they usually prioritize transactions which pay them the highest transaction fees (transaction fees are measured in sats per virtual byte, or sat/vB). If it's been a while and your transaction hasn't been confirmed, your transaction probably has a lower transaction fee relative to other transactions currently in the mempool.</p>"
+  },
+  {
+    type: "endpoint",
+    category: "help",
+    showConditions: bitcoinNetworks,
+    fragment: "how-to-get-transaction-confirmed-quickly",
+    title: "How can I get my transaction confirmed more quickly?",
+    answer: "<p>If your wallet supports RBF, and if your transaction was created with RBF enabled, you can bump the fee higher.</p><p>Otherwise, if your wallet does not support RBF, you can increase the effective fee rate of your transaction by spending its change output using a higher fee. This is called CPFP.</p>"
+  },
+  {
+    type: "endpoint",
+    category: "help",
+    showConditions: bitcoinNetworks,
+    fragment: "how-prevent-stuck-transaction",
+    title: "How can I prevent a transaction from getting stuck in the future?",
+    answer: "<p>You must use an adequate transaction fee commensurate with how quickly you need the transaction to be confirmed. Also consider using RBF if your wallet supports it so that you can bump the fee rate if needed.</p>"
+  },
+  {
+    type: "category",
+    category: "using",
+    fragment: "using-this-website",
+    title: "Using this website",
+    showConditions: bitcoinNetworks
+  },
+  {
+    type: "endpoint",
+    category: "how-to",
+    showConditions: bitcoinNetworks,
+    fragment: "looking-up-transactions",
+    title: "How can I look up a transaction?",
+    answer: "Search for the transaction ID in the search box at the top-right of this website."
+  },
+  {
+    type: "endpoint",
+    category: "how-to",
+    showConditions: bitcoinNetworks,
+    fragment: "looking-up-addresses",
+    title: "How can I look up an address?",
+    answer: "Search for the address in the search box at the top-right of this website."
+  },
+  {
+    type: "endpoint",
+    category: "how-to",
+    showConditions: bitcoinNetworks,
+    fragment: "looking-up-blocks",
+    title: "How can I look up a block?",
+    answer: "Search for the block number (or block hash) in the search box at the top-right of this website."
+  },
+  {
+    type: "endpoint",
+    category: "how-to",
+    showConditions: bitcoinNetworks,
+    fragment: "looking-up-fee-estimates",
+    title: "How can I look up fee estimates?",
+    answer: "<p>See real-time fee estimates on <a href='/'>the main dashboard</a>.</p><p>Low priority is suggested for confirmation within 6 blocks (~1 hour), Medium priority is suggested for confirmation within 3 blocks (~30 minutes), and High priority is suggested for confirmation in the next block (~10 minutes).</p>"
+  },
+  {
+    type: "endpoint",
+    category: "how-to",
+    showConditions: bitcoinNetworks,
+    fragment: "looking-up-historical-trends",
+    title: "How can I explore historical trends?",
+    answer: "See the <a href='/graphs'>graphs page</a> for aggregate trends over time: mempool size over time and incoming transaction velocity over time."
+  },
+  {
+    type: "category",
+    category: "advanced",
+    fragment: "advanced",
+    title: "Advanced",
+    showConditions: bitcoinNetworks
+  },
+  {
+    type: "endpoint",
+    category: "advanced",
+    showConditions: bitcoinNetworks,
+    fragment: "who-runs-this-website",
+    title: "Who runs this website?",
+    answer: "The official mempool.space website is operated by The Mempool Open Source Project. See more information on our <a href='/about'>About page</a>. There are also many unofficial instances of this website operated by individual members of the Bitcoin community."
+  },
+  {
+    type: "endpoint",
+    category: "advanced",
+    showConditions: bitcoinNetworks,
+    fragment: "host-my-own-instance-raspberry-pi",
+    title: "How can I host my own instance on a Raspberry Pi?",
+    answer: "We support one-click installation on a number of Raspberry Pi full-node distros including Umbrel, RaspiBlitz, MyNode, and RoninDojo."
+  },
+  {
+    type: "endpoint",
+    category: "advanced",
+    showConditions: bitcoinNetworks,
+    fragment: "host-my-own-instance-linux-server",
+    title: "How can I host my own instance on a Linux server?",
+    answer: "You can manually install mempool on your own Linux server, but this requires advanced sysadmin skills since you will be manually configuring everything. We do not provide support for manual deployments."
+  },
+  {
+    type: "endpoint",
+    category: "advanced",
+    showConditions: bitcoinNetworks,
+    fragment: "install-mempool-with-docker",
+    title: "Can I install Mempool using Docker?",
+    answer: "Yes, we publish Docker images (or you can build your own), and provide <a href='https://github.com/mempool/mempool/tree/master/docker' target='_blank'>an example docker-compose template</a>."
+  }
+];
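
The CPFP answer above glosses over the arithmetic: miners evaluate a stuck parent together with its high-fee child as one package, so what matters is total fees over total virtual size. A small illustration with made-up numbers:

    // Package (CPFP) fee rate in sat/vB; all figures below are invented.
    function packageFeeRate(parentFee: number, parentVsize: number,
                            childFee: number, childVsize: number): number {
      return (parentFee + childFee) / (parentVsize + childVsize);
    }

    const stuckParent = { fee: 200, vsize: 200 };   // 1 sat/vB - too low to confirm
    const feeBumpChild = { fee: 2000, vsize: 150 }; // spends the parent's change output

    console.log(packageFeeRate(stuckParent.fee, stuckParent.vsize,
                               feeBumpChild.fee, feeBumpChild.vsize).toFixed(1));
    // ~6.3 sat/vB effective rate for the pair, instead of the parent's 1 sat/vB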
@@ -1,4 +1,4 @@
-<div *ngFor="let item of restDocs">
+<div *ngFor="let item of tabData">
   <p *ngIf="( item.type === 'category' ) && ( item.showConditions.indexOf(network.val) > -1 )">{{ item.title }}</p>
   <a *ngIf="( item.type !== 'category' ) && ( item.showConditions.indexOf(network.val) > -1 )" [routerLink]="['./']" fragment="{{ item.fragment }}" (click)="navLinkClick($event)">{{ item.title }}</a>
 </div>
@@ -1,5 +1,6 @@
 import { Component, OnInit, Input, Output, EventEmitter } from '@angular/core';
 import { restApiDocsData } from './api-docs-data';
+import { faqData } from './api-docs-data';

 @Component({
   selector: 'app-api-docs-nav',
@@ -9,13 +10,18 @@ import { restApiDocsData } from './api-docs-data';
 export class ApiDocsNavComponent implements OnInit {

   @Input() network: any;
+  @Input() whichTab: string;
   @Output() navLinkClickEvent: EventEmitter<any> = new EventEmitter();
-  restDocs: any[];
+  tabData: any[];

   constructor() { }

   ngOnInit(): void {
-    this.restDocs = restApiDocsData;
+    if( this.whichTab === 'rest' ) {
+      this.tabData = restApiDocsData;
+    } else if( this.whichTab === 'faq' ) {
+      this.tabData = faqData;
+    }
   }

   navLinkClick( event ) {
@@ -1,20 +1,45 @@
 <ng-container *ngIf="{ val: network$ | async } as network">
   <div class="container-xl text-left">

-    <div id="restAPI" *ngIf="restTabActivated">
+    <div id="faq" *ngIf="whichTab === 'faq'">

       <div id="doc-nav-desktop" class="hide-on-mobile" [ngClass]="desktopDocsNavPosition">
-        <app-api-docs-nav (navLinkClickEvent)="anchorLinkClick( $event )" [network]="{ val: network$ | async }"></app-api-docs-nav>
+        <app-api-docs-nav (navLinkClickEvent)="anchorLinkClick( $event )" [network]="{ val: network$ | async }" [whichTab]="whichTab"></app-api-docs-nav>
       </div>

       <div class="doc-content">

+        <div class="doc-item-container" *ngFor="let item of faq">
+          <h3 *ngIf="item.type === 'category'">{{ item.title }}</h3>
+          <div *ngIf="item.type !== 'category'" class="endpoint-container" id="{{ item.fragment }}">
+            <a id="{{ item.fragment + '-tab-header' }}" class="section-header" (click)="anchorLinkClick( $event )" [routerLink]="['./']" fragment="{{ item.fragment }}"><table><tr><td>{{ item.title }}</td><td><span>{{ item.category }}</span></td></tr></table></a>
+            <div class="endpoint-content">
+              <div class="endpoint" [innerHTML]="item.answer | noSanitize"></div>
+              <div class="blockchain-wrapper" *ngIf="item.fragment === 'what-is-a-mempool-explorer'">
+                <app-blockchain></app-blockchain>
+              </div>
+            </div>
+          </div>
+        </div>
+
+      </div>
+
+    </div>
+
+    <div id="restAPI" *ngIf="whichTab === 'rest'">
+
+      <div id="doc-nav-desktop" class="hide-on-mobile" [ngClass]="desktopDocsNavPosition">
+        <app-api-docs-nav (navLinkClickEvent)="anchorLinkClick( $event )" [network]="{ val: network$ | async }" [whichTab]="whichTab"></app-api-docs-nav>
+      </div>
+
+      <div class="doc-content">
+
         <p class="hide-on-mobile no-bottom-space">Reference for the {{ network.val === '' ? 'Bitcoin' : network.val.charAt(0).toUpperCase() + network.val.slice(1) }} <ng-container i18n="api-docs.title">API service</ng-container>.</p>

-        <div *ngFor="let item of restDocs">
+        <div class="doc-item-container" *ngFor="let item of restDocs">
           <h3 *ngIf="( item.type === 'category' ) && ( item.showConditions.indexOf(network.val) > -1 )">{{ item.title }}</h3>
           <div *ngIf="( item.type !== 'category' ) && ( item.showConditions.indexOf(network.val) > -1 )" class="endpoint-container" id="{{ item.fragment }}">
-            <a class="section-header" (click)="anchorLinkClick( $event )" [routerLink]="['./']" fragment="{{ item.fragment }}">{{ item.title }} <span>{{ item.category }}</span></a>
+            <a id="{{ item.fragment + '-tab-header' }}" class="section-header" (click)="anchorLinkClick( $event )" [routerLink]="['./']" fragment="{{ item.fragment }}">{{ item.title }} <span>{{ item.category }}</span></a>
             <div class="endpoint-content">
               <div class="endpoint">
                 <div class="subtitle" i18n="Api docs endpoint">Endpoint</div>
@@ -65,7 +90,7 @@
           </div>
         </div>

-    <div id="websocketAPI" *ngIf="!restTabActivated && ( network.val !== 'bisq' )">
+    <div id="websocketAPI" *ngIf="( whichTab === 'websocket' ) && ( network.val !== 'bisq' )">
       <div class="api-category">
         <div class="websocket">
           <div class="endpoint">
@@ -152,6 +152,14 @@ h3 {
   float: right;
 }

+.endpoint-container .section-header table {
+  width: 100%;
+}
+
+.endpoint-container .section-header table td:first-child {
+  padding-right: 24px;
+}
+
 #doc-nav-mobile {
   position: fixed;
   top: 20px;
@@ -179,6 +187,16 @@ h3 {
   border-radius: 0.5rem 0.5rem 0 0;
 }

+.blockchain-wrapper {
+  position: relative;
+  width: 100%;
+  overflow: auto;
+  scrollbar-width: none;
+}
+.blockchain-wrapper::-webkit-scrollbar {
+  display: none;
+}
+
 @media (max-width: 992px) {

   .hide-on-mobile {
@@ -231,4 +249,8 @@ h3 {
   h3 {
     display: none;
   }
+
+  .doc-item-container:last-of-type .endpoint-container {
+    margin-bottom: 4rem;
+  }
 }
@@ -4,7 +4,7 @@ import { Observable, merge, of } from 'rxjs';
 import { SeoService } from 'src/app/services/seo.service';
 import { tap } from 'rxjs/operators';
 import { ActivatedRoute } from "@angular/router";
-import { restApiDocsData, wsApiDocsData } from './api-docs-data';
+import { faqData, restApiDocsData, wsApiDocsData } from './api-docs-data';

 @Component({
   selector: 'app-api-docs',
@@ -18,8 +18,9 @@ export class ApiDocsComponent implements OnInit {
   env: Env;
   code: any;
   baseNetworkUrl = '';
-  @Input() restTabActivated: Boolean;
+  @Input() whichTab: string;
   desktopDocsNavPosition = "relative";
+  faq: any[];
   restDocs: any[];
   wsDocs: any;
   screenWidth: number;
@@ -33,7 +34,9 @@ export class ApiDocsComponent implements OnInit {
   ngAfterViewInit() {
     const that = this;
     setTimeout( () => {
-      this.openEndpointContainer( this.route.snapshot.fragment );
+      if( this.route.snapshot.fragment ) {
+        this.openEndpointContainer( this.route.snapshot.fragment );
+      }
       window.addEventListener('scroll', function() {
         that.desktopDocsNavPosition = ( window.pageYOffset > 182 ) ? "fixed" : "relative";
       });
@@ -62,6 +65,7 @@ export class ApiDocsComponent implements OnInit {

     this.hostname = `${document.location.protocol}//${this.hostname}`;

+    this.faq = faqData;
     this.restDocs = restApiDocsData;
     this.wsDocs = wsApiDocsData;

@@ -71,7 +75,16 @@ export class ApiDocsComponent implements OnInit {
   }

   anchorLinkClick( event: any ) {
-    const targetId = event.target.hash.substring(1);
+    let targetId = "";
+    if( event.target.nodeName === "A" ) {
+      targetId = event.target.hash.substring(1);
+    } else {
+      let element = event.target;
+      while( element.nodeName !== "A" ) {
+        element = element.parentElement;
+      }
+      targetId = element.hash.substring(1);
+    }
     if( this.route.snapshot.fragment === targetId ) {
       document.getElementById( targetId ).scrollIntoView();
     }
@@ -79,7 +92,8 @@ export class ApiDocsComponent implements OnInit {
   }

   openEndpointContainer( targetId ) {
-    if( ( window.innerWidth <= 992 ) && this.restTabActivated && targetId ) {
+    const tabHeaderHeight = document.getElementById( targetId + "-tab-header" ).scrollHeight;
+    if( ( window.innerWidth <= 992 ) && ( ( this.whichTab === 'rest' ) || ( this.whichTab === 'faq' ) ) && targetId ) {
       const endpointContainerEl = document.querySelector<HTMLElement>( "#" + targetId );
       const endpointContentEl = document.querySelector<HTMLElement>( "#" + targetId + " .endpoint-content" );
       const endPointContentElHeight = endpointContentEl.clientHeight;
@@ -90,8 +104,8 @@ export class ApiDocsComponent implements OnInit {
         endpointContentEl.style.opacity = "0";
         endpointContentEl.classList.remove( "open" );
       } else {
-        endpointContainerEl.style.height = endPointContentElHeight + 90 + "px";
-        endpointContentEl.style.top = "90px";
+        endpointContainerEl.style.height = endPointContentElHeight + tabHeaderHeight + 28 + "px";
+        endpointContentEl.style.top = tabHeaderHeight + 28 + "px";
         endpointContentEl.style.opacity = "1";
         endpointContentEl.classList.add( "open" );
       }
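
The while-loop added to anchorLinkClick() above walks from the clicked element - possibly a <td> inside the new header table - up to the enclosing <a>. For reference, the standard Element.closest() API expresses the same walk in one call; a hypothetical equivalent:

    // Returns the fragment id of the anchor enclosing the click target,
    // or an empty string when the click landed outside any <a>.
    function anchorTargetId(event: MouseEvent): string {
      const anchor = (event.target as Element).closest('a');
      return anchor ? anchor.hash.substring(1) : '';
    }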
@@ -5,20 +5,29 @@

   <ul ngbNav #nav="ngbNav" [(activeId)]="activeTab" class="nav-tabs">

-    <li [ngbNavItem]="0">
-      <a ngbNavLink routerLink="../rest">API - REST</a>
+    <li [ngbNavItem]="0" *ngIf="showFaqTab">
+      <a ngbNavLink [routerLink]="['/docs/faq' | relativeUrl]">FAQ</a>
       <ng-template ngbNavContent>

-        <app-api-docs [restTabActivated]="true"></app-api-docs>
+        <app-api-docs [whichTab]="'faq'"></app-api-docs>

       </ng-template>
     </li>

-    <li [ngbNavItem]="1" *ngIf="showWebSocketTab">
-      <a ngbNavLink routerLink="../websocket">API - WebSocket</a>
+    <li [ngbNavItem]="1">
+      <a ngbNavLink [routerLink]="['/docs/api/rest' | relativeUrl]">API - REST</a>
       <ng-template ngbNavContent>

-        <app-api-docs [restTabActivated]="false"></app-api-docs>
+        <app-api-docs [whichTab]="'rest'"></app-api-docs>

       </ng-template>
     </li>
+
+    <li [ngbNavItem]="2" *ngIf="showWebSocketTab">
+      <a ngbNavLink [routerLink]="['/docs/api/websocket' | relativeUrl]">API - WebSocket</a>
+      <ng-template ngbNavContent>
+
+        <app-api-docs [whichTab]="'websocket'"></app-api-docs>
+
+      </ng-template>
+    </li>
@@ -1,6 +1,7 @@
 import { Component, OnInit, HostBinding } from '@angular/core';
 import { ActivatedRoute } from '@angular/router';
 import { Env, StateService } from 'src/app/services/state.service';
+import { WebsocketService } from 'src/app/services/websocket.service';

 @Component({
   selector: 'app-docs',
@@ -12,19 +13,31 @@ export class DocsComponent implements OnInit {
   activeTab = 0;
   env: Env;
   showWebSocketTab = true;
+  showFaqTab = true;

   @HostBinding('attr.dir') dir = 'ltr';

   constructor(
     private route: ActivatedRoute,
     private stateService: StateService,
+    private websocket: WebsocketService,
   ) { }

   ngOnInit(): void {
+    this.websocket.want(['blocks']);
     const url = this.route.snapshot.url;
-    this.activeTab = ( url[2].path === "rest" ) ? 0 : 1;
+    if( url[1].path === "faq" ) {
+      this.activeTab = 0;
+    } else if( url[2].path === "rest" ) {
+      this.activeTab = 1;
+    } else {
+      this.activeTab = 2;
+    }

     this.env = this.stateService.env;
-    this.showWebSocketTab = ( ! ( ( this.env.BASE_MODULE === "bisq" ) || ( this.stateService.network === "bisq" ) || ( this.stateService.network === "liquidtestnet" ) ) );
+    this.showWebSocketTab = ( ! ( ( this.stateService.network === "bisq" ) || ( this.stateService.network === "liquidtestnet" ) ) );
+    this.showFaqTab = ( this.env.BASE_MODULE === 'mempool' ) ? true : false;

     document.querySelector<HTMLElement>( "html" ).style.scrollBehavior = "smooth";
   }
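
The if/else chain added to ngOnInit() above maps the current URL segments to a tab index: 0 for /docs/faq, 1 for /docs/api/rest, 2 for the WebSocket docs. A hypothetical restatement that makes the mapping explicit:

    // Equivalent of the diff's branching, with optional chaining guarding
    // against short URLs; indices must match the [ngbNavItem] values.
    function tabIndexForUrl(url: { path: string }[]): number {
      if (url[1]?.path === 'faq') { return 0; }
      if (url[2]?.path === 'rest') { return 1; }
      return 2;
    }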
@@ -111,7 +111,10 @@
           <td style="text-align: left;" [innerHTML]="vin.inner_redeemscript_asm | asmStyler"></td>
         </tr>
         <tr *ngIf="vin.inner_witnessscript_asm">
-          <td i18n="transactions-list.p2wsh-witness-script">P2WSH witness script</td>
+          <td *ngIf="vin.prevout && vin.prevout.scriptpubkey_type == 'v1_p2tr'; else p2wsh" i18n="transactions-list.p2tr-tapscript">P2TR tapscript</td>
+          <ng-template #p2wsh>
+            <td i18n="transactions-list.p2wsh-witness-script">P2WSH witness script</td>
+          </ng-template>
           <td style="text-align: left;" [innerHTML]="vin.inner_witnessscript_asm | asmStyler"></td>
         </tr>
         <tr>
@@ -281,6 +281,7 @@ export class AsmStylerPipe implements PipeTransform {
       case 'CHECKSIGVERIFY':
       case 'CHECKMULTISIG':
       case 'CHECKMULTISIGVERIFY':
+      case 'CHECKSIGADD':
         style = 'crypto';
         break;

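
OP_CHECKSIGADD is the batch-friendly signature-checking opcode introduced for Tapscript in BIP 342 as the replacement for CHECKMULTISIG-style multisig, which is why it is styled here alongside the other signature-checking opcodes.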
frontend/src/app/shared/pipes/no-sanitize.pipe.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
+import { Pipe, PipeTransform } from '@angular/core';
+import { DomSanitizer, SafeHtml } from '@angular/platform-browser';
+
+@Pipe({ name: 'noSanitize' })
+export class NoSanitizePipe implements PipeTransform {
+  constructor(private domSanitizer: DomSanitizer) { }
+
+  transform(html: string): SafeHtml {
+    return this.domSanitizer.bypassSecurityTrustHtml(html);
+  }
+}
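
The new pipe deliberately bypasses Angular's built-in HTML sanitizer, which is why it is applied only to the hardcoded FAQ answers elsewhere in this commit. A minimal usage sketch - component name and string are illustrative:

    import { Component } from '@angular/core';

    // Safe only because `answer` is a trusted string bundled with the app;
    // never pipe user-supplied HTML through noSanitize.
    @Component({
      selector: 'app-faq-answer-demo',
      template: `<div class="endpoint" [innerHTML]="answer | noSanitize"></div>`,
    })
    export class FaqAnswerDemoComponent {
      answer = '<p>A mempool holds the queue of unconfirmed transactions.</p>';
    }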
@@ -1,2 +1,2 @@
-/var/log/mempool       640  10  *     168   J
-/var/log/mempool.debug 640  10  1000  *     J
+/var/log/mempool       640  10  *     @T00  C
+/var/log/mempool.debug 640  10  *     @T00  C
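
For context, the newsyslog.conf fields are logfile, mode, count, size, when, and flags: the old entries rotated on a 168-hour (weekly) or 1000 KB schedule and bzip2-compressed the archives (J), while the new entries rotate both logs daily at midnight (@T00) and create the file if it is missing (C), keeping 10 uncompressed archives.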