Mirror of https://github.com/mempool/mempool.git (synced 2025-02-24 14:50:52 +01:00)

Merge pull request #2869 from mempool/nymkappa/feature/rewrite-pool-parser

Rewrite mining pools parser

Commit b7e6b6da13
6 changed files with 272 additions and 261 deletions
@@ -36,7 +36,7 @@ describe('Mempool Backend Config', () => {
       USER_AGENT: 'mempool',
       STDOUT_LOG_MIN_PRIORITY: 'debug',
       POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
-      POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
+      POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
       AUDIT: false,
       ADVANCED_GBT_AUDIT: false,
       ADVANCED_GBT_MEMPOOL: false,
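The test fixture now expects the v2 pool list. The exact pools-v2.json schema is not shown in this diff; judging from how setMiningPools() and $insertNewMiningPool() consume entries further down, each one plausibly looks like this (field names inferred, not authoritative):

// Hypothetical shape of a pools-v2.json entry, inferred from setMiningPools()
// and $insertNewMiningPool() in this diff; not an official schema.
interface PoolDefinition {
  id: number;          // stable unique id, persisted as pools.unique_id
  name: string;
  link: string;
  addresses: string[]; // coinbase payout addresses
  tags: string[];      // coinbase tag regexes; renamed to `regexes` by setMiningPools()
}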
@@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
 import { RowDataPacket } from 'mysql2';

 class DatabaseMigration {
-  private static currentVersion = 55;
+  private static currentVersion = 56;
   private queryTimeout = 3600_000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];
@@ -62,8 +62,8 @@ class DatabaseMigration {

     if (databaseSchemaVersion <= 2) {
       // Disable some spam logs when they're not relevant
-      this.uniqueLogs.push(this.blocksTruncatedMessage);
-      this.uniqueLogs.push(this.hashratesTruncatedMessage);
+      this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
+      this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
     }

     logger.debug('MIGRATIONS: Current state.schema_version ' + databaseSchemaVersion);
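uniqueLog() deduplicates messages that migrations may emit many times (its PoolsParser twin appears later in this diff). A minimal standalone sketch of the pattern:

// Minimal sketch of the log-deduplication idea behind uniqueLog():
// each distinct message is emitted at most once per process lifetime.
const seenMessages: string[] = [];

function uniqueLog(loggerFunction: (msg: string) => void, msg: string): void {
  if (seenMessages.includes(msg)) {
    return; // already logged once
  }
  seenMessages.push(msg);
  loggerFunction(msg);
}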
@@ -490,6 +490,16 @@ class DatabaseMigration {
       await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
       await this.updateToSchemaVersion(55);
     }

+    if (databaseSchemaVersion < 56) {
+      await this.$executeQuery('ALTER TABLE pools ADD unique_id int NOT NULL DEFAULT -1');
+      await this.$executeQuery('TRUNCATE TABLE `blocks`');
+      this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
+      await this.$executeQuery('DELETE FROM `pools`');
+      await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
+      this.uniqueLog(logger.notice, '`pools` table has been truncated');
+      await this.updateToSchemaVersion(56);
+    }
   }

   /**
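Schema 56 re-keys the pools table: unique_id defaults to -1 so rows that were never reinserted from pools-v2.json remain detectable, the Unknown pool later claims unique_id 0, and blocks must be truncated because the pools.id values they reference are regenerated. A hypothetical post-migration sanity check (not part of this diff):

// Hypothetical check after migrating to schema 56: every pool row should now
// carry a real unique_id; -1 would mean a leftover pre-migration row.
const [stale]: any[] = await DB.query('SELECT id, name FROM pools WHERE unique_id = -1');
if (stale.length > 0) {
  logger.warn(`${stale.length} pool(s) still have the default unique_id of -1`);
}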
@@ -1,15 +1,8 @@
 import DB from '../database';
 import logger from '../logger';
 import config from '../config';
-import BlocksRepository from '../repositories/BlocksRepository';
-
-interface Pool {
-  name: string;
-  link: string;
-  regexes: string[];
-  addresses: string[];
-  slug: string;
-}
+import PoolsRepository from '../repositories/PoolsRepository';
+import { PoolTag } from '../mempool.interfaces';

 class PoolsParser {
   miningPools: any[] = [];
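The file-local Pool interface gives way to the shared PoolTag from mempool.interfaces. Its definition is not part of this diff; from the fields used below it presumably resembles:

// Assumed shape of PoolTag (definition not shown in this diff); fields
// inferred from how PoolsParser and PoolsRepository use it below.
interface PoolTag {
  id: number;        // auto-increment primary key of the pools table
  unique_id: number; // stable id carried over from pools-v2.json
  name: string;
  link: string;
  regexes: string;   // JSON-encoded string[], as stored in the db
  addresses: string; // JSON-encoded string[], as stored in the db
  slug: string;
}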
@@ -20,270 +13,142 @@ class PoolsParser {
     'addresses': '[]',
     'slug': 'unknown'
   };
-  slugWarnFlag = false;
+  private uniqueLogs: string[] = [];

+  private uniqueLog(loggerFunction: any, msg: string): void {
+    if (this.uniqueLogs.includes(msg)) {
+      return;
+    }
+    this.uniqueLogs.push(msg);
+    loggerFunction(msg);
+  }
+
+  public setMiningPools(pools): void {
+    for (const pool of pools) {
+      pool.regexes = pool.tags;
+      delete(pool.tags);
+    }
+    this.miningPools = pools;
+  }

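A quick usage sketch of the new entry points (sample data invented; assuming the module's default PoolsParser instance, as imported elsewhere in this diff):

// Hypothetical usage: feed the raw pools-v2.json array in, then reconcile.
const poolsJson = [
  { id: 105, name: 'Example Pool', link: 'https://pool.example/', tags: ['/ExamplePool/'], addresses: [] },
];
poolsParser.setMiningPools(poolsJson); // renames `tags` to `regexes` in place
await poolsParser.migratePoolsJson();  // inserts/renames/updates pools in the db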
   /**
-   * Parse the pools.json file, consolidate the data and dump it into the database
+   * Populate our db with updated mining pool definition
    */
-  public async migratePoolsJson(poolsJson: object): Promise<void> {
-    if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
-      return;
-    }
-
-    // First we save every entries without paying attention to pool duplication
-    const poolsDuplicated: Pool[] = [];
-
-    const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
-    for (let i = 0; i < coinbaseTags.length; ++i) {
-      poolsDuplicated.push({
-        'name': (<Pool>coinbaseTags[i][1]).name,
-        'link': (<Pool>coinbaseTags[i][1]).link,
-        'regexes': [coinbaseTags[i][0]],
-        'addresses': [],
-        'slug': ''
-      });
-    }
-    const addressesTags = Object.entries(poolsJson['payout_addresses']);
-    for (let i = 0; i < addressesTags.length; ++i) {
-      poolsDuplicated.push({
-        'name': (<Pool>addressesTags[i][1]).name,
-        'link': (<Pool>addressesTags[i][1]).link,
-        'regexes': [],
-        'addresses': [addressesTags[i][0]],
-        'slug': ''
-      });
-    }
-
-    // Then, we find unique mining pool names
-    const poolNames: string[] = [];
-    for (let i = 0; i < poolsDuplicated.length; ++i) {
-      if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
-        poolNames.push(poolsDuplicated[i].name);
-      }
-    }
-    logger.debug(`Found ${poolNames.length} unique mining pools`, logger.tags.mining);
-
-    // Get existing pools from the db
-    let existingPools;
-    try {
-      if (config.DATABASE.ENABLED === true) {
-        [existingPools] = await DB.query({ sql: 'SELECT * FROM pools;', timeout: 120000 });
-      } else {
-        existingPools = [];
-      }
-    } catch (e) {
-      logger.err('Cannot get existing pools from the database, skipping pools.json import', logger.tags.mining);
-      return;
-    }
-
-    this.miningPools = [];
-
-    // Finally, we generate the final consolidated pools data
-    const finalPoolDataAdd: Pool[] = [];
-    const finalPoolDataUpdate: Pool[] = [];
-    const finalPoolDataRename: Pool[] = [];
-    for (let i = 0; i < poolNames.length; ++i) {
-      let allAddresses: string[] = [];
-      let allRegexes: string[] = [];
-      const match = poolsDuplicated.filter((pool: Pool) => pool.name === poolNames[i]);
-
-      for (let y = 0; y < match.length; ++y) {
-        allAddresses = allAddresses.concat(match[y].addresses);
-        allRegexes = allRegexes.concat(match[y].regexes);
-      }
-
-      const finalPoolName = poolNames[i].replace(`'`, `''`); // To support single quote in names when doing db queries
-
-      let slug: string | undefined;
-      try {
-        slug = poolsJson['slugs'][poolNames[i]];
-      } catch (e) {
-        if (this.slugWarnFlag === false) {
-          logger.warn(`pools.json does not seem to contain the 'slugs' object`, logger.tags.mining);
-          this.slugWarnFlag = true;
-        }
-      }
-
-      if (slug === undefined) {
-        // Only keep alphanumerical
-        slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
-        logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`, logger.tags.mining);
-      }
-
-      const poolObj = {
-        'name': finalPoolName,
-        'link': match[0].link,
-        'regexes': allRegexes,
-        'addresses': allAddresses,
-        'slug': slug
-      };
-
-      const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
-      if (existingPool !== undefined) {
-        // Check if any data was actually updated
-        const equals = (a, b) =>
-          a.length === b.length &&
-          a.every((v, i) => v === b[i]);
-        if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
-          finalPoolDataUpdate.push(poolObj);
-        }
-      } else if (config.DATABASE.ENABLED) {
-        // Double check that if we're not just renaming a pool (same address same regex)
-        const [poolToRename]: any[] = await DB.query(`
-          SELECT * FROM pools
-          WHERE addresses = ? OR regexes = ?`,
-          [JSON.stringify(poolObj.addresses), JSON.stringify(poolObj.regexes)]
-        );
-        if (poolToRename && poolToRename.length > 0) {
-          // We're actually renaming an existing pool
-          finalPoolDataRename.push({
-            'name': poolObj.name,
-            'link': poolObj.link,
-            'regexes': allRegexes,
-            'addresses': allAddresses,
-            'slug': slug
-          });
-          logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`, logger.tags.mining);
-        } else {
-          logger.debug(`Add '${finalPoolName}' mining pool`, logger.tags.mining);
-          finalPoolDataAdd.push(poolObj);
-        }
-      }
-
-      this.miningPools.push({
-        'name': finalPoolName,
-        'link': match[0].link,
-        'regexes': JSON.stringify(allRegexes),
-        'addresses': JSON.stringify(allAddresses),
-        'slug': slug
-      });
-    }
-
-    if (config.DATABASE.ENABLED === false) { // Don't run db operations
-      logger.info('Mining pools.json import completed (no database)', logger.tags.mining);
-      return;
-    }
-
-    if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
-      finalPoolDataRename.length > 0
-    ) {
-      logger.debug(`Update pools table now`, logger.tags.mining);
-
-      // Add new mining pools into the database
-      let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
-      for (let i = 0; i < finalPoolDataAdd.length; ++i) {
-        queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
-        '${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
-        ${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
-      }
-      queryAdd = queryAdd.slice(0, -1) + ';';
-
-      // Updated existing mining pools in the database
-      const updateQueries: string[] = [];
-      for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
-        updateQueries.push(`
-          UPDATE pools
-          SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
-          regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
-          slug='${finalPoolDataUpdate[i].slug}'
-          WHERE name='${finalPoolDataUpdate[i].name}'
-        ;`);
-      }
-
-      // Rename mining pools
-      const renameQueries: string[] = [];
-      for (let i = 0; i < finalPoolDataRename.length; ++i) {
-        renameQueries.push(`
-          UPDATE pools
-          SET name='${finalPoolDataRename[i].name}', link='${finalPoolDataRename[i].link}',
-          slug='${finalPoolDataRename[i].slug}'
-          WHERE regexes='${JSON.stringify(finalPoolDataRename[i].regexes)}'
-          AND addresses='${JSON.stringify(finalPoolDataRename[i].addresses)}'
-        ;`);
-      }
-
-      try {
-        if (finalPoolDataAdd.length > 0 || updateQueries.length > 0) {
-          await this.$deleteBlocskToReindex(finalPoolDataUpdate);
-        }
-
-        if (finalPoolDataAdd.length > 0) {
-          await DB.query({ sql: queryAdd, timeout: 120000 });
-        }
-        for (const query of updateQueries) {
-          await DB.query({ sql: query, timeout: 120000 });
-        }
-        for (const query of renameQueries) {
-          await DB.query({ sql: query, timeout: 120000 });
-        }
-        await this.insertUnknownPool();
-        logger.info('Mining pools.json import completed', logger.tags.mining);
-      } catch (e) {
-        logger.err(`Cannot import pools in the database`, logger.tags.mining);
-        throw e;
-      }
-    }
-
-    try {
-      await this.insertUnknownPool();
-    } catch (e) {
-      logger.err(`Cannot insert unknown pool in the database`, logger.tags.mining);
-      throw e;
-    }
-  }
+  public async migratePoolsJson(): Promise<void> {
+    await this.$insertUnknownPool();
+
+    for (const pool of this.miningPools) {
+      if (!pool.id) {
+        logger.info(`Mining pool ${pool.name} has no unique 'id' defined. Skipping.`);
+        continue;
+      }
+
+      const poolDB = await PoolsRepository.$getPoolByUniqueId(pool.id, false);
+      if (!poolDB) {
+        // New mining pool
+        const slug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
+        logger.debug(`Inserting new mining pool ${pool.name}`);
+        await PoolsRepository.$insertNewMiningPool(pool, slug);
+        await this.$deleteUnknownBlocks();
+      } else {
+        if (poolDB.name !== pool.name) {
+          // Pool has been renamed
+          const newSlug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
+          logger.warn(`Renaming ${poolDB.name} mining pool to ${pool.name}. Slug has been updated. Maybe you want to make a redirection from 'https://mempool.space/mining/pool/${poolDB.slug}' to 'https://mempool.space/mining/pool/${newSlug}'`);
+          await PoolsRepository.$renameMiningPool(poolDB.id, newSlug, pool.name);
+        }
+        if (poolDB.link !== pool.link) {
+          // Pool link has changed
+          logger.debug(`Updating link for ${pool.name} mining pool`);
+          await PoolsRepository.$updateMiningPoolLink(poolDB.id, pool.link);
+        }
+        if (JSON.stringify(pool.addresses) !== poolDB.addresses ||
+          JSON.stringify(pool.regexes) !== poolDB.regexes) {
+          // Pool addresses changed or coinbase tags changed
+          logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool. If 'AUTOMATIC_BLOCK_REINDEXING' is enabled, we will re-index its blocks and 'unknown' blocks`);
+          await PoolsRepository.$updateMiningPoolTags(poolDB.id, pool.addresses, pool.regexes);
+          await this.$deleteBlocksForPool(poolDB);
+        }
+      }
+    }
+
+    logger.info('Mining pools.json import completed');
+  }

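One subtlety of the new change detection: addresses and regexes are compared as JSON strings against the raw db columns, so element order matters. A tiny illustration with invented values:

// JSON-string comparison is order-sensitive: these hold the same addresses,
// yet the check above would flag them as a change and trigger a re-index.
const fromJson = ['addr1', 'addr2'];
const fromDb = '["addr2","addr1"]';
console.log(JSON.stringify(fromJson) !== fromDb); // true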
   /**
    * Manually add the 'unknown pool'
    */
-  private async insertUnknownPool() {
+  public async $insertUnknownPool(): Promise<void> {
+    if (!config.DATABASE.ENABLED) {
+      return;
+    }
+
     try {
       const [rows]: any[] = await DB.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
       if (rows.length === 0) {
         await DB.query({
-          sql: `INSERT INTO pools(name, link, regexes, addresses, slug)
-          VALUES("Unknown", "https://learnmeabitcoin.com/technical/coinbase-transaction", "[]", "[]", "unknown");
+          sql: `INSERT INTO pools(name, link, regexes, addresses, slug, unique_id)
+          VALUES("${this.unknownPool.name}", "${this.unknownPool.link}", "[]", "[]", "${this.unknownPool.slug}", 0);
         `});
       } else {
         await DB.query(`UPDATE pools
-          SET name='Unknown', link='https://learnmeabitcoin.com/technical/coinbase-transaction',
+          SET name='${this.unknownPool.name}', link='${this.unknownPool.link}',
           regexes='[]', addresses='[]',
-          slug='unknown'
-          WHERE name='Unknown'
+          slug='${this.unknownPool.slug}',
+          unique_id=0
+          WHERE slug='${this.unknownPool.slug}'
         `);
       }
     } catch (e) {
-      logger.err('Unable to insert "Unknown" mining pool', logger.tags.mining);
+      logger.err(`Unable to insert or update "Unknown" mining pool. Reason: ${e instanceof Error ? e.message : e}`);
     }
   }

   /**
-   * Delete blocks which needs to be reindexed
+   * Delete indexed blocks for an updated mining pool
+   *
+   * @param pool
    */
-  private async $deleteBlocskToReindex(finalPoolDataUpdate: any[]) {
+  private async $deleteBlocksForPool(pool: PoolTag): Promise<void> {
     if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
       return;
     }

-    const blockCount = await BlocksRepository.$blockCount(null, null);
-    if (blockCount === 0) {
-      return;
-    }
-
-    for (const updatedPool of finalPoolDataUpdate) {
-      const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
-      if (pool.length > 0) {
-        logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`, logger.tags.mining);
-        await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
-      }
-    }
-
-    // Ignore early days of Bitcoin as there were not mining pool yet
-    logger.notice(`Deleting blocks with unknown mining pool from height 130635 for future re-indexing`, logger.tags.mining);
-    const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
-    await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
+    // Get oldest blocks mined by the pool and assume pools.json updates only concern most recent years
+    // Ignore early days of Bitcoin as there were no mining pool yet
+    const [oldestPoolBlock]: any[] = await DB.query(`
+      SELECT height
+      FROM blocks
+      WHERE pool_id = ?
+      ORDER BY height
+      LIMIT 1`,
+      [pool.id]
+    );
+    const oldestBlockHeight = oldestPoolBlock.length > 0 ? oldestPoolBlock[0].height : 130635;
+    const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
+    this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${oldestBlockHeight} for re-indexing`);
+    await DB.query(`
+      DELETE FROM blocks
+      WHERE pool_id = ? AND height >= ${oldestBlockHeight}`,
+      [unknownPool[0].id]
+    );
+    logger.notice(`Deleting blocks from ${pool.name} mining pool for re-indexing`);
+    await DB.query(`
+      DELETE FROM blocks
+      WHERE pool_id = ?`,
+      [pool.id]
+    );
+  }

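The fallback height of 130635 marks, roughly, the start of pooled mining; earlier blocks never need pool re-attribution. The height-selection rule above, isolated as a sketch:

// Sketch of the re-index window rule: start from the pool's oldest indexed
// block, or from height 130635 (roughly the dawn of mining pools) otherwise.
function reindexStartHeight(oldestPoolBlock: { height: number }[]): number {
  return oldestPoolBlock.length > 0 ? oldestPoolBlock[0].height : 130635;
}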
-    logger.notice(`Truncating hashrates for future re-indexing`, logger.tags.mining);
-    await DB.query(`DELETE FROM hashrates`);
+  private async $deleteUnknownBlocks(): Promise<void> {
+    const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
+    this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height 130635 for re-indexing`);
+    await DB.query(`
+      DELETE FROM blocks
+      WHERE pool_id = ? AND height >= 130635`,
+      [unknownPool[0].id]
+    );
   }
 }

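Why a brand-new pool definition triggers $deleteUnknownBlocks(): blocks that pool mined before it had a definition were indexed as "unknown", and only deleting those rows lets the indexer re-attribute them. Reduced to a predicate (illustrative only, not code from this diff):

// Illustrative predicate: which indexed blocks a new pool definition
// invalidates and sends back for re-indexing.
function needsReindex(blockPoolSlug: string, height: number): boolean {
  return blockPoolSlug === 'unknown' && height >= 130635;
}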
@@ -148,7 +148,7 @@ const defaults: IConfig = {
   'USER_AGENT': 'mempool',
   'STDOUT_LOG_MIN_PRIORITY': 'debug',
   'AUTOMATIC_BLOCK_REINDEXING': false,
-  'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
+  'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
   'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
   'AUDIT': false,
   'ADVANCED_GBT_AUDIT': false,
@@ -1,4 +1,5 @@
 import { Common } from '../api/common';
+import poolsParser from '../api/pools-parser';
 import config from '../config';
 import DB from '../database';
 import logger from '../logger';
@@ -17,7 +18,11 @@ class PoolsRepository {
    * Get unknown pool tagging info
    */
   public async $getUnknownPool(): Promise<PoolTag> {
-    const [rows] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
+    let [rows]: any[] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
+    if (rows && rows.length === 0 && config.DATABASE.ENABLED) {
+      await poolsParser.$insertUnknownPool();
+      [rows] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
+    }
     return <PoolTag>rows[0];
   }

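$getUnknownPool() now self-heals: a missing Unknown row is created on first read. The read-through pattern in general form (hypothetical helper, not part of this diff):

// Generic read-through pattern behind the change above: if the required row
// is absent, seed it, then re-read. Hypothetical helper names.
async function getOrCreate<T>(read: () => Promise<T | undefined>, create: () => Promise<void>): Promise<T> {
  let row = await read();
  if (row === undefined) {
    await create();     // seed the missing row
    row = await read(); // second read is expected to succeed
  }
  return row as T;
}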
@@ -59,7 +64,7 @@ class PoolsRepository {
   /**
    * Get basic pool info and block count between two timestamp
    */
   public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
     const query = `SELECT COUNT(height) as blockCount, pools.id as poolId, pools.name as poolName
       FROM pools
       LEFT JOIN blocks on pools.id = blocks.pool_id AND blocks.blockTimestamp BETWEEN FROM_UNIXTIME(?) AND FROM_UNIXTIME(?)
@@ -75,9 +80,9 @@ class PoolsRepository {
   }

   /**
-   * Get mining pool statistics for one pool
+   * Get a mining pool info
    */
-  public async $getPool(slug: string): Promise<PoolTag | null> {
+  public async $getPool(slug: string, parse: boolean = true): Promise<PoolTag | null> {
     const query = `
       SELECT *
       FROM pools
@@ -90,10 +95,12 @@ class PoolsRepository {
       return null;
     }

-    rows[0].regexes = JSON.parse(rows[0].regexes);
+    if (parse) {
+      rows[0].regexes = JSON.parse(rows[0].regexes);
+    }
     if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
       rows[0].addresses = []; // pools.json only contains mainnet addresses
-    } else {
+    } else if (parse) {
       rows[0].addresses = JSON.parse(rows[0].addresses);
     }

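The new parse flag lets callers pick between db-raw rows (JSON strings, what migratePoolsJson() compares against) and parsed rows (arrays, what the API serves). With invented values:

// parse = false keeps the JSON-string columns straight from the db; the
// default parses them. Slug and values invented for illustration.
const raw = await PoolsRepository.$getPool('examplepool', false);
// raw?.regexes === '["/ExamplePool/"]'  (string)
const parsed = await PoolsRepository.$getPool('examplepool');
// parsed?.regexes is ['/ExamplePool/']  (array)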
|
@ -103,6 +110,116 @@ class PoolsRepository {
|
||||||
throw e;
|
throw e;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a mining pool info by its unique id
|
||||||
|
*/
|
||||||
|
public async $getPoolByUniqueId(id: number, parse: boolean = true): Promise<PoolTag | null> {
|
||||||
|
const query = `
|
||||||
|
SELECT *
|
||||||
|
FROM pools
|
||||||
|
WHERE pools.unique_id = ?`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const [rows]: any[] = await DB.query(query, [id]);
|
||||||
|
|
||||||
|
if (rows.length < 1) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parse) {
|
||||||
|
rows[0].regexes = JSON.parse(rows[0].regexes);
|
||||||
|
}
|
||||||
|
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
|
||||||
|
rows[0].addresses = []; // pools.json only contains mainnet addresses
|
||||||
|
} else if (parse) {
|
||||||
|
rows[0].addresses = JSON.parse(rows[0].addresses);
|
||||||
|
}
|
||||||
|
|
||||||
|
return rows[0];
|
||||||
|
} catch (e) {
|
||||||
|
logger.err('Cannot get pool from db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert a new mining pool in the database
|
||||||
|
*
|
||||||
|
* @param pool
|
||||||
|
*/
|
||||||
|
public async $insertNewMiningPool(pool: any, slug: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
await DB.query(`
|
||||||
|
INSERT INTO pools
|
||||||
|
SET name = ?, link = ?, addresses = ?, regexes = ?, slug = ?, unique_id = ?`,
|
||||||
|
[pool.name, pool.link, JSON.stringify(pool.addresses), JSON.stringify(pool.regexes), slug, pool.id]
|
||||||
|
);
|
||||||
|
} catch (e: any) {
|
||||||
|
logger.err(`Cannot insert new mining pool into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rename an existing mining pool
|
||||||
|
*
|
||||||
|
* @param dbId
|
||||||
|
* @param newSlug
|
||||||
|
* @param newName
|
||||||
|
*/
|
||||||
|
public async $renameMiningPool(dbId: number, newSlug: string, newName: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
await DB.query(`
|
||||||
|
UPDATE pools
|
||||||
|
SET slug = ?, name = ?
|
||||||
|
WHERE id = ?`,
|
||||||
|
[newSlug, newName, dbId]
|
||||||
|
);
|
||||||
|
} catch (e: any) {
|
||||||
|
logger.err(`Cannot rename mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update an exisiting mining pool link
|
||||||
|
*
|
||||||
|
* @param dbId
|
||||||
|
* @param newLink
|
||||||
|
*/
|
||||||
|
public async $updateMiningPoolLink(dbId: number, newLink: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
await DB.query(`
|
||||||
|
UPDATE pools
|
||||||
|
SET link = ?
|
||||||
|
WHERE id = ?`,
|
||||||
|
[newLink, dbId]
|
||||||
|
);
|
||||||
|
} catch (e: any) {
|
||||||
|
logger.err(`Cannot update link for mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update an existing mining pool addresses or coinbase tags
|
||||||
|
*
|
||||||
|
* @param dbId
|
||||||
|
* @param addresses
|
||||||
|
* @param regexes
|
||||||
|
*/
|
||||||
|
public async $updateMiningPoolTags(dbId: number, addresses: string, regexes: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
await DB.query(`
|
||||||
|
UPDATE pools
|
||||||
|
SET addresses = ?, regexes = ?
|
||||||
|
WHERE id = ?`,
|
||||||
|
[JSON.stringify(addresses), JSON.stringify(regexes), dbId]
|
||||||
|
);
|
||||||
|
} catch (e: any) {
|
||||||
|
logger.err(`Cannot update mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export default new PoolsRepository();
|
export default new PoolsRepository();
|
||||||
|
|
|
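Note the shift from the old parser's string-interpolated SQL to placeholder parameters throughout these helpers, which retires the manual quote escaping the old code needed for pool names. The difference, sketched:

// Old style (removed in this PR): values spliced into the SQL string,
// forcing manual escaping of quotes in pool names.
await DB.query(`UPDATE pools SET name='${pool.name}' WHERE id=${poolId}`);
// New style: placeholders let the driver handle quoting.
await DB.query('UPDATE pools SET name = ? WHERE id = ?', [pool.name, poolId]);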
@@ -8,7 +8,7 @@ import { SocksProxyAgent } from 'socks-proxy-agent';
 import * as https from 'https';

 /**
- * Maintain the most recent version of pools.json
+ * Maintain the most recent version of pools-v2.json
  */
 class PoolsUpdater {
   lastRun: number = 0;
@@ -17,6 +17,11 @@ class PoolsUpdater {
   treeUrl: string = config.MEMPOOL.POOLS_JSON_TREE_URL;

   public async updatePoolsJson(): Promise<void> {
+    if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
+      logger.info(`Not updating mining pools to avoid inconsistency because AUTOMATIC_BLOCK_REINDEXING is set to false`);
+      return;
+    }
+
     if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
       return;
     }
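Pool updates are now opt-in via re-indexing: definitions and indexed blocks must move together, so the updater bails out when re-indexing is disabled. Assuming the backend config keys from the defaults hunk above (values illustrative):

// Minimal sketch of the settings a backend needs for pools-v2.json updates
// to apply; keys from the defaults hunk above, values illustrative.
const mempoolConfig = {
  MEMPOOL: {
    AUTOMATIC_BLOCK_REINDEXING: true, // without this, updatePoolsJson() returns early
    POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
    POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
  },
};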
@@ -38,7 +43,7 @@ class PoolsUpdater {
     }

     try {
-      const githubSha = await this.fetchPoolsSha(); // Fetch pools.json sha from github
+      const githubSha = await this.fetchPoolsSha(); // Fetch pools-v2.json sha from github
       if (githubSha === undefined) {
         return;
       }
@@ -47,32 +52,46 @@ class PoolsUpdater {
         this.currentSha = await this.getShaFromDb();
       }

-      logger.debug(`Pools.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
+      logger.debug(`pools-v2.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
       if (this.currentSha !== undefined && this.currentSha === githubSha) {
         return;
       }

       if (this.currentSha === undefined) {
-        logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`, logger.tags.mining);
+        logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl}`, logger.tags.mining);
       } else {
-        logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`, logger.tags.mining);
+        logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl}`, logger.tags.mining);
       }
       const poolsJson = await this.query(this.poolsUrl);
       if (poolsJson === undefined) {
         return;
       }
-      await poolsParser.migratePoolsJson(poolsJson);
-      await this.updateDBSha(githubSha);
-      logger.notice(`PoolsUpdater completed`, logger.tags.mining);
+      poolsParser.setMiningPools(poolsJson);
+
+      if (config.DATABASE.ENABLED === false) { // Don't run db operations
+        logger.info('Mining pools-v2.json import completed (no database)');
+        return;
+      }
+
+      try {
+        await DB.query('START TRANSACTION;');
+        await poolsParser.migratePoolsJson();
+        await this.updateDBSha(githubSha);
+        await DB.query('COMMIT;');
+      } catch (e) {
+        logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
+        await DB.query('ROLLBACK;');
+      }
+      logger.notice('PoolsUpdater completed');

     } catch (e) {
       this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
-      logger.err(`PoolsUpdater failed. Will try again in 24h. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
+      logger.err(`PoolsUpdater failed. Will try again in 24h. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
     }
   }

   /**
-   * Fetch our latest pools.json sha from the db
+   * Update our latest pools-v2.json sha in the db
    */
   private async updateDBSha(githubSha: string): Promise<void> {
     this.currentSha = githubSha;
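The transaction added above makes the pool migration and the recorded sha commit or roll back together, so a failed import can simply retry later against the old state. The same pattern as a reusable helper (hypothetical, not part of this diff):

// Generic form of the atomic-update pattern introduced above; hypothetical
// helper, shown only to isolate the idea.
async function withTransaction(work: () => Promise<void>): Promise<void> {
  await DB.query('START TRANSACTION;');
  try {
    await work();
    await DB.query('COMMIT;');
  } catch (e) {
    await DB.query('ROLLBACK;'); // nothing partial survives
    throw e;
  }
}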
@@ -81,46 +100,46 @@ class PoolsUpdater {
         await DB.query('DELETE FROM state where name="pools_json_sha"');
         await DB.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
       } catch (e) {
-        logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
+        logger.err('Cannot save github pools-v2.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
       }
     }
   }

   /**
-   * Fetch our latest pools.json sha from the db
+   * Fetch our latest pools-v2.json sha from the db
    */
   private async getShaFromDb(): Promise<string | undefined> {
     try {
       const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
       return (rows.length > 0 ? rows[0].string : undefined);
     } catch (e) {
-      logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
+      logger.err('Cannot fetch pools-v2.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
       return undefined;
     }
   }

   /**
-   * Fetch our latest pools.json sha from github
+   * Fetch our latest pools-v2.json sha from github
    */
   private async fetchPoolsSha(): Promise<string | undefined> {
     const response = await this.query(this.treeUrl);

     if (response !== undefined) {
       for (const file of response['tree']) {
-        if (file['path'] === 'pools.json') {
+        if (file['path'] === 'pools-v2.json') {
           return file['sha'];
         }
       }
     }

-    logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`, logger.tags.mining);
+    logger.err(`Cannot find "pools-v2.json" in git tree (${this.treeUrl})`, logger.tags.mining);
     return undefined;
   }

   /**
    * Http request wrapper
    */
-  private async query(path): Promise<object | undefined> {
+  private async query(path): Promise<any[] | undefined> {
     type axiosOptions = {
       headers: {
         'User-Agent': string