Merge pull request #2869 from mempool/nymkappa/feature/rewrite-pool-parser

Rewrite mining pools parser
wiz 2023-02-25 17:50:19 +09:00 committed by GitHub
commit b7e6b6da13
6 changed files with 272 additions and 261 deletions


@@ -36,7 +36,7 @@ describe('Mempool Backend Config', () => {
USER_AGENT: 'mempool',
STDOUT_LOG_MIN_PRIORITY: 'debug',
POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
AUDIT: false,
ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false,
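(For context, a minimal sketch of the entry shape the new pools-v2.json is expected to carry, inferred from how setMiningPools() consumes it further down; the field values here are illustrative assumptions, not taken from the real file:)

const examplePoolEntry = {
  id: 111,                          // stable unique id, new in pools-v2.json
  name: 'Example Pool',
  link: 'https://pool.example.com',
  addresses: ['bc1qexample'],       // mainnet payout addresses
  tags: ['/ExamplePool/'],          // coinbase tag patterns, renamed to `regexes` by setMiningPools()
};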


@@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
import { RowDataPacket } from 'mysql2';
class DatabaseMigration {
private static currentVersion = 55;
private static currentVersion = 56;
private queryTimeout = 3600_000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
@@ -62,8 +62,8 @@ class DatabaseMigration {
if (databaseSchemaVersion <= 2) {
// Disable some spam logs when they're not relevant
this.uniqueLogs.push(this.blocksTruncatedMessage);
this.uniqueLogs.push(this.hashratesTruncatedMessage);
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
}
logger.debug('MIGRATIONS: Current state.schema_version ' + databaseSchemaVersion);
@@ -490,6 +490,16 @@ class DatabaseMigration {
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
await this.updateToSchemaVersion(55);
}
if (databaseSchemaVersion < 56) {
await this.$executeQuery('ALTER TABLE pools ADD unique_id int NOT NULL DEFAULT -1');
await this.$executeQuery('TRUNCATE TABLE `blocks`');
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('DELETE FROM `pools`');
await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
this.uniqueLog(logger.notice, '`pools` table has been truncated');
await this.updateToSchemaVersion(56);
}
}
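// Note on migration 56 (hedged reading of the statements above): pool rows are now keyed
// by the new `unique_id` column, so `pools` is emptied and its AUTO_INCREMENT reset, and
// `blocks` is truncated; both get repopulated from pools-v2.json and re-indexed afterwards.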
/**


@@ -1,15 +1,8 @@
import DB from '../database';
import logger from '../logger';
import config from '../config';
import BlocksRepository from '../repositories/BlocksRepository';
interface Pool {
name: string;
link: string;
regexes: string[];
addresses: string[];
slug: string;
}
import PoolsRepository from '../repositories/PoolsRepository';
import { PoolTag } from '../mempool.interfaces';
class PoolsParser {
miningPools: any[] = [];
@@ -20,270 +13,142 @@ class PoolsParser {
'addresses': '[]',
'slug': 'unknown'
};
slugWarnFlag = false;
private uniqueLogs: string[] = [];
private uniqueLog(loggerFunction: any, msg: string): void {
if (this.uniqueLogs.includes(msg)) {
return;
}
this.uniqueLogs.push(msg);
loggerFunction(msg);
}
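// Illustrative usage (not part of the diff): the same message is only emitted once per
// process lifetime, no matter how many times it is passed in.
//   this.uniqueLog(logger.notice, 'pools table truncated'); // logged
//   this.uniqueLog(logger.notice, 'pools table truncated'); // silently skipped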
public setMiningPools(pools): void {
for (const pool of pools) {
pool.regexes = pool.tags;
delete(pool.tags);
}
this.miningPools = pools;
}
/**
* Parse the pools.json file, consolidate the data and dump it into the database
* Populate our db with updated mining pool definition
* @param pools
*/
public async migratePoolsJson(poolsJson: object): Promise<void> {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
public async migratePoolsJson(): Promise<void> {
await this.$insertUnknownPool();
// First we save every entries without paying attention to pool duplication
const poolsDuplicated: Pool[] = [];
const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
for (let i = 0; i < coinbaseTags.length; ++i) {
poolsDuplicated.push({
'name': (<Pool>coinbaseTags[i][1]).name,
'link': (<Pool>coinbaseTags[i][1]).link,
'regexes': [coinbaseTags[i][0]],
'addresses': [],
'slug': ''
});
}
const addressesTags = Object.entries(poolsJson['payout_addresses']);
for (let i = 0; i < addressesTags.length; ++i) {
poolsDuplicated.push({
'name': (<Pool>addressesTags[i][1]).name,
'link': (<Pool>addressesTags[i][1]).link,
'regexes': [],
'addresses': [addressesTags[i][0]],
'slug': ''
});
}
// Then, we find unique mining pool names
const poolNames: string[] = [];
for (let i = 0; i < poolsDuplicated.length; ++i) {
if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
poolNames.push(poolsDuplicated[i].name);
for (const pool of this.miningPools) {
if (!pool.id) {
logger.info(`Mining pool ${pool.name} has no unique 'id' defined. Skipping.`);
continue;
}
}
logger.debug(`Found ${poolNames.length} unique mining pools`, logger.tags.mining);
// Get existing pools from the db
let existingPools;
try {
if (config.DATABASE.ENABLED === true) {
[existingPools] = await DB.query({ sql: 'SELECT * FROM pools;', timeout: 120000 });
const poolDB = await PoolsRepository.$getPoolByUniqueId(pool.id, false);
if (!poolDB) {
// New mining pool
const slug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.debug(`Inserting new mining pool ${pool.name}`);
await PoolsRepository.$insertNewMiningPool(pool, slug);
await this.$deleteUnknownBlocks();
} else {
existingPools = [];
}
} catch (e) {
logger.err('Cannot get existing pools from the database, skipping pools.json import', logger.tags.mining);
return;
}
this.miningPools = [];
// Finally, we generate the final consolidated pools data
const finalPoolDataAdd: Pool[] = [];
const finalPoolDataUpdate: Pool[] = [];
const finalPoolDataRename: Pool[] = [];
for (let i = 0; i < poolNames.length; ++i) {
let allAddresses: string[] = [];
let allRegexes: string[] = [];
const match = poolsDuplicated.filter((pool: Pool) => pool.name === poolNames[i]);
for (let y = 0; y < match.length; ++y) {
allAddresses = allAddresses.concat(match[y].addresses);
allRegexes = allRegexes.concat(match[y].regexes);
}
const finalPoolName = poolNames[i].replace(`'`, `''`); // To support single quote in names when doing db queries
let slug: string | undefined;
try {
slug = poolsJson['slugs'][poolNames[i]];
} catch (e) {
if (this.slugWarnFlag === false) {
logger.warn(`pools.json does not seem to contain the 'slugs' object`, logger.tags.mining);
this.slugWarnFlag = true;
if (poolDB.name !== pool.name) {
// Pool has been renamed
const newSlug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.warn(`Renaming ${poolDB.name} mining pool to ${pool.name}. Slug has been updated. Maybe you want to make a redirection from 'https://mempool.space/mining/pool/${poolDB.slug}' to 'https://mempool.space/mining/pool/${newSlug}'`);
await PoolsRepository.$renameMiningPool(poolDB.id, newSlug, pool.name);
}
}
if (slug === undefined) {
// Only keep alphanumerical
slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`, logger.tags.mining);
}
const poolObj = {
'name': finalPoolName,
'link': match[0].link,
'regexes': allRegexes,
'addresses': allAddresses,
'slug': slug
};
const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
if (existingPool !== undefined) {
// Check if any data was actually updated
const equals = (a, b) =>
a.length === b.length &&
a.every((v, i) => v === b[i]);
if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
finalPoolDataUpdate.push(poolObj);
if (poolDB.link !== pool.link) {
// Pool link has changed
logger.debug(`Updating link for ${pool.name} mining pool`);
await PoolsRepository.$updateMiningPoolLink(poolDB.id, pool.link);
}
} else if (config.DATABASE.ENABLED) {
// Double check that if we're not just renaming a pool (same address same regex)
const [poolToRename]: any[] = await DB.query(`
SELECT * FROM pools
WHERE addresses = ? OR regexes = ?`,
[JSON.stringify(poolObj.addresses), JSON.stringify(poolObj.regexes)]
);
if (poolToRename && poolToRename.length > 0) {
// We're actually renaming an existing pool
finalPoolDataRename.push({
'name': poolObj.name,
'link': poolObj.link,
'regexes': allRegexes,
'addresses': allAddresses,
'slug': slug
});
logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`, logger.tags.mining);
} else {
logger.debug(`Add '${finalPoolName}' mining pool`, logger.tags.mining);
finalPoolDataAdd.push(poolObj);
if (JSON.stringify(pool.addresses) !== poolDB.addresses ||
JSON.stringify(pool.regexes) !== poolDB.regexes) {
// Pool addresses changed or coinbase tags changed
logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool. If 'AUTOMATIC_BLOCK_REINDEXING' is enabled, we will re-index its blocks and 'unknown' blocks`);
await PoolsRepository.$updateMiningPoolTags(poolDB.id, pool.addresses, pool.regexes);
await this.$deleteBlocksForPool(poolDB);
}
}
this.miningPools.push({
'name': finalPoolName,
'link': match[0].link,
'regexes': JSON.stringify(allRegexes),
'addresses': JSON.stringify(allAddresses),
'slug': slug
});
}
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools.json import completed (no database)', logger.tags.mining);
return;
}
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
finalPoolDataRename.length > 0
) {
logger.debug(`Update pools table now`, logger.tags.mining);
// Add new mining pools into the database
let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
for (let i = 0; i < finalPoolDataAdd.length; ++i) {
queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
'${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
}
queryAdd = queryAdd.slice(0, -1) + ';';
// Updated existing mining pools in the database
const updateQueries: string[] = [];
for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
updateQueries.push(`
UPDATE pools
SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
slug='${finalPoolDataUpdate[i].slug}'
WHERE name='${finalPoolDataUpdate[i].name}'
;`);
}
// Rename mining pools
const renameQueries: string[] = [];
for (let i = 0; i < finalPoolDataRename.length; ++i) {
renameQueries.push(`
UPDATE pools
SET name='${finalPoolDataRename[i].name}', link='${finalPoolDataRename[i].link}',
slug='${finalPoolDataRename[i].slug}'
WHERE regexes='${JSON.stringify(finalPoolDataRename[i].regexes)}'
AND addresses='${JSON.stringify(finalPoolDataRename[i].addresses)}'
;`);
}
try {
if (finalPoolDataAdd.length > 0 || updateQueries.length > 0) {
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
}
if (finalPoolDataAdd.length > 0) {
await DB.query({ sql: queryAdd, timeout: 120000 });
}
for (const query of updateQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
for (const query of renameQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
await this.insertUnknownPool();
logger.info('Mining pools.json import completed', logger.tags.mining);
} catch (e) {
logger.err(`Cannot import pools in the database`, logger.tags.mining);
throw e;
}
}
try {
await this.insertUnknownPool();
} catch (e) {
logger.err(`Cannot insert unknown pool in the database`, logger.tags.mining);
throw e;
}
logger.info('Mining pools.json import completed');
}
/**
* Manually add the 'unknown pool'
*/
private async insertUnknownPool() {
public async $insertUnknownPool(): Promise<void> {
if (!config.DATABASE.ENABLED) {
return;
}
try {
const [rows]: any[] = await DB.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
if (rows.length === 0) {
await DB.query({
sql: `INSERT INTO pools(name, link, regexes, addresses, slug)
VALUES("Unknown", "https://learnmeabitcoin.com/technical/coinbase-transaction", "[]", "[]", "unknown");
sql: `INSERT INTO pools(name, link, regexes, addresses, slug, unique_id)
VALUES("${this.unknownPool.name}", "${this.unknownPool.link}", "[]", "[]", "${this.unknownPool.slug}", 0);
`});
} else {
await DB.query(`UPDATE pools
SET name='Unknown', link='https://learnmeabitcoin.com/technical/coinbase-transaction',
SET name='${this.unknownPool.name}', link='${this.unknownPool.link}',
regexes='[]', addresses='[]',
slug='unknown'
WHERE name='Unknown'
slug='${this.unknownPool.slug}',
unique_id=0
WHERE slug='${this.unknownPool.slug}'
`);
}
} catch (e) {
logger.err('Unable to insert "Unknown" mining pool', logger.tags.mining);
logger.err(`Unable to insert or update "Unknown" mining pool. Reason: ${e instanceof Error ? e.message : e}`);
}
}
/**
* Delete blocks which needs to be reindexed
* Delete indexed blocks for an updated mining pool
*
* @param pool
*/
private async $deleteBlocskToReindex(finalPoolDataUpdate: any[]) {
private async $deleteBlocksForPool(pool: PoolTag): Promise<void> {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
return;
}
const blockCount = await BlocksRepository.$blockCount(null, null);
if (blockCount === 0) {
return;
}
for (const updatedPool of finalPoolDataUpdate) {
const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
if (pool.length > 0) {
logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`, logger.tags.mining);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
}
}
// Ignore early days of Bitcoin as there were not mining pool yet
logger.notice(`Deleting blocks with unknown mining pool from height 130635 for future re-indexing`, logger.tags.mining);
// Get the oldest block mined by the pool, assuming pools-v2.json updates only concern the most recent years
// Ignore early days of Bitcoin as there were no mining pools yet
const [oldestPoolBlock]: any[] = await DB.query(`
SELECT height
FROM blocks
WHERE pool_id = ?
ORDER BY height
LIMIT 1`,
[pool.id]
);
const oldestBlockHeight = oldestPoolBlock.length > 0 ? oldestPoolBlock[0].height : 130635;
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${oldestBlockHeight} for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ? AND height >= ${oldestBlockHeight}`,
[unknownPool[0].id]
);
logger.notice(`Deleting blocks from ${pool.name} mining pool for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ?`,
[pool.id]
);
}
logger.notice(`Truncating hashrates for future re-indexing`, logger.tags.mining);
await DB.query(`DELETE FROM hashrates`);
private async $deleteUnknownBlocks(): Promise<void> {
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height 130635 for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ? AND height >= 130635`,
[unknownPool[0].id]
);
}
}
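// Hedged summary of migratePoolsJson() above, per incoming pool entry:
//   no `id` field                 -> log and skip the entry
//   id unknown in `pools` table   -> insert the pool, then delete 'unknown' blocks for re-indexing
//   name changed                  -> rename the pool and regenerate its slug
//   link changed                  -> update the link only
//   addresses or tags changed     -> update them and delete the pool's blocks for re-indexing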


@@ -148,7 +148,7 @@ const defaults: IConfig = {
'USER_AGENT': 'mempool',
'STDOUT_LOG_MIN_PRIORITY': 'debug',
'AUTOMATIC_BLOCK_REINDEXING': false,
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
'AUDIT': false,
'ADVANCED_GBT_AUDIT': false,


@@ -1,4 +1,5 @@
import { Common } from '../api/common';
import poolsParser from '../api/pools-parser';
import config from '../config';
import DB from '../database';
import logger from '../logger';
@@ -17,7 +18,11 @@ class PoolsRepository {
* Get unknown pool tagging info
*/
public async $getUnknownPool(): Promise<PoolTag> {
const [rows] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
let [rows]: any[] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
if (rows && rows.length === 0 && config.DATABASE.ENABLED) {
await poolsParser.$insertUnknownPool();
[rows] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
}
return <PoolTag>rows[0];
}
@@ -59,7 +64,7 @@
/**
* Get basic pool info and block count between two timestamps
*/
public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
const query = `SELECT COUNT(height) as blockCount, pools.id as poolId, pools.name as poolName
FROM pools
LEFT JOIN blocks on pools.id = blocks.pool_id AND blocks.blockTimestamp BETWEEN FROM_UNIXTIME(?) AND FROM_UNIXTIME(?)
@@ -75,9 +80,9 @@
}
/**
* Get mining pool statistics for one pool
* Get a mining pool's info
*/
public async $getPool(slug: string): Promise<PoolTag | null> {
public async $getPool(slug: string, parse: boolean = true): Promise<PoolTag | null> {
const query = `
SELECT *
FROM pools
@@ -90,10 +95,12 @@
return null;
}
rows[0].regexes = JSON.parse(rows[0].regexes);
if (parse) {
rows[0].regexes = JSON.parse(rows[0].regexes);
}
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
rows[0].addresses = []; // pools.json only contains mainnet addresses
} else {
} else if (parse) {
rows[0].addresses = JSON.parse(rows[0].addresses);
}
@@ -103,6 +110,116 @@
throw e;
}
}
/**
* Get a mining pool info by its unique id
*/
public async $getPoolByUniqueId(id: number, parse: boolean = true): Promise<PoolTag | null> {
const query = `
SELECT *
FROM pools
WHERE pools.unique_id = ?`;
try {
const [rows]: any[] = await DB.query(query, [id]);
if (rows.length < 1) {
return null;
}
if (parse) {
rows[0].regexes = JSON.parse(rows[0].regexes);
}
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
rows[0].addresses = []; // pools.json only contains mainnet addresses
} else if (parse) {
rows[0].addresses = JSON.parse(rows[0].addresses);
}
return rows[0];
} catch (e) {
logger.err('Cannot get pool from db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Insert a new mining pool in the database
*
* @param pool
*/
public async $insertNewMiningPool(pool: any, slug: string): Promise<void> {
try {
await DB.query(`
INSERT INTO pools
SET name = ?, link = ?, addresses = ?, regexes = ?, slug = ?, unique_id = ?`,
[pool.name, pool.link, JSON.stringify(pool.addresses), JSON.stringify(pool.regexes), slug, pool.id]
);
} catch (e: any) {
logger.err(`Cannot insert new mining pool into db. Reason: ` + (e instanceof Error ? e.message : e));
}
}
/**
* Rename an existing mining pool
*
* @param dbId
* @param newSlug
* @param newName
*/
public async $renameMiningPool(dbId: number, newSlug: string, newName: string): Promise<void> {
try {
await DB.query(`
UPDATE pools
SET slug = ?, name = ?
WHERE id = ?`,
[newSlug, newName, dbId]
);
} catch (e: any) {
logger.err(`Cannot rename mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
/**
* Update an existing mining pool link
*
* @param dbId
* @param newLink
*/
public async $updateMiningPoolLink(dbId: number, newLink: string): Promise<void> {
try {
await DB.query(`
UPDATE pools
SET link = ?
WHERE id = ?`,
[newLink, dbId]
);
} catch (e: any) {
logger.err(`Cannot update link for mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
/**
* Update an existing mining pool addresses or coinbase tags
*
* @param dbId
* @param addresses
* @param regexes
*/
public async $updateMiningPoolTags(dbId: number, addresses: string[], regexes: string[]): Promise<void> {
try {
await DB.query(`
UPDATE pools
SET addresses = ?, regexes = ?
WHERE id = ?`,
[JSON.stringify(addresses), JSON.stringify(regexes), dbId]
);
} catch (e: any) {
logger.err(`Cannot update mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
}
export default new PoolsRepository();
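(Usage sketch for the new unique-id lookup; the id value is invented. With the default parse flag the JSON columns come back as arrays, while migratePoolsJson() passes parse = false so it can compare the raw JSON strings directly:)

const parsed = await PoolsRepository.$getPoolByUniqueId(111);        // regexes/addresses parsed into arrays
const raw = await PoolsRepository.$getPoolByUniqueId(111, false);    // regexes/addresses left as JSON strings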


@@ -8,7 +8,7 @@ import { SocksProxyAgent } from 'socks-proxy-agent';
import * as https from 'https';
/**
* Maintain the most recent version of pools.json
* Maintain the most recent version of pools-v2.json
*/
class PoolsUpdater {
lastRun: number = 0;
@@ -17,6 +17,11 @@
treeUrl: string = config.MEMPOOL.POOLS_JSON_TREE_URL;
public async updatePoolsJson(): Promise<void> {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
logger.info(`Not updating mining pools to avoid inconsistency because AUTOMATIC_BLOCK_REINDEXING is set to false`);
return;
}
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
@@ -38,7 +43,7 @@
}
try {
const githubSha = await this.fetchPoolsSha(); // Fetch pools.json sha from github
const githubSha = await this.fetchPoolsSha(); // Fetch pools-v2.json sha from github
if (githubSha === undefined) {
return;
}
@@ -47,32 +52,46 @@
this.currentSha = await this.getShaFromDb();
}
logger.debug(`Pools.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
logger.debug(`pools-v2.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
if (this.currentSha !== undefined && this.currentSha === githubSha) {
return;
}
if (this.currentSha === undefined) {
logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`, logger.tags.mining);
logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl}`, logger.tags.mining);
} else {
logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`, logger.tags.mining);
logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl}`, logger.tags.mining);
}
const poolsJson = await this.query(this.poolsUrl);
if (poolsJson === undefined) {
return;
}
await poolsParser.migratePoolsJson(poolsJson);
await this.updateDBSha(githubSha);
logger.notice(`PoolsUpdater completed`, logger.tags.mining);
poolsParser.setMiningPools(poolsJson);
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools-v2.json import completed (no database)');
return;
}
try {
await DB.query('START TRANSACTION;');
await poolsParser.migratePoolsJson();
await this.updateDBSha(githubSha);
await DB.query('COMMIT;');
} catch (e) {
logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
await DB.query('ROLLBACK;');
}
logger.notice('PoolsUpdater completed');
} catch (e) {
this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
logger.err(`PoolsUpdater failed. Will try again in 24h. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
logger.err(`PoolsUpdater failed. Will try again in 24h. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
}
}
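// Note (hedged): migratePoolsJson() and the sha bump run inside a single transaction above,
// so a failed import is rolled back in full and the stored `pools_json_sha` keeps its old
// value, letting the next run retry the same update.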
/**
* Fetch our latest pools.json sha from the db
* Save our latest pools-v2.json sha into the db
*/
private async updateDBSha(githubSha: string): Promise<void> {
this.currentSha = githubSha;
@@ -81,46 +100,46 @@
await DB.query('DELETE FROM state where name="pools_json_sha"');
await DB.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
} catch (e) {
logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
logger.err('Cannot save github pools-v2.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
}
}
}
/**
* Fetch our latest pools.json sha from the db
* Fetch our latest pools-v2.json sha from the db
*/
private async getShaFromDb(): Promise<string | undefined> {
try {
const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
return (rows.length > 0 ? rows[0].string : undefined);
} catch (e) {
logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
logger.err('Cannot fetch pools-v2.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
return undefined;
}
}
/**
* Fetch our latest pools.json sha from github
* Fetch our latest pools-v2.json sha from github
*/
private async fetchPoolsSha(): Promise<string | undefined> {
const response = await this.query(this.treeUrl);
if (response !== undefined) {
for (const file of response['tree']) {
if (file['path'] === 'pools.json') {
if (file['path'] === 'pools-v2.json') {
return file['sha'];
}
}
}
logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`, logger.tags.mining);
logger.err(`Cannot find "pools-v2.json" in git tree (${this.treeUrl})`, logger.tags.mining);
return undefined;
}
/**
* Http request wrapper
*/
private async query(path): Promise<object | undefined> {
private async query(path): Promise<any[] | undefined> {
type axiosOptions = {
headers: {
'User-Agent': string