Mirror of https://github.com/mempool/mempool.git, synced 2025-03-03 17:47:01 +01:00

Merge pull request #2002 from mempool/nymkappa/feature/automatic-block-reindexing

Automatic block re-indexing upon pools.json update

Commit 051d151fb7: 1 changed file with 74 additions and 32 deletions
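In short: when pools.json is updated, the parser now detects pools whose addresses or regexes actually changed, deletes the blocks attributed to those pools (plus the blocks still tagged with the unknown pool above height 130635, before which there were no mining pools), and truncates the hashrates table, so that the indexer re-attributes the affected data on its next run.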
@@ -1,6 +1,7 @@
 import DB from '../database';
 import logger from '../logger';
 import config from '../config';
+import BlocksRepository from '../repositories/BlocksRepository';
 
 interface Pool {
   name: string;
@@ -32,7 +33,6 @@ class PoolsParser {
     // First we save every entries without paying attention to pool duplication
     const poolsDuplicated: Pool[] = [];
 
-    logger.debug('Parse coinbase_tags');
     const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
     for (let i = 0; i < coinbaseTags.length; ++i) {
       poolsDuplicated.push({
@@ -43,7 +43,6 @@
         'slug': ''
       });
     }
-    logger.debug('Parse payout_addresses');
     const addressesTags = Object.entries(poolsJson['payout_addresses']);
     for (let i = 0; i < addressesTags.length; ++i) {
       poolsDuplicated.push({
@@ -56,7 +55,6 @@
     }
 
     // Then, we find unique mining pool names
-    logger.debug('Identify unique mining pools');
     const poolNames: string[] = [];
     for (let i = 0; i < poolsDuplicated.length; ++i) {
       if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
@@ -119,8 +117,15 @@
         'slug': slug
       };
 
-      if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) {
+      const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
+      if (existingPool !== undefined) {
+        // Check if any data was actually updated
+        const equals = (a, b) =>
+          a.length === b.length &&
+          a.every((v, i) => v === b[i]);
+        if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
           finalPoolDataUpdate.push(poolObj);
+        }
       } else {
         logger.debug(`Add '${finalPoolName}' mining pool`);
         finalPoolDataAdd.push(poolObj);
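The update check above hinges on the small `equals` helper: two arrays count as equal only when they have the same length and identical elements at each index, and only a real change queues the pool for update (and therefore for block re-indexing). A minimal standalone sketch of that comparison, using hypothetical sample values; note the stored row keeps JSON strings while the freshly parsed pools.json yields plain arrays:

```ts
// Shallow, order-sensitive element-wise equality, as in the diff above.
const equals = (a: string[], b: string[]): boolean =>
  a.length === b.length && a.every((v, i) => v === b[i]);

// Hypothetical values for illustration only.
const storedAddresses = '["1abc", "3def"]'; // JSON string as read from the pools table
const parsedAddresses = ['1abc', '3def'];   // array parsed from pools.json

if (!equals(JSON.parse(storedAddresses), parsedAddresses)) {
  // Only an actual change would queue this pool for update.
}
```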
@@ -140,6 +145,7 @@
       return;
     }
 
+    if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) {
     logger.debug(`Update pools table now`);
 
     // Add new mining pools into the database
@@ -164,6 +170,8 @@
     }
 
     try {
+      await this.$deleteBlocskToReindex(finalPoolDataUpdate);
+
       if (finalPoolDataAdd.length > 0) {
         await DB.query({ sql: queryAdd, timeout: 120000 });
       }
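Note that the block deletion and the subsequent INSERT/UPDATE statements run sequentially inside one try block, not inside a database transaction, so a failure part-way through leaves the earlier statements applied. A sketch of a transactional variant, assuming direct access to a mysql2/promise connection; the project's own DB wrapper is what the diff actually uses, and it may not expose transactions this way:

```ts
import mysql from 'mysql2/promise';

// Hypothetical sketch: apply the whole batch atomically so a failed
// statement rolls back the block deletions and pool changes together.
async function applyPoolChanges(conn: mysql.Connection, statements: string[]): Promise<void> {
  await conn.beginTransaction();
  try {
    for (const sql of statements) {
      await conn.query(sql); // runs deletes, inserts and updates in order
    }
    await conn.commit();
  } catch (e) {
    await conn.rollback(); // pools and blocks stay untouched on failure
    throw e;
  }
}
```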
@@ -178,6 +186,14 @@
       }
     }
+
+    try {
+      await this.insertUnknownPool();
+    } catch (e) {
+      logger.err(`Cannot insert unknown pool in the database`);
+      throw e;
+    }
+  }
 
   /**
    * Manually add the 'unknown pool'
    */
@@ -201,6 +217,32 @@
       logger.err('Unable to insert "Unknown" mining pool');
     }
   }
+
+  /**
+   * Delete blocks which needs to be reindexed
+   */
+  private async $deleteBlocskToReindex(finalPoolDataUpdate: any[]) {
+    const blockCount = await BlocksRepository.$blockCount(null, null);
+    if (blockCount === 0) {
+      return;
+    }
+
+    for (const updatedPool of finalPoolDataUpdate) {
+      const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
+      if (pool.length > 0) {
+        logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`);
+        await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
+      }
+    }
+
+    // Ignore early days of Bitcoin as there were not mining pool yet
+    logger.notice('Deleting blocks with unknown mining pool from height 130635 for future re-indexing');
+    const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
+    await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
+
+    logger.notice('Truncating hashrates for future re-indexing');
+    await DB.query(`DELETE FROM hashrates`);
+  }
 }
 
 export default new PoolsParser();
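One design note on `$deleteBlocskToReindex`: the slug and pool id are interpolated directly into the SQL strings. Since those values come from pools.json rather than user input this is workable, but a parameterized form lets the driver do the escaping. A hypothetical variant, assuming the DB wrapper forwards a values array to mysql2-style `?` placeholders:

```ts
// Hypothetical parameterized version of the lookups above; the driver
// fills in '?' placeholders, so quoting mistakes cannot break the query.
const [pool]: any[] = await DB.query('SELECT id, name FROM pools WHERE slug = ?', [updatedPool.slug]);
if (pool.length > 0) {
  await DB.query('DELETE FROM blocks WHERE pool_id = ?', [pool[0].id]);
}
```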