Merge pull request #5207 from mempool/mononaut/pool-reindexing

Pool reindexing
This commit is contained in:
softsimon 2024-07-01 11:32:03 +09:00 committed by GitHub
commit 300bfd225b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 124 additions and 92 deletions

View File

@ -235,7 +235,7 @@ To manually update your mining pools, you can use the `--update-pools` command l
You can enable the automatic mining pools update by setting `config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING` to `true` in your `mempool-config.json`. You can enable the automatic mining pools update by setting `config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING` to `true` in your `mempool-config.json`.
When a `coinbase tag` or `coinbase address` change is detected, all blocks tagged to the `unknown` mining pools (starting from height 130635) will be deleted from the `blocks` table. Additionally, all blocks which were tagged to the pool which has been updated will also be deleted from the `blocks` table. Of course, those blocks will be automatically reindexed. When a `coinbase tag` or `coinbase address` change is detected, pool assignments for all relevant blocks (tagged to that pool or the `unknown` mining pool, starting from height 130635) are updated using the new criteria.
### Re-index tables ### Re-index tables

View File

@ -372,8 +372,7 @@ class Blocks {
} }
} }
const asciiScriptSig = transactionUtils.hex2ascii(txMinerInfo.vin[0].scriptsig); const addresses = txMinerInfo.vout.map((vout) => vout.scriptpubkey_address).filter(address => address) as string[];
const addresses = txMinerInfo.vout.map((vout) => vout.scriptpubkey_address).filter((address) => address);
let pools: PoolTag[] = []; let pools: PoolTag[] = [];
if (config.DATABASE.ENABLED === true) { if (config.DATABASE.ENABLED === true) {
@ -382,26 +381,9 @@ class Blocks {
pools = poolsParser.miningPools; pools = poolsParser.miningPools;
} }
for (let i = 0; i < pools.length; ++i) { const pool = poolsParser.matchBlockMiner(txMinerInfo.vin[0].scriptsig, addresses || [], pools);
if (addresses.length) { if (pool) {
const poolAddresses: string[] = typeof pools[i].addresses === 'string' ? return pool;
JSON.parse(pools[i].addresses) : pools[i].addresses;
for (let y = 0; y < poolAddresses.length; y++) {
if (addresses.indexOf(poolAddresses[y]) !== -1) {
return pools[i];
}
}
}
const regexes: string[] = typeof pools[i].regexes === 'string' ?
JSON.parse(pools[i].regexes) : pools[i].regexes;
for (let y = 0; y < regexes.length; ++y) {
const regex = new RegExp(regexes[y], 'i');
const match = asciiScriptSig.match(regex);
if (match !== null) {
return pools[i];
}
}
} }
if (config.DATABASE.ENABLED === true) { if (config.DATABASE.ENABLED === true) {

View File

@ -5,6 +5,9 @@ import PoolsRepository from '../repositories/PoolsRepository';
import { PoolTag } from '../mempool.interfaces'; import { PoolTag } from '../mempool.interfaces';
import diskCache from './disk-cache'; import diskCache from './disk-cache';
import mining from './mining/mining'; import mining from './mining/mining';
import transactionUtils from './transaction-utils';
import BlocksRepository from '../repositories/BlocksRepository';
import redisCache from './redis-cache';
class PoolsParser { class PoolsParser {
miningPools: any[] = []; miningPools: any[] = [];
@ -37,15 +40,18 @@ class PoolsParser {
/** /**
* Populate our db with updated mining pool definition * Populate our db with updated mining pool definition
* @param pools * @param pools
*/ */
public async migratePoolsJson(): Promise<void> { public async migratePoolsJson(): Promise<void> {
// We also need to wipe the backend cache to make sure we don't serve blocks with // We also need to wipe the backend cache to make sure we don't serve blocks with
// the wrong mining pool (usually happen with unknown blocks) // the wrong mining pool (usually happen with unknown blocks)
diskCache.setIgnoreBlocksCache(); diskCache.setIgnoreBlocksCache();
redisCache.setIgnoreBlocksCache();
await this.$insertUnknownPool(); await this.$insertUnknownPool();
let reindexUnknown = false;
for (const pool of this.miningPools) { for (const pool of this.miningPools) {
if (!pool.id) { if (!pool.id) {
logger.info(`Mining pool ${pool.name} has no unique 'id' defined. Skipping.`); logger.info(`Mining pool ${pool.name} has no unique 'id' defined. Skipping.`);
@ -57,22 +63,22 @@ class PoolsParser {
logger.err(`Mining pool ${pool.name} must have at least one of the fields 'addresses' or 'regexes'. Skipping.`); logger.err(`Mining pool ${pool.name} must have at least one of the fields 'addresses' or 'regexes'. Skipping.`);
continue; continue;
} }
pool.addresses = pool.addresses || []; pool.addresses = pool.addresses || [];
pool.regexes = pool.regexes || []; pool.regexes = pool.regexes || [];
if (pool.addresses.length === 0 && pool.regexes.length === 0) { if (pool.addresses.length === 0 && pool.regexes.length === 0) {
logger.err(`Mining pool ${pool.name} has no 'addresses' nor 'regexes' defined. Skipping.`); logger.err(`Mining pool ${pool.name} has no 'addresses' nor 'regexes' defined. Skipping.`);
continue; continue;
} }
if (pool.addresses.length === 0) { if (pool.addresses.length === 0) {
logger.warn(`Mining pool ${pool.name} has no 'addresses' defined.`); logger.warn(`Mining pool ${pool.name} has no 'addresses' defined.`);
} }
if (pool.regexes.length === 0) { if (pool.regexes.length === 0) {
logger.warn(`Mining pool ${pool.name} has no 'regexes' defined.`); logger.warn(`Mining pool ${pool.name} has no 'regexes' defined.`);
} }
const poolDB = await PoolsRepository.$getPoolByUniqueId(pool.id, false); const poolDB = await PoolsRepository.$getPoolByUniqueId(pool.id, false);
if (!poolDB) { if (!poolDB) {
@ -80,7 +86,7 @@ class PoolsParser {
const slug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase(); const slug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.debug(`Inserting new mining pool ${pool.name}`); logger.debug(`Inserting new mining pool ${pool.name}`);
await PoolsRepository.$insertNewMiningPool(pool, slug); await PoolsRepository.$insertNewMiningPool(pool, slug);
await this.$deleteUnknownBlocks(); reindexUnknown = true;
} else { } else {
if (poolDB.name !== pool.name) { if (poolDB.name !== pool.name) {
// Pool has been renamed // Pool has been renamed
@ -98,7 +104,45 @@ class PoolsParser {
// Pool addresses changed or coinbase tags changed // Pool addresses changed or coinbase tags changed
logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool.`); logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool.`);
await PoolsRepository.$updateMiningPoolTags(poolDB.id, pool.addresses, pool.regexes); await PoolsRepository.$updateMiningPoolTags(poolDB.id, pool.addresses, pool.regexes);
await this.$deleteBlocksForPool(poolDB); reindexUnknown = true;
await this.$reindexBlocksForPool(poolDB.id);
}
}
}
if (reindexUnknown) {
logger.notice(`Updating addresses and/or coinbase tags for unknown mining pool.`);
let unknownPool;
if (config.DATABASE.ENABLED === true) {
unknownPool = await PoolsRepository.$getUnknownPool();
} else {
unknownPool = this.unknownPool;
}
await this.$reindexBlocksForPool(unknownPool.id);
}
}
/**
 * Identify the mining pool that mined a block from its coinbase data.
 *
 * Address matches are checked before coinbase-tag (regex) matches for each
 * candidate pool, and the first matching pool wins.
 *
 * @param scriptsig hex-encoded coinbase input scriptsig
 * @param addresses coinbase output addresses of the block
 * @param pools candidate pool definitions to match against
 * @returns the first matching pool tag, or undefined if nothing matches
 */
public matchBlockMiner(scriptsig: string, addresses: string[], pools: PoolTag[]): PoolTag | undefined {
  const asciiScriptSig = transactionUtils.hex2ascii(scriptsig);
  for (const pool of pools) {
    if (addresses.length) {
      // addresses may still be JSON-serialized when the pool row comes from the db
      const poolAddresses: string[] = typeof pool.addresses === 'string' ?
        JSON.parse(pool.addresses) : pool.addresses;
      if (poolAddresses.some((address) => addresses.includes(address))) {
        return pool;
      }
    }
    // Fall back to matching known coinbase tags against the ascii scriptsig
    const regexes: string[] = typeof pool.regexes === 'string' ?
      JSON.parse(pool.regexes) : pool.regexes;
    for (const pattern of regexes) {
      if (new RegExp(pattern, 'i').test(asciiScriptSig)) {
        return pool;
      }
    }
  }
  return undefined;
}
@ -134,68 +178,47 @@ class PoolsParser {
} }
/** /**
* Delete indexed blocks for an updated mining pool * re-index pool assignment for blocks previously associated with pool
* *
* @param pool * @param pool local id of existing pool to reindex
*/ */
private async $deleteBlocksForPool(pool: PoolTag): Promise<void> { private async $reindexBlocksForPool(poolId: number): Promise<void> {
// Get oldest blocks mined by the pool and assume pools-v2.json updates only concern most recent years let firstKnownBlockPool = 130635; // https://mempool.space/block/0000000000000a067d94ff753eec72830f1205ad3a4c216a08a80c832e551a52
// Ignore early days of Bitcoin as there were no mining pool yet if (config.MEMPOOL.NETWORK === 'testnet') {
const [oldestPoolBlock]: any[] = await DB.query(` firstKnownBlockPool = 21106; // https://mempool.space/testnet/block/0000000070b701a5b6a1b965f6a38e0472e70b2bb31b973e4638dec400877581
SELECT height } else if (config.MEMPOOL.NETWORK === 'signet') {
firstKnownBlockPool = 0;
}
const [blocks]: any[] = await DB.query(`
SELECT height, hash, coinbase_raw, coinbase_addresses
FROM blocks FROM blocks
WHERE pool_id = ? WHERE pool_id = ?
ORDER BY height AND height >= ?
LIMIT 1`, ORDER BY height DESC
[pool.id] `, [poolId, firstKnownBlockPool]);
);
let firstKnownBlockPool = 130635; // https://mempool.space/block/0000000000000a067d94ff753eec72830f1205ad3a4c216a08a80c832e551a52 let pools: PoolTag[] = [];
if (config.MEMPOOL.NETWORK === 'testnet') { if (config.DATABASE.ENABLED === true) {
firstKnownBlockPool = 21106; // https://mempool.space/testnet/block/0000000070b701a5b6a1b965f6a38e0472e70b2bb31b973e4638dec400877581 pools = await PoolsRepository.$getPools();
} else if (config.MEMPOOL.NETWORK === 'signet') { } else {
firstKnownBlockPool = 0; pools = this.miningPools;
} }
const oldestBlockHeight = oldestPoolBlock.length ?? 0 > 0 ? oldestPoolBlock[0].height : firstKnownBlockPool; let changed = 0;
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`); for (const block of blocks) {
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${oldestBlockHeight} for re-indexing`); const addresses = JSON.parse(block.coinbase_addresses) || [];
await DB.query(` const newPool = this.matchBlockMiner(block.coinbase_raw, addresses, pools);
DELETE FROM blocks if (newPool && newPool.id !== poolId) {
WHERE pool_id = ? AND height >= ${oldestBlockHeight}`, changed++;
[unknownPool[0].id] await BlocksRepository.$savePool(block.hash, newPool.id);
); }
logger.notice(`Deleting blocks from ${pool.name} mining pool for re-indexing`); }
await DB.query(`
DELETE FROM blocks logger.info(`${changed} blocks assigned to a new pool`, logger.tags.mining);
WHERE pool_id = ?`,
[pool.id]
);
// Re-index hashrates and difficulty adjustments later // Re-index hashrates and difficulty adjustments later
mining.reindexHashrateRequested = true; mining.reindexHashrateRequested = true;
mining.reindexDifficultyAdjustmentRequested = true;
}
private async $deleteUnknownBlocks(): Promise<void> {
let firstKnownBlockPool = 130635; // https://mempool.space/block/0000000000000a067d94ff753eec72830f1205ad3a4c216a08a80c832e551a52
if (config.MEMPOOL.NETWORK === 'testnet') {
firstKnownBlockPool = 21106; // https://mempool.space/testnet/block/0000000070b701a5b6a1b965f6a38e0472e70b2bb31b973e4638dec400877581
} else if (config.MEMPOOL.NETWORK === 'signet') {
firstKnownBlockPool = 0;
}
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${firstKnownBlockPool} for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ? AND height >= ${firstKnownBlockPool}`,
[unknownPool[0].id]
);
// Re-index hashrates and difficulty adjustments later
mining.reindexHashrateRequested = true;
mining.reindexDifficultyAdjustmentRequested = true;
} }
} }

View File

@ -27,6 +27,7 @@ class RedisCache {
private rbfCacheQueue: { type: string, txid: string, value: any }[] = []; private rbfCacheQueue: { type: string, txid: string, value: any }[] = [];
private rbfRemoveQueue: { type: string, txid: string }[] = []; private rbfRemoveQueue: { type: string, txid: string }[] = [];
private txFlushLimit: number = 10000; private txFlushLimit: number = 10000;
private ignoreBlocksCache = false;
constructor() { constructor() {
if (config.REDIS.ENABLED) { if (config.REDIS.ENABLED) {
@ -341,9 +342,7 @@ class RedisCache {
return; return;
} }
logger.info('Restoring mempool and blocks data from Redis cache'); logger.info('Restoring mempool and blocks data from Redis cache');
// Load block data
const loadedBlocks = await this.$getBlocks();
const loadedBlockSummaries = await this.$getBlockSummaries();
// Load mempool // Load mempool
const loadedMempool = await this.$getMempool(); const loadedMempool = await this.$getMempool();
this.inflateLoadedTxs(loadedMempool); this.inflateLoadedTxs(loadedMempool);
@ -352,9 +351,14 @@ class RedisCache {
const rbfTrees = await this.$getRbfEntries('tree'); const rbfTrees = await this.$getRbfEntries('tree');
const rbfExpirations = await this.$getRbfEntries('exp'); const rbfExpirations = await this.$getRbfEntries('exp');
// Set loaded data // Load & set block data
blocks.setBlocks(loadedBlocks || []); if (!this.ignoreBlocksCache) {
blocks.setBlockSummaries(loadedBlockSummaries || []); const loadedBlocks = await this.$getBlocks();
const loadedBlockSummaries = await this.$getBlockSummaries();
blocks.setBlocks(loadedBlocks || []);
blocks.setBlockSummaries(loadedBlockSummaries || []);
}
// Set other data
await memPool.$setMempool(loadedMempool); await memPool.$setMempool(loadedMempool);
await rbfCache.load({ await rbfCache.load({
txs: rbfTxs, txs: rbfTxs,
@ -411,6 +415,10 @@ class RedisCache {
} }
return result; return result;
} }
/**
 * Skip restoring blocks and block summaries from the Redis cache on the
 * next load — used when pool definitions change, so stale pool tags on
 * cached blocks are not served; mempool/rbf data is still restored.
 */
public setIgnoreBlocksCache(): void {
  this.ignoreBlocksCache = true;
}
} }
export default new RedisCache(); export default new RedisCache();

View File

@ -5,7 +5,7 @@ import logger from '../logger';
import { Common } from '../api/common'; import { Common } from '../api/common';
import PoolsRepository from './PoolsRepository'; import PoolsRepository from './PoolsRepository';
import HashratesRepository from './HashratesRepository'; import HashratesRepository from './HashratesRepository';
import { RowDataPacket, escape } from 'mysql2'; import { RowDataPacket } from 'mysql2';
import BlocksSummariesRepository from './BlocksSummariesRepository'; import BlocksSummariesRepository from './BlocksSummariesRepository';
import DifficultyAdjustmentsRepository from './DifficultyAdjustmentsRepository'; import DifficultyAdjustmentsRepository from './DifficultyAdjustmentsRepository';
import bitcoinClient from '../api/bitcoin/bitcoin-client'; import bitcoinClient from '../api/bitcoin/bitcoin-client';
@ -532,7 +532,7 @@ class BlocksRepository {
return null; return null;
} }
return await this.formatDbBlockIntoExtendedBlock(rows[0] as DatabaseBlock); return await this.formatDbBlockIntoExtendedBlock(rows[0] as DatabaseBlock);
} catch (e) { } catch (e) {
logger.err(`Cannot get indexed block ${height}. Reason: ` + (e instanceof Error ? e.message : e)); logger.err(`Cannot get indexed block ${height}. Reason: ` + (e instanceof Error ? e.message : e));
throw e; throw e;
@ -1001,6 +1001,25 @@ class BlocksRepository {
} }
} }
/**
 * Update the mining pool assignment of an already-indexed block.
 *
 * @param id hash of the block to update
 * @param poolId local database id of the pool to assign
 * @throws rethrows any database error after logging it
 */
public async $savePool(id: string, poolId: number): Promise<void> {
  try {
    await DB.query(`
      UPDATE blocks SET pool_id = ?
      WHERE hash = ?`,
      [poolId, id]
    );
  } catch (e) {
    // Log with context, then propagate so callers (e.g. pool reindexing) can abort
    logger.err(`Cannot update block pool. Reason: ` + (e instanceof Error ? e.message : e));
    throw e;
  }
}
/** /**
* Convert a mysql row block into a BlockExtended. Note that you * Convert a mysql row block into a BlockExtended. Note that you
* must provide the correct field into dbBlk object param * must provide the correct field into dbBlk object param