Mirror of https://github.com/mempool/mempool.git, synced 2025-03-03 17:47:01 +01:00
Merge branch 'master' into simon/address-prefix-bug
Commit 552c717693
4 changed files with 112 additions and 108 deletions
@@ -218,31 +218,28 @@ class Blocks {
         if (blockHeight < lastBlockToIndex) {
           break;
         }
-        try {
-          ++indexedThisRun;
-          if (++totaIndexed % 100 === 0 || blockHeight === lastBlockToIndex) {
-            const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
-            const blockPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
-            const progress = Math.round(totaIndexed / indexingBlockAmount * 100);
-            const timeLeft = Math.round((indexingBlockAmount - totaIndexed) / blockPerSeconds);
-            logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds} blocks/sec | total: ${totaIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${elapsedSeconds} seconds | left: ~${timeLeft} seconds`);
-          }
-          const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
-          const block = await bitcoinApi.$getBlock(blockHash);
-          const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
-          const blockExtended = await this.$getBlockExtended(block, transactions);
-          await blocksRepository.$saveBlockInDatabase(blockExtended);
-        } catch (e) {
-          logger.err(`Something went wrong while indexing blocks.` + e);
-        }
+        ++indexedThisRun;
+        if (++totaIndexed % 100 === 0 || blockHeight === lastBlockToIndex) {
+          const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
+          const blockPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
+          const progress = Math.round(totaIndexed / indexingBlockAmount * 100);
+          const timeLeft = Math.round((indexingBlockAmount - totaIndexed) / blockPerSeconds);
+          logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds} blocks/sec | total: ${totaIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${elapsedSeconds} seconds | left: ~${timeLeft} seconds`);
+        }
+        const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
+        const block = await bitcoinApi.$getBlock(blockHash);
+        const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
+        const blockExtended = await this.$getBlockExtended(block, transactions);
+        await blocksRepository.$saveBlockInDatabase(blockExtended);
       }

       currentBlockHeight -= chunkSize;
     }
     logger.info('Block indexing completed');
   } catch (e) {
-    logger.err('An error occured in $generateBlockDatabase(). Skipping block indexing. ' + e);
-    console.log(e);
+    logger.err('An error occured in $generateBlockDatabase(). Trying again later. ' + e);
+    this.blockIndexingStarted = false;
+    return;
   }

   this.blockIndexingCompleted = true;
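Net effect of the hunk above: the per-block try/catch is removed, so a failure while indexing any block now propagates to the outer catch in $generateBlockDatabase(), which resets blockIndexingStarted and returns early; blockIndexingCompleted stays false, so a later run can retry instead of skipping indexing. A minimal sketch of that control flow, assuming a hypothetical indexOne() helper in place of the $getBlockHash / $getBlock / $saveBlockInDatabase sequence shown in the diff:

// Sketch only, not the mempool code. indexOne() is a hypothetical stand-in for the
// per-block fetch-and-save work in the diff above.
class BlockIndexerSketch {
  private blockIndexingStarted = false;
  public blockIndexingCompleted = false;

  constructor(private indexOne: (height: number) => Promise<void>) {}

  public async generate(fromHeight: number, toHeight: number): Promise<void> {
    if (this.blockIndexingStarted) {
      return; // a run is already in progress
    }
    this.blockIndexingStarted = true;
    try {
      for (let height = fromHeight; height >= toHeight; --height) {
        await this.indexOne(height); // no per-block catch: the first failure aborts the run
      }
    } catch (e) {
      // reset the guard and bail out; blockIndexingCompleted stays false,
      // so the next scheduled call will try again later
      this.blockIndexingStarted = false;
      return;
    }
    this.blockIndexingCompleted = true;
  }
}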
@@ -90,7 +90,7 @@ class Mining {
   /**
    * Return the historical hashrates and oldest indexed block timestamp for one or all pools
    */
   public async $getPoolsHistoricalHashrates(interval: string | null, poolId: number): Promise<object> {
     return await HashratesRepository.$getPoolsWeeklyHashrate(interval);
   }
@@ -108,106 +108,112 @@ class Mining {
     if (!blocks.blockIndexingCompleted || this.hashrateIndexingStarted) {
       return;
     }
-    this.hashrateIndexingStarted = true;
-
-    logger.info(`Indexing hashrates`);
-
-    const totalDayIndexed = (await BlocksRepository.$blockCount(null, null)) / 144;
-    const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
-    let startedAt = new Date().getTime() / 1000;
-    const genesisTimestamp = 1231006505; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
-    const lastMidnight = new Date();
-    lastMidnight.setUTCHours(0); lastMidnight.setUTCMinutes(0); lastMidnight.setUTCSeconds(0); lastMidnight.setUTCMilliseconds(0);
-    let toTimestamp = Math.round(lastMidnight.getTime() / 1000);
-    let indexedThisRun = 0;
-    let totalIndexed = 0;
-
-    const hashrates: any[] = [];
-
-    while (toTimestamp > genesisTimestamp) {
-      const fromTimestamp = toTimestamp - 86400;
-      if (indexedTimestamp.includes(fromTimestamp)) {
-        toTimestamp -= 86400;
-        ++totalIndexed;
-        continue;
-      }
-
-      const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
-        null, fromTimestamp, toTimestamp);
-      if (blockStats.blockCount === 0) { // We are done indexing, no blocks left
-        break;
-      }
-
-      let lastBlockHashrate = 0;
-      lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
-        blockStats.lastBlockHeight);
-
-      if (totalIndexed % 7 === 0 && !indexedTimestamp.includes(fromTimestamp + 1)) { // Save weekly pools hashrate
-        logger.debug("Indexing weekly hashrates for mining pools");
-        let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp - 604800, fromTimestamp);
-        const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
-        pools = pools.map((pool: any) => {
-          pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
-          pool.share = (pool.blockCount / totalBlocks);
-          return pool;
-        });
-
-        for (const pool of pools) {
-          hashrates.push({
-            hashrateTimestamp: fromTimestamp + 1,
-            avgHashrate: pool['hashrate'],
-            poolId: pool.poolId,
-            share: pool['share'],
-            type: 'weekly',
-          });
-        }
-      }
-
-      hashrates.push({
-        hashrateTimestamp: fromTimestamp,
-        avgHashrate: lastBlockHashrate,
-        poolId: null,
-        share: 1,
-        type: 'daily',
-      });
-
-      if (hashrates.length > 10) {
-        await HashratesRepository.$saveHashrates(hashrates);
-        hashrates.length = 0;
-      }
-
-      const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
-      if (elapsedSeconds > 5) {
-        const daysPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
-        const formattedDate = new Date(fromTimestamp * 1000).toUTCString();
-        const daysLeft = Math.round(totalDayIndexed - totalIndexed);
-        logger.debug(`Getting hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ~${daysLeft} days left to index`);
-        startedAt = new Date().getTime() / 1000;
-        indexedThisRun = 0;
-      }
-
-      toTimestamp -= 86400;
-      ++indexedThisRun;
-      ++totalIndexed;
-    }
-
-    // Add genesis block manually
-    if (toTimestamp <= genesisTimestamp && !indexedTimestamp.includes(genesisTimestamp)) {
-      hashrates.push({
-        hashrateTimestamp: genesisTimestamp,
-        avgHashrate: await bitcoinClient.getNetworkHashPs(1, 1),
-        poolId: null,
-        type: 'daily',
-      });
-    }
-
-    if (hashrates.length > 0) {
-      await HashratesRepository.$saveHashrates(hashrates);
-    }
-    await HashratesRepository.$setLatestRunTimestamp();
-    this.hashrateIndexingStarted = false;
-
-    logger.info(`Hashrates indexing completed`);
+
+    try {
+      this.hashrateIndexingStarted = true;
+
+      logger.info(`Indexing hashrates`);
+
+      const totalDayIndexed = (await BlocksRepository.$blockCount(null, null)) / 144;
+      const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
+      let startedAt = new Date().getTime() / 1000;
+      const genesisTimestamp = 1231006505; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
+      const lastMidnight = new Date();
+      lastMidnight.setUTCHours(0); lastMidnight.setUTCMinutes(0); lastMidnight.setUTCSeconds(0); lastMidnight.setUTCMilliseconds(0);
+      let toTimestamp = Math.round(lastMidnight.getTime() / 1000);
+      let indexedThisRun = 0;
+      let totalIndexed = 0;
+
+      const hashrates: any[] = [];
+
+      while (toTimestamp > genesisTimestamp) {
+        const fromTimestamp = toTimestamp - 86400;
+        if (indexedTimestamp.includes(fromTimestamp)) {
+          toTimestamp -= 86400;
+          ++totalIndexed;
+          continue;
+        }
+
+        const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
+          null, fromTimestamp, toTimestamp);
+        if (blockStats.blockCount === 0) { // We are done indexing, no blocks left
+          break;
+        }
+
+        let lastBlockHashrate = 0;
+        lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
+          blockStats.lastBlockHeight);
+
+        if (totalIndexed > 7 && totalIndexed % 7 === 0 && !indexedTimestamp.includes(fromTimestamp + 1)) { // Save weekly pools hashrate
+          logger.debug("Indexing weekly hashrates for mining pools");
+          let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp - 604800, fromTimestamp);
+          const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
+          pools = pools.map((pool: any) => {
+            pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
+            pool.share = (pool.blockCount / totalBlocks);
+            return pool;
+          });

+          for (const pool of pools) {
+            hashrates.push({
+              hashrateTimestamp: fromTimestamp + 1,
+              avgHashrate: pool['hashrate'],
+              poolId: pool.poolId,
+              share: pool['share'],
+              type: 'weekly',
+            });
+          }
+        }
+
+        hashrates.push({
+          hashrateTimestamp: fromTimestamp,
+          avgHashrate: lastBlockHashrate,
+          poolId: null,
+          share: 1,
+          type: 'daily',
+        });
+
+        if (hashrates.length > 10) {
+          await HashratesRepository.$saveHashrates(hashrates);
+          hashrates.length = 0;
+        }
+
+        const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
+        if (elapsedSeconds > 5) {
+          const daysPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
+          const formattedDate = new Date(fromTimestamp * 1000).toUTCString();
+          const daysLeft = Math.round(totalDayIndexed - totalIndexed);
+          logger.debug(`Getting hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ~${daysLeft} days left to index`);
+          startedAt = new Date().getTime() / 1000;
+          indexedThisRun = 0;
+        }
+
+        toTimestamp -= 86400;
+        ++indexedThisRun;
+        ++totalIndexed;
+      }
+
+      // Add genesis block manually
+      if (toTimestamp <= genesisTimestamp && !indexedTimestamp.includes(genesisTimestamp)) {
+        hashrates.push({
+          hashrateTimestamp: genesisTimestamp,
+          avgHashrate: await bitcoinClient.getNetworkHashPs(1, 1),
+          poolId: null,
+          type: 'daily',
+        });
+      }
+
+      if (hashrates.length > 0) {
+        await HashratesRepository.$saveHashrates(hashrates);
+      }
+      await HashratesRepository.$setLatestRunTimestamp();
+      this.hashrateIndexingStarted = false;
+
+      logger.info(`Hashrates indexing completed`);
+    } catch (e) {
+      this.hashrateIndexingStarted = false;
+      throw e;
+    }
   }
 }
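Two behavioural changes sit inside the re-indented block above: the weekly pools hashrate is only computed once more than 7 days have been indexed (the added totalIndexed > 7 guard), and the whole routine now runs inside a try/catch that resets hashrateIndexingStarted before rethrowing. The save logic itself is unchanged: rows are buffered and flushed in small batches during the loop, with a final flush afterwards. A minimal sketch of that batching pattern, with a hypothetical saveBatch() standing in for HashratesRepository.$saveHashrates():

// Sketch only: buffer computed rows, flush in small batches, then flush the remainder.
async function indexWithBatchedSaves(
  days: number[],
  computeRow: (day: number) => Promise<object>,
  saveBatch: (rows: object[]) => Promise<void>,
): Promise<void> {
  const rows: object[] = [];
  for (const day of days) {
    rows.push(await computeRow(day));
    if (rows.length > 10) {   // periodic flush keeps memory bounded and writes small
      await saveBatch(rows);
      rows.length = 0;        // reuse the same array, as the diff does with hashrates
    }
  }
  if (rows.length > 0) {      // final flush for whatever is left over
    await saveBatch(rows);
  }
}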
@@ -96,8 +96,8 @@ class Server {
         await Common.sleep(5000);
         await databaseMigration.$truncateIndexedData(tables);
       }
-      await this.$resetHashratesIndexingState();
       await databaseMigration.$initializeOrMigrateDatabase();
+      await this.$resetHashratesIndexingState();
       await poolsParser.migratePoolsJson();
     } catch (e) {
       throw new Error(e instanceof Error ? e.message : 'Error');
@@ -22,6 +22,7 @@ class HashratesRepository {
       await connection.query(query);
     } catch (e: any) {
       logger.err('$saveHashrateInDatabase() error' + (e instanceof Error ? e.message : e));
+      throw e;
     }

     connection.release();
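The single added line matters for the Mining hunk above: by rethrowing after logging, a failed insert now reaches the new catch block in the Mining class, which resets hashrateIndexingStarted instead of leaving the indexer permanently marked as running. A minimal sketch of the log-and-rethrow pattern, using a generic wrapper rather than the repository's actual method:

// Sketch only: log at the data layer, then rethrow so the caller can clean up its own state.
async function logAndRethrow<T>(label: string, op: () => Promise<T>): Promise<T> {
  try {
    return await op();
  } catch (e) {
    console.error(`${label} error: ` + (e instanceof Error ? e.message : e));
    throw e; // let the caller's catch reset flags or abort the run
  }
}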