Merge pull request #1302 from nymkappa/bugfix/weekly-pool-hashrate-indexing

Fix hashrate indexing backend logic (split daily/weekly indexing, fix a timezone issue, release unclosed db connections)
softsimon 2022-03-06 20:32:17 +01:00 committed by GitHub
commit 16331d1be7
9 changed files with 356 additions and 164 deletions
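
The "unclosed db connection" part of the fix follows one recurring pattern, applied across the repository classes below: release the pooled connection on the success path and on the error path before re-throwing. A minimal standalone sketch of that pattern, assuming a mysql2 promise pool like the DB.pool used in this backend (the helper name is hypothetical, not part of the commit):

import { Pool } from 'mysql2/promise';

// Hypothetical helper illustrating the release-on-both-paths pattern used in the diffs below.
async function queryWithRelease<T>(pool: Pool, sql: string, params: any[] = []): Promise<T> {
  const connection = await pool.getConnection();
  try {
    const [rows] = await connection.query(sql, params);
    connection.release(); // success path: hand the connection back to the pool
    return rows as unknown as T;
  } catch (e) {
    connection.release(); // error path: release before re-throwing so the pool is not drained
    throw e;
  }
}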


@@ -171,7 +171,7 @@ class Blocks {
   }
 
   /**
-   * Index all blocks metadata for the mining dashboard
+   * [INDEXING] Index all blocks metadata for the mining dashboard
    */
   public async $generateBlockDatabase() {
     if (this.blockIndexingStarted) {


@@ -6,7 +6,7 @@ import logger from '../logger';
 const sleep = (ms: number) => new Promise(res => setTimeout(res, ms));
 
 class DatabaseMigration {
-  private static currentVersion = 8;
+  private static currentVersion = 9;
   private queryTimeout = 120000;
   private statisticsAddedIndexed = false;
@@ -133,6 +133,13 @@ class DatabaseMigration {
         await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
       }
+      if (databaseSchemaVersion < 9) {
+        logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.'`);
+        await this.$executeQuery(connection, 'TRUNCATE hashrates;'); // Need to re-index
+        await this.$executeQuery(connection, 'ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
+        await this.$executeQuery(connection, 'ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
+      }
+
       connection.release();
     } catch (e) {
       connection.release();
@@ -276,6 +283,10 @@ class DatabaseMigration {
       queries.push(`INSERT INTO state(name, number, string) VALUES ('last_hashrates_indexing', 0, NULL)`);
     }
 
+    if (version < 9) {
+      queries.push(`INSERT INTO state(name, number, string) VALUES ('last_weekly_hashrates_indexing', 0, NULL)`);
+    }
+
     return queries;
   }
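
Together, migrations 8 and 9 leave the hashrates table with a daily/weekly type column and a uniqueness guarantee per (timestamp, pool), so a re-run cannot silently insert duplicate rows. A rough reconstruction of the resulting shape; only the columns named in this hunk and in the INSERT in HashratesRepository further down are confirmed, the column types are assumptions:

// Assumed resulting table shape; `type` and the unique key come from this migration,
// the other column names from the INSERT in HashratesRepository below.
const hashratesTableSketch = `
  CREATE TABLE hashrates (
    hashrate_timestamp timestamp NOT NULL,
    avg_hashrate double DEFAULT 0,
    pool_id smallint DEFAULT NULL,
    share float DEFAULT 0,
    type enum('daily', 'weekly') DEFAULT 'daily',
    UNIQUE KEY hashrate_timestamp_pool_id (hashrate_timestamp, pool_id)
  )`;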


@@ -8,6 +8,7 @@ import blocks from './blocks';
 
 class Mining {
   hashrateIndexingStarted = false;
+  weeklyHashrateIndexingStarted = false;
 
   constructor() {
   }
@@ -74,60 +75,134 @@ class Mining {
   }
 
   /**
-   * Return the historical difficulty adjustments and oldest indexed block timestamp
-   */
-  public async $getHistoricalDifficulty(interval: string | null): Promise<object> {
-    return await BlocksRepository.$getBlocksDifficulty(interval);
-  }
-
-  /**
-   * Return the historical hashrates and oldest indexed block timestamp
-   */
-  public async $getNetworkHistoricalHashrates(interval: string | null): Promise<object> {
-    return await HashratesRepository.$getNetworkDailyHashrate(interval);
-  }
-
-  /**
-   * Return the historical hashrates and oldest indexed block timestamp for one or all pools
-   */
-  public async $getPoolsHistoricalHashrates(interval: string | null, poolId: number): Promise<object> {
-    return await HashratesRepository.$getPoolsWeeklyHashrate(interval);
-  }
-
-  /**
-   * Generate daily hashrate data
-   */
-  public async $generateNetworkHashrateHistory(): Promise<void> {
-    // We only run this once a day
-    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp();
-    const now = new Date().getTime() / 1000;
-    if (now - latestTimestamp < 86400) {
+   * [INDEXING] Generate weekly mining pool hashrate history
+   */
+  public async $generatePoolHashrateHistory(): Promise<void> {
+    if (!blocks.blockIndexingCompleted || this.weeklyHashrateIndexingStarted) {
       return;
     }
 
+    // We only run this once a week
+    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_weekly_hashrates_indexing');
+    const now = new Date().getTime() / 1000;
+    if (now - latestTimestamp < 604800) {
+      return;
+    }
+
+    try {
+      this.weeklyHashrateIndexingStarted = true;
+
+      logger.info(`Indexing mining pools weekly hashrates`);
+
+      const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
+      const hashrates: any[] = [];
+      const genesisTimestamp = 1231006505; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
+      const lastMidnight = this.getDateMidnight(new Date());
+      let toTimestamp = Math.round((lastMidnight.getTime() - 604800) / 1000);
+
+      const totalWeekIndexed = (await BlocksRepository.$blockCount(null, null)) / 1008;
+      let indexedThisRun = 0;
+      let totalIndexed = 0;
+      let startedAt = new Date().getTime() / 1000;
+
+      while (toTimestamp > genesisTimestamp) {
+        const fromTimestamp = toTimestamp - 604800;
+
+        // Skip already indexed weeks
+        if (indexedTimestamp.includes(toTimestamp + 1)) {
+          toTimestamp -= 604800;
+          ++totalIndexed;
+          continue;
+        }
+
+        const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
+          null, fromTimestamp, toTimestamp);
+        if (blockStats.blockCount === 0) { // We are done indexing, no blocks left
+          break;
+        }
+
+        const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
+          blockStats.lastBlockHeight);
+
+        let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp, toTimestamp);
+        const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
+        pools = pools.map((pool: any) => {
+          pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
+          pool.share = (pool.blockCount / totalBlocks);
+          return pool;
+        });
+
+        for (const pool of pools) {
+          hashrates.push({
+            hashrateTimestamp: toTimestamp + 1,
+            avgHashrate: pool['hashrate'],
+            poolId: pool.poolId,
+            share: pool['share'],
+            type: 'weekly',
+          });
+        }
+
+        await HashratesRepository.$saveHashrates(hashrates);
+        hashrates.length = 0;
+
+        const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
+        if (elapsedSeconds > 5) {
+          const weeksPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
+          const formattedDate = new Date(fromTimestamp * 1000).toUTCString();
+          const weeksLeft = Math.round(totalWeekIndexed - totalIndexed);
+          logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds} weeks/sec | ~${weeksLeft} weeks left to index`);
+          startedAt = new Date().getTime() / 1000;
+          indexedThisRun = 0;
+        }
+
+        toTimestamp -= 604800;
+        ++indexedThisRun;
+        ++totalIndexed;
+      }
+      this.weeklyHashrateIndexingStarted = false;
+      await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing');
+      logger.info(`Weekly pools hashrate indexing completed`);
+    } catch (e) {
+      this.weeklyHashrateIndexingStarted = false;
+      throw e;
+    }
+  }
+
+  /**
+   * [INDEXING] Generate daily hashrate data
+   */
+  public async $generateNetworkHashrateHistory(): Promise<void> {
     if (!blocks.blockIndexingCompleted || this.hashrateIndexingStarted) {
       return;
     }
 
+    // We only run this once a day
+    const latestTimestamp = await HashratesRepository.$getLatestRunTimestamp('last_hashrates_indexing');
+    const now = new Date().getTime() / 1000;
+    if (now - latestTimestamp < 86400) {
+      return;
+    }
+
     try {
       this.hashrateIndexingStarted = true;
-      logger.info(`Indexing hashrates`);
+
+      logger.info(`Indexing network daily hashrate`);
+
+      const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
+      const genesisTimestamp = 1231006505; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
+      const lastMidnight = this.getDateMidnight(new Date());
+      let toTimestamp = Math.round(lastMidnight.getTime() / 1000);
+      const hashrates: any[] = [];
 
       const totalDayIndexed = (await BlocksRepository.$blockCount(null, null)) / 144;
-      const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
-      let startedAt = new Date().getTime() / 1000;
-      const genesisTimestamp = 1231006505; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
-      const lastMidnight = new Date();
-      lastMidnight.setUTCHours(0); lastMidnight.setUTCMinutes(0); lastMidnight.setUTCSeconds(0); lastMidnight.setUTCMilliseconds(0);
-      let toTimestamp = Math.round(lastMidnight.getTime() / 1000);
       let indexedThisRun = 0;
       let totalIndexed = 0;
-      const hashrates: any[] = [];
+      let startedAt = new Date().getTime() / 1000;
 
       while (toTimestamp > genesisTimestamp) {
         const fromTimestamp = toTimestamp - 86400;
+
+        // Skip already indexed weeks
         if (indexedTimestamp.includes(fromTimestamp)) {
           toTimestamp -= 86400;
           ++totalIndexed;
@@ -140,33 +215,11 @@ class Mining {
           break;
         }
 
-        let lastBlockHashrate = 0;
-        lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
+        const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
          blockStats.lastBlockHeight);
 
-        if (totalIndexed > 7 && totalIndexed % 7 === 0 && !indexedTimestamp.includes(fromTimestamp + 1)) { // Save weekly pools hashrate
-          logger.debug("Indexing weekly hashrates for mining pools");
-          let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp - 604800, fromTimestamp);
-          const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
-          pools = pools.map((pool: any) => {
-            pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
-            pool.share = (pool.blockCount / totalBlocks);
-            return pool;
-          });
-
-          for (const pool of pools) {
-            hashrates.push({
-              hashrateTimestamp: fromTimestamp + 1,
-              avgHashrate: pool['hashrate'],
-              poolId: pool.poolId,
-              share: pool['share'],
-              type: 'weekly',
-            });
-          }
-        }
-
         hashrates.push({
-          hashrateTimestamp: fromTimestamp,
+          hashrateTimestamp: toTimestamp,
           avgHashrate: lastBlockHashrate,
           poolId: null,
           share: 1,
@@ -183,7 +236,7 @@ class Mining {
           const daysPerSeconds = (indexedThisRun / elapsedSeconds).toFixed(2);
           const formattedDate = new Date(fromTimestamp * 1000).toUTCString();
           const daysLeft = Math.round(totalDayIndexed - totalIndexed);
-          logger.debug(`Getting hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ~${daysLeft} days left to index`);
+          logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds} days/sec | ~${daysLeft} days left to index`);
           startedAt = new Date().getTime() / 1000;
           indexedThisRun = 0;
         }
@@ -203,18 +256,25 @@ class Mining {
         });
       }
 
-      if (hashrates.length > 0) {
-        await HashratesRepository.$saveHashrates(hashrates);
-      }
-      await HashratesRepository.$setLatestRunTimestamp();
+      await HashratesRepository.$saveHashrates(hashrates);
+
+      await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing');
       this.hashrateIndexingStarted = false;
-      logger.info(`Hashrates indexing completed`);
+      logger.info(`Daily network hashrate indexing completed`);
     } catch (e) {
       this.hashrateIndexingStarted = false;
      throw e;
    }
  }
+
+  private getDateMidnight(date: Date): Date {
+    date.setUTCHours(0);
+    date.setUTCMinutes(0);
+    date.setUTCSeconds(0);
+    date.setUTCMilliseconds(0);
+    return date;
+  }
 }
 
 export default new Mining();
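
The weekly indexer sizes a pool's hashrate purely from block counts: the pool's share of blocks found in the window times the network hashrate estimate returned by getnetworkhashps for that window. A standalone sketch of that proportional split (the interface name is illustrative; it mirrors the blockCount/poolId fields returned by $getPoolsInfoBetween):

interface PoolBlockCount {
  poolId: number;
  blockCount: number;
}

// Mirrors the mapping in $generatePoolHashrateHistory(): a pool that found 10% of the
// window's blocks is credited with a 0.1 share and 10% of the estimated network hashrate.
function splitHashrateByPool(pools: PoolBlockCount[], networkHashrate: number) {
  const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
  return pools.map((pool) => ({
    poolId: pool.poolId,
    share: pool.blockCount / totalBlocks,
    avgHashrate: (pool.blockCount / totalBlocks) * networkHashrate,
  }));
}

// splitHashrateByPool([{ poolId: 1, blockCount: 504 }, { poolId: 2, blockCount: 504 }], 200e18)
// credits each pool with a 0.5 share and 100 EH/s out of an estimated 200 EH/s.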


@@ -11,6 +11,7 @@ export class DB {
       password: config.DATABASE.PASSWORD,
       connectionLimit: 10,
       supportBigNumbers: true,
+      timezone: '+00:00',
     });
   }
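
The timezone option is the "timezone issue" part of the fix: hashrate buckets are computed against UTC midnights, so the driver must read and write DATETIME/TIMESTAMP values as UTC rather than converting them to the server's local zone. A sketch of the surrounding pool setup, assuming the mysql2/promise driver (the connection values are illustrative; only the timezone line is the point):

import { createPool } from 'mysql2/promise';

// Illustrative pool configuration; '+00:00' makes the driver treat stored date values as UTC.
const pool = createPool({
  host: '127.0.0.1',
  user: 'mempool',
  password: 'mempool',
  database: 'mempool',
  connectionLimit: 10,
  supportBigNumbers: true,
  timezone: '+00:00',
});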


@@ -167,7 +167,8 @@ class Server {
   }
 
   async $resetHashratesIndexingState() {
-    return await HashratesRepository.$setLatestRunTimestamp(0);
+    await HashratesRepository.$setLatestRunTimestamp('last_hashrates_indexing', 0);
+    await HashratesRepository.$setLatestRunTimestamp('last_weekly_hashrates_indexing', 0);
   }
 
   async $runIndexingWhenReady() {
@@ -176,8 +177,9 @@ class Server {
     }
 
     try {
-      await blocks.$generateBlockDatabase();
-      await mining.$generateNetworkHashrateHistory();
+      blocks.$generateBlockDatabase();
+      mining.$generateNetworkHashrateHistory();
+      mining.$generatePoolHashrateHistory();
     } catch (e) {
       logger.err(`Unable to run indexing right now, trying again later. ` + e);
     }
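
Dropping the awaits makes the three indexing calls fire-and-forget, so $runIndexingWhenReady() no longer blocks on them; the surrounding try/catch then only covers errors thrown synchronously, not later promise rejections. A sketch of how such kickoffs can keep rejections observable (the handler below is an illustration, not part of the commit):

// Fire-and-forget kickoff with explicit rejection handling so failures still get logged.
function runIndexers(tasks: Array<() => Promise<void>>): void {
  for (const task of tasks) {
    task().catch((e) => console.error('Indexing task failed: ' + e)); // hypothetical handler
  }
}

// e.g. runIndexers([() => blocks.$generateBlockDatabase(), () => mining.$generatePoolHashrateHistory()]);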


@@ -53,15 +53,17 @@ class BlocksRepository {
       // logger.debug(query);
       await connection.query(query, params);
+      connection.release();
     } catch (e: any) {
+      connection.release();
       if (e.errno === 1062) { // ER_DUP_ENTRY
         logger.debug(`$saveBlockInDatabase() - Block ${block.height} has already been indexed, ignoring`);
       } else {
+        connection.release();
         logger.err('$saveBlockInDatabase() error' + (e instanceof Error ? e.message : e));
+        throw e;
       }
     }
-
-    connection.release();
   }
 
   /**
@@ -73,6 +75,7 @@ class BlocksRepository {
     }
 
     const connection = await DB.pool.getConnection();
+    try {
       const [rows]: any[] = await connection.query(`
         SELECT height
         FROM blocks
@@ -87,6 +90,11 @@ class BlocksRepository {
       const missingBlocksHeights = seekedBlocks.filter(x => indexedBlockHeights.indexOf(x) === -1);
 
       return missingBlocksHeights;
+    } catch (e) {
+      connection.release();
+      logger.err('$getMissingBlocksBetweenHeights() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
@@ -111,10 +119,16 @@ class BlocksRepository {
     // logger.debug(query);
     const connection = await DB.pool.getConnection();
+    try {
       const [rows] = await connection.query(query, params);
       connection.release();
 
       return <EmptyBlocks[]>rows;
+    } catch (e) {
+      connection.release();
+      logger.err('$getEmptyBlocks() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
@@ -143,10 +157,16 @@ class BlocksRepository {
     // logger.debug(query);
     const connection = await DB.pool.getConnection();
+    try {
       const [rows] = await connection.query(query, params);
       connection.release();
 
       return <number>rows[0].blockCount;
+    } catch (e) {
+      connection.release();
+      logger.err('$blockCount() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
@@ -177,10 +197,16 @@ class BlocksRepository {
     // logger.debug(query);
     const connection = await DB.pool.getConnection();
+    try {
       const [rows] = await connection.query(query, params);
       connection.release();
 
       return <number>rows[0];
+    } catch (e) {
+      connection.release();
+      logger.err('$blockCountBetweenTimestamp() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
@@ -194,6 +220,7 @@ class BlocksRepository {
     // logger.debug(query);
     const connection = await DB.pool.getConnection();
+    try {
       const [rows]: any[] = await connection.query(query);
       connection.release();
 
@@ -202,15 +229,17 @@ class BlocksRepository {
       }
 
       return <number>rows[0].blockTimestamp;
+    } catch (e) {
+      connection.release();
+      logger.err('$oldestBlockTimestamp() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
    * Get blocks mined by a specific mining pool
    */
-  public async $getBlocksByPool(
-    poolId: number,
-    startHeight: number | null = null
-  ): Promise<object[]> {
+  public async $getBlocksByPool(poolId: number, startHeight: number | null = null): Promise<object[]> {
     const params: any[] = [];
     let query = `SELECT height, hash as id, tx_count, size, weight, pool_id, UNIX_TIMESTAMP(blockTimestamp) as timestamp, reward
       FROM blocks
@@ -227,6 +256,7 @@ class BlocksRepository {
     // logger.debug(query);
     const connection = await DB.pool.getConnection();
+    try {
       const [rows] = await connection.query(query, params);
       connection.release();
 
@@ -235,6 +265,11 @@ class BlocksRepository {
       }
 
       return <object[]>rows;
+    } catch (e) {
+      connection.release();
+      logger.err('$getBlocksByPool() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
@@ -242,6 +277,7 @@ class BlocksRepository {
    */
   public async $getBlockByHeight(height: number): Promise<object | null> {
     const connection = await DB.pool.getConnection();
+    try {
       const [rows]: any[] = await connection.query(`
         SELECT *, UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp, pools.id as pool_id, pools.name as pool_name, pools.link as pool_link, pools.addresses as pool_addresses, pools.regexes as pool_regexes
         FROM blocks
@@ -255,6 +291,11 @@ class BlocksRepository {
       }
 
       return rows[0];
+    } catch (e) {
+      connection.release();
+      logger.err('$getBlockByHeight() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
@@ -297,6 +338,7 @@ class BlocksRepository {
       ORDER BY t.height
     `;
 
+    try {
       const [rows]: any[] = await connection.query(query);
       connection.release();
 
@@ -305,13 +347,25 @@ class BlocksRepository {
       }
 
       return rows;
+    } catch (e) {
+      connection.release();
+      logger.err('$getBlocksDifficulty() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   public async $getOldestIndexedBlockHeight(): Promise<number> {
     const connection = await DB.pool.getConnection();
+    try {
       const [rows]: any[] = await connection.query(`SELECT MIN(height) as minHeight FROM blocks`);
       connection.release();
 
       return rows[0].minHeight;
+    } catch (e) {
+      connection.release();
+      logger.err('$getOldestIndexedBlockHeight() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 }


@@ -8,6 +8,10 @@ class HashratesRepository {
    * Save indexed block data in the database
    */
   public async $saveHashrates(hashrates: any) {
+    if (hashrates.length === 0) {
+      return;
+    }
+
     let query = `INSERT INTO
       hashrates(hashrate_timestamp, avg_hashrate, pool_id, share, type) VALUES`;
@@ -20,12 +24,12 @@ class HashratesRepository {
     try {
       // logger.debug(query);
       await connection.query(query);
+      connection.release();
     } catch (e: any) {
+      connection.release();
       logger.err('$saveHashrateInDatabase() error' + (e instanceof Error ? e.message : e));
       throw e;
     }
-
-    connection.release();
   }
 
   public async $getNetworkDailyHashrate(interval: string | null): Promise<any[]> {
@@ -47,10 +51,33 @@ class HashratesRepository {
     query += ` ORDER by hashrate_timestamp`;
 
+    try {
       const [rows]: any[] = await connection.query(query);
       connection.release();
 
       return rows;
+    } catch (e) {
+      connection.release();
+      logger.err('$getNetworkDailyHashrate() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
+  }
+
+  public async $getWeeklyHashrateTimestamps(): Promise<number[]> {
+    const connection = await DB.pool.getConnection();
+    const query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp FROM hashrates where type = 'weekly' GROUP BY hashrate_timestamp`;
+
+    try {
+      const [rows]: any[] = await connection.query(query);
+      connection.release();
+
+      return rows.map(row => row.timestamp);
+    } catch (e) {
+      connection.release();
+      logger.err('$getWeeklyHashrateTimestamps() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
@@ -77,26 +104,44 @@ class HashratesRepository {
     query += ` ORDER by hashrate_timestamp, FIELD(pool_id, ${topPoolsId})`;
 
+    try {
       const [rows]: any[] = await connection.query(query);
       connection.release();
 
       return rows;
+    } catch (e) {
+      connection.release();
+      logger.err('$getPoolsWeeklyHashrate() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
-  public async $setLatestRunTimestamp(val: any = null) {
+  public async $setLatestRunTimestamp(key: string, val: any = null) {
     const connection = await DB.pool.getConnection();
-    const query = `UPDATE state SET number = ? WHERE name = 'last_hashrates_indexing'`;
-    await connection.query<any>(query, (val === null) ? [Math.round(new Date().getTime() / 1000)] : [val]);
-    connection.release();
+    const query = `UPDATE state SET number = ? WHERE name = ?`;
+
+    try {
+      await connection.query<any>(query, (val === null) ? [Math.round(new Date().getTime() / 1000), key] : [val, key]);
+      connection.release();
+    } catch (e) {
+      connection.release();
+    }
   }
 
-  public async $getLatestRunTimestamp(): Promise<number> {
+  public async $getLatestRunTimestamp(key: string): Promise<number> {
     const connection = await DB.pool.getConnection();
-    const query = `SELECT number FROM state WHERE name = 'last_hashrates_indexing'`;
-    const [rows] = await connection.query<any>(query);
-    connection.release();
+    const query = `SELECT number FROM state WHERE name = ?`;
+
+    try {
+      const [rows] = await connection.query<any>(query, [key]);
+      connection.release();
 
-    return rows[0]['number'];
+      return rows[0]['number'];
+    } catch (e) {
+      connection.release();
+      logger.err('$setLatestRunTimestamp() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 }


@@ -43,26 +43,38 @@ class PoolsRepository {
     // logger.debug(query);
     const connection = await DB.pool.getConnection();
+    try {
       const [rows] = await connection.query(query);
       connection.release();
 
       return <PoolInfo[]>rows;
+    } catch (e) {
+      connection.release();
+      logger.err('$getPoolsInfo() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
    * Get basic pool info and block count between two timestamp
    */
   public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
-    let query = `SELECT COUNT(height) as blockCount, pools.id as poolId, pools.name as poolName
+    const query = `SELECT COUNT(height) as blockCount, pools.id as poolId, pools.name as poolName
       FROM pools
       LEFT JOIN blocks on pools.id = blocks.pool_id AND blocks.blockTimestamp BETWEEN FROM_UNIXTIME(?) AND FROM_UNIXTIME(?)
       GROUP BY pools.id`;
 
     const connection = await DB.pool.getConnection();
+    try {
       const [rows] = await connection.query(query, [from, to]);
       connection.release();
 
       return <PoolInfo[]>rows;
+    } catch (e) {
+      connection.release();
+      logger.err('$getPoolsInfoBetween() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 
   /**
@@ -76,6 +88,7 @@ class PoolsRepository {
     // logger.debug(query);
     const connection = await DB.pool.getConnection();
+    try {
       const [rows] = await connection.query(query, [poolId]);
       connection.release();
 
@@ -83,6 +96,11 @@ class PoolsRepository {
       rows[0].addresses = JSON.parse(rows[0].addresses);
 
       return rows[0];
+    } catch (e) {
+      connection.release();
+      logger.err('$getPool() error' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
   }
 }


@@ -24,6 +24,7 @@ import miningStats from './api/mining';
 import axios from 'axios';
 import mining from './api/mining';
 import BlocksRepository from './repositories/BlocksRepository';
+import HashratesRepository from './repositories/HashratesRepository';
 
 class Routes {
   constructor() {}
@@ -576,7 +577,7 @@ class Routes {
   public async $getHistoricalDifficulty(req: Request, res: Response) {
     try {
-      const stats = await mining.$getHistoricalDifficulty(req.params.interval ?? null);
+      const stats = await BlocksRepository.$getBlocksDifficulty(req.params.interval ?? null);
       res.header('Pragma', 'public');
       res.header('Cache-control', 'public');
       res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
@@ -588,7 +589,7 @@ class Routes {
   public async $getPoolsHistoricalHashrate(req: Request, res: Response) {
     try {
-      const hashrates = await mining.$getPoolsHistoricalHashrates(req.params.interval ?? null, parseInt(req.params.poolId, 10));
+      const hashrates = await HashratesRepository.$getPoolsWeeklyHashrate(req.params.interval ?? null);
       const oldestIndexedBlockTimestamp = await BlocksRepository.$oldestBlockTimestamp();
       res.header('Pragma', 'public');
       res.header('Cache-control', 'public');
@@ -604,8 +605,8 @@ class Routes {
   public async $getHistoricalHashrate(req: Request, res: Response) {
     try {
-      const hashrates = await mining.$getNetworkHistoricalHashrates(req.params.interval ?? null);
-      const difficulty = await mining.$getHistoricalDifficulty(req.params.interval ?? null);
+      const hashrates = await HashratesRepository.$getNetworkDailyHashrate(req.params.interval ?? null);
+      const difficulty = await BlocksRepository.$getBlocksDifficulty(req.params.interval ?? null);
       const oldestIndexedBlockTimestamp = await BlocksRepository.$oldestBlockTimestamp();
       res.header('Pragma', 'public');
       res.header('Cache-control', 'public');