Mirror of https://github.com/mempool/mempool.git, synced 2025-03-13 11:36:07 +01:00
Import mining pools into the database - Increment db schema to 3
commit 2848f56c2b
parent bc925a409f
3 changed files with 137 additions and 3 deletions
backend/src/api/database-migration.ts

@@ -3,10 +3,10 @@ import config from '../config';
 import { DB } from '../database';
 import logger from '../logger';
 
-const sleep = (ms: number) => new Promise( res => setTimeout(res, ms));
+const sleep = (ms: number) => new Promise(res => setTimeout(res, ms));
 
 class DatabaseMigration {
-  private static currentVersion = 2;
+  private static currentVersion = 3;
   private queryTimeout = 120000;
   private statisticsAddedIndexed = false;
 
@@ -83,6 +83,9 @@ class DatabaseMigration {
       if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
         await this.$executeQuery(connection, `CREATE INDEX added ON statistics (added);`);
       }
+      if (databaseSchemaVersion < 3) {
+        await this.$executeQuery(connection, this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
+      }
       connection.release();
     } catch (e) {
       connection.release();
@@ -335,6 +338,17 @@ class DatabaseMigration {
       final_tx int(11) NOT NULL
     ) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
   }
+
+  private getCreatePoolsTableQuery(): string {
+    return `CREATE TABLE IF NOT EXISTS pools (
+      id int(11) NOT NULL AUTO_INCREMENT,
+      name varchar(50) NOT NULL,
+      link varchar(255) NOT NULL,
+      addresses text NOT NULL,
+      regexes text NOT NULL,
+      PRIMARY KEY (id)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;`;
+  }
 }
 
 export default new DatabaseMigration();
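The addresses and regexes columns created above are plain text columns; the parser added in this commit (pools-parser.ts, below) writes JSON-encoded string arrays into them, so anything reading the table back has to JSON.parse() those fields. A minimal read-back sketch under that assumption: the helper itself is hypothetical and not part of this commit, it assumes a file living next to the other backend/src/api modules, and it only reuses the DB.pool / connection.query calls that appear in the diff.

import { DB } from '../database';
import logger from '../logger';

// Hypothetical helper: list the imported pools and decode the JSON-encoded
// regexes/addresses columns written by PoolsParser.migratePoolsJson().
async function listImportedPools(): Promise<void> {
  const connection = await DB.pool.getConnection();
  try {
    const [rows] = await connection.query<any>({ sql: 'SELECT id, name, link, regexes, addresses FROM pools;', timeout: 120000 });
    for (const row of rows) {
      const regexes: string[] = JSON.parse(row.regexes);
      const addresses: string[] = JSON.parse(row.addresses);
      logger.info(`${row.name} (${row.link}): ${regexes.length} regexes, ${addresses.length} addresses`);
    }
  } finally {
    connection.release();
  }
}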
backend/src/api/pools-parser.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
+import {readFileSync} from 'fs';
+import { DB } from '../database';
+import logger from '../logger';
+
+interface Pool {
+  name: string,
+  link: string,
+  regexes: string[],
+  addresses: string[],
+}
+
+class PoolsParser {
+  /**
+   * Parse the pools.json file, consolidate the data and dump it into the database
+   */
+  public async migratePoolsJson() {
+    logger.info('Importing pools.json to the database');
+    let connection = await DB.pool.getConnection();
+
+    // Check if the pools table does not have data already, for now we do not support updating it
+    // but that will come in a later version
+    let [rows] = await connection.query<any>({ sql: 'SELECT count(id) as count from pools;', timeout: 120000 });
+    if (rows[0].count !== 0) {
+      logger.info('Pools table already contain data, updating it is not yet supported, skipping.');
+      connection.release();
+      return;
+    }
+
+    logger.info('Open ../frontend/cypress/fixtures/pools.json');
+    const fileContent: string = readFileSync('../frontend/cypress/fixtures/pools.json','utf8');
+    const poolsJson: object = JSON.parse(fileContent);
+
+    // First we save every entries without paying attention to pool duplication
+    let poolsDuplicated: Pool[] = [];
+
+    logger.info('Parse coinbase_tags');
+    const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
+    for (let i = 0; i < coinbaseTags.length; ++i) {
+      poolsDuplicated.push({
+        'name': (<Pool>coinbaseTags[i][1]).name,
+        'link': (<Pool>coinbaseTags[i][1]).link,
+        'regexes': [coinbaseTags[i][0]],
+        'addresses': [],
+      });
+    }
+    logger.info('Parse payout_addresses');
+    const addressesTags = Object.entries(poolsJson['payout_addresses']);
+    for (let i = 0; i < addressesTags.length; ++i) {
+      poolsDuplicated.push({
+        'name': (<Pool>addressesTags[i][1]).name,
+        'link': (<Pool>addressesTags[i][1]).link,
+        'regexes': [],
+        'addresses': [addressesTags[i][0]],
+      });
+    }
+
+    // Then, we find unique mining pool names
+    logger.info('Identify unique mining pools');
+    let poolNames : string[] = [];
+    for (let i = 0; i < poolsDuplicated.length; ++i) {
+      if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
+        poolNames.push(poolsDuplicated[i].name);
+      }
+    }
+    logger.info(`Found ${poolNames.length} unique mining pools`);
+
+    // Finally, we generate the final consolidated pools data
+    let finalPoolData: Pool[] = [];
+    for (let i = 0; i < poolNames.length; ++i) {
+      let allAddresses: string[] = [];
+      let allRegexes: string[] = [];
+      let match = poolsDuplicated.filter((pool: Pool) => pool.name === poolNames[i]);
+
+      for (let y = 0; y < match.length; ++y) {
+        allAddresses = allAddresses.concat(match[y].addresses);
+        allRegexes = allRegexes.concat(match[y].regexes);
+      }
+
+      finalPoolData.push({
+        'name': poolNames[i].replace("'", "''"),
+        'link': match[0].link,
+        'regexes': allRegexes,
+        'addresses': allAddresses,
+      })
+    }
+
+    // Manually add the 'unknown pool'
+    finalPoolData.push({
+      'name': 'Unknown',
+      'link': 'https://learnmeabitcoin.com/technical/coinbase-transaction',
+      regexes: [],
+      addresses: [],
+    })
+
+    // Dump everything into the database
+    logger.info(`Insert mining pool info into the database`);
+    let query: string = 'INSERT INTO pools(name, link, regexes, addresses) VALUES ';
+    for (let i = 0; i < finalPoolData.length; ++i) {
+      query += `('${finalPoolData[i].name}', '${finalPoolData[i].link}',
+        '${JSON.stringify(finalPoolData[i].regexes)}', '${JSON.stringify(finalPoolData[i].addresses)}'),`;
+    }
+    query = query.slice(0, -1) + ';';
+
+    try {
+      await connection.query<any>({ sql: 'DELETE FROM pools;', timeout: 120000 }); // We clear the table before insertion
+      await connection.query<any>({ sql: query, timeout: 120000 });
+      connection.release();
+      logger.info('Import completed');
+    } catch (e) {
+      connection.release();
+      logger.info(`Unable to import pools in the database!`);
+      throw e;
+    }
+  }
+
+}
+
+export default new PoolsParser();
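For reference, a minimal sketch of the pools.json shape that migratePoolsJson() expects, inferred from the coinbase_tags and payout_addresses lookups above; the entries are illustrative placeholders, not values from the real fixture.

// Assumed shape of ../frontend/cypress/fixtures/pools.json (inferred from the parser above).
// coinbase_tags is keyed by a coinbase tag/regex, payout_addresses by a payout address;
// both map to a { name, link } pair. The values below are placeholders.
const examplePoolsJson = {
  'coinbase_tags': {
    '/ExamplePool/': { 'name': 'Example Pool', 'link': 'https://pool.example' },
  },
  'payout_addresses': {
    'bc1qexampleaddress00000000000000000000000': { 'name': 'Example Pool', 'link': 'https://pool.example' },
  },
};

With that shape, Object.entries(poolsJson['coinbase_tags']) yields [tag, { name, link }] pairs, which is what the two loops above consolidate into the regexes and addresses arrays.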
backend/src/index.ts

@@ -22,6 +22,7 @@ import loadingIndicators from './api/loading-indicators';
 import mempool from './api/mempool';
 import elementsParser from './api/liquid/elements-parser';
 import databaseMigration from './api/database-migration';
+import poolsParser from './api/pools-parser';
 import syncAssets from './sync-assets';
 import icons from './api/liquid/icons';
 import { Common } from './api/common';
@@ -88,6 +89,7 @@ class Server {
       await checkDbConnection();
       try {
         await databaseMigration.$initializeOrMigrateDatabase();
+        await poolsParser.migratePoolsJson();
       } catch (e) {
         throw new Error(e instanceof Error ? e.message : 'Error');
       }