import { readFileSync } from 'fs';
import { DB } from '../database';
import logger from '../logger';

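// A `Pool` record models both a single pools.json entry and a consolidated
// row of the `pools` table (regexes/addresses are stored as JSON strings)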
interface Pool {
  name: string;
  link: string;
  regexes: string[];
  addresses: string[];
}

class PoolsParser {
  /**
   * Parse the pools.json file, consolidate the data and dump it into the database
   */
  public async migratePoolsJson() {
    const connection = await DB.pool.getConnection();

    logger.info('Importing pools.json to the database');

    // Get existing pools from the db
    const [existingPools] = await connection.query<any>({ sql: 'SELECT * FROM pools;', timeout: 120000 });

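    // The file is expected to have this rough shape (illustrative sample, not real data):
    //
    // {
    //   "coinbase_tags":    { "/FooPool/": { "name": "Foo Pool", "link": "https://foopool.example" } },
    //   "payout_addresses": { "1FooPoolExampleAddress": { "name": "Foo Pool", "link": "https://foopool.example" } }
    // }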
    logger.info('Open ./pools.json');
    const fileContent: string = readFileSync('./pools.json', 'utf8');
    const poolsJson: object = JSON.parse(fileContent);

    // First we save every entry without paying attention to pool duplication
    const poolsDuplicated: Pool[] = [];

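    // Every coinbase tag and payout address first becomes its own one-entry Pool
    // record; records sharing a pool name are merged during consolidation below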
    logger.info('Parse coinbase_tags');
    const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
    for (let i = 0; i < coinbaseTags.length; ++i) {
      poolsDuplicated.push({
        'name': (<Pool>coinbaseTags[i][1]).name,
        'link': (<Pool>coinbaseTags[i][1]).link,
        'regexes': [coinbaseTags[i][0]],
        'addresses': [],
      });
    }

    logger.info('Parse payout_addresses');
    const addressesTags = Object.entries(poolsJson['payout_addresses']);
    for (let i = 0; i < addressesTags.length; ++i) {
      poolsDuplicated.push({
        'name': (<Pool>addressesTags[i][1]).name,
        'link': (<Pool>addressesTags[i][1]).link,
        'regexes': [],
        'addresses': [addressesTags[i][0]],
      });
    }

    // Then, we find unique mining pool names
    logger.info('Identify unique mining pools');
    const poolNames: string[] = [];
    for (let i = 0; i < poolsDuplicated.length; ++i) {
      if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
        poolNames.push(poolsDuplicated[i].name);
      }
    }
    logger.info(`Found ${poolNames.length} unique mining pools`);

    // Finally, we generate the final consolidated pools data
    const finalPoolDataAdd: Pool[] = [];
    const finalPoolDataUpdate: Pool[] = [];
    for (let i = 0; i < poolNames.length; ++i) {
      let allAddresses: string[] = [];
      let allRegexes: string[] = [];
      const match = poolsDuplicated.filter((pool: Pool) => pool.name === poolNames[i]);

      for (let y = 0; y < match.length; ++y) {
        allAddresses = allAddresses.concat(match[y].addresses);
        allRegexes = allRegexes.concat(match[y].regexes);
      }

      const finalPoolName = poolNames[i].replace(/'/g, `''`); // Escape every single quote (not just the first) so the raw SQL queries below stay valid

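      // Pools already present in the database are queued for UPDATE, new ones for INSERT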
      if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) {
        logger.debug(`Update '${finalPoolName}' mining pool`);
        finalPoolDataUpdate.push({
          'name': finalPoolName,
          'link': match[0].link,
          'regexes': allRegexes,
          'addresses': allAddresses,
        });
      } else {
        logger.debug(`Add '${finalPoolName}' mining pool`);
        finalPoolDataAdd.push({
          'name': finalPoolName,
          'link': match[0].link,
          'regexes': allRegexes,
          'addresses': allAddresses,
        });
      }
    }

    // Manually add the 'Unknown' pool
    if (existingPools.find((pool) => pool.name === 'Unknown') === undefined) {
      finalPoolDataAdd.push({
        'name': 'Unknown',
        'link': 'https://learnmeabitcoin.com/technical/coinbase-transaction',
        'regexes': [],
        'addresses': [],
      });
    }

    logger.info(`Update pools table now`);

    // Add new mining pools into the database
    let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses) VALUES ';
    for (let i = 0; i < finalPoolDataAdd.length; ++i) {
      queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
        '${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}'),`;
    }
    // Swap the trailing comma for a semicolon; the query is only executed below
    // when finalPoolDataAdd is non-empty, so the bare VALUES clause is never run
    queryAdd = queryAdd.slice(0, -1) + ';';

    // Update existing mining pools in the database
    const updateQueries: string[] = [];
    for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
      updateQueries.push(`
        UPDATE pools
        SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
        regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}'
        WHERE name='${finalPoolDataUpdate[i].name}'
      ;`);
    }

    try {
      if (finalPoolDataAdd.length > 0) {
        await connection.query<any>({ sql: queryAdd, timeout: 120000 });
      }
      // Await each update sequentially so any failure is caught by this
      // try/catch before the connection is released
      for (const query of updateQueries) {
        await connection.query<any>({ sql: query, timeout: 120000 });
      }
      connection.release();
      logger.info('Import completed');
    } catch (e) {
      connection.release();
      logger.info(`Unable to import pools in the database!`);
      throw e;
    }
  }
}

export default new PoolsParser();
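
// Hypothetical usage from a migration script (the import path is illustrative):
//
//   import poolsParser from './pools-parser';
//   await poolsParser.migratePoolsJson();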