mempool/mempool (mirror of https://github.com/mempool/mempool.git)

commit d2bae2fa8b: Merge branch 'master' into knorrium/backend_unit_tests
@@ -1,6 +1,7 @@
 import DB from '../database';
 import logger from '../logger';
 import config from '../config';
+import BlocksRepository from '../repositories/BlocksRepository';
 
 interface Pool {
   name: string;
@@ -32,7 +33,6 @@ class PoolsParser
     // First we save every entry without paying attention to pool duplication
     const poolsDuplicated: Pool[] = [];
 
-    logger.debug('Parse coinbase_tags');
     const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
     for (let i = 0; i < coinbaseTags.length; ++i) {
       poolsDuplicated.push({
@@ -43,7 +43,6 @@ class PoolsParser
         'slug': ''
       });
     }
-    logger.debug('Parse payout_addresses');
     const addressesTags = Object.entries(poolsJson['payout_addresses']);
     for (let i = 0; i < addressesTags.length; ++i) {
       poolsDuplicated.push({
@@ -56,7 +55,6 @@ class PoolsParser
     }
 
     // Then, we find unique mining pool names
-    logger.debug('Identify unique mining pools');
     const poolNames: string[] = [];
     for (let i = 0; i < poolsDuplicated.length; ++i) {
       if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
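The `indexOf` scan in the hunk above dedupes pool names in O(n²). For illustration only (not part of the commit), an equivalent uniquing step with a `Set`; the sample pool names are made up:

```typescript
interface Pool { name: string; }

// Same uniquing as the indexOf loop above, expressed with a Set (O(n) instead of O(n^2)).
function uniquePoolNames(poolsDuplicated: Pool[]): string[] {
  return [...new Set(poolsDuplicated.map((pool) => pool.name))];
}

// Example: three entries, two distinct names.
console.log(uniquePoolNames([{ name: 'F2Pool' }, { name: 'AntPool' }, { name: 'F2Pool' }]));
// -> [ 'F2Pool', 'AntPool' ]
```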
@@ -119,8 +117,15 @@ class PoolsParser
           'slug': slug
         };
 
-        if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) {
-          finalPoolDataUpdate.push(poolObj);
+        const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
+        if (existingPool !== undefined) {
+          // Check if any data was actually updated
+          const equals = (a, b) =>
+            a.length === b.length &&
+            a.every((v, i) => v === b[i]);
+          if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
+            finalPoolDataUpdate.push(poolObj);
+          }
         } else {
           logger.debug(`Add '${finalPoolName}' mining pool`);
           finalPoolDataAdd.push(poolObj);
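The new `equals` helper is what decides whether an existing row gets queued for an UPDATE: the DB stores `addresses` and `regexes` as JSON text, so they are parsed back into arrays and compared element-wise against the freshly parsed pools.json data. A standalone illustration (the sample values are hypothetical):

```typescript
// Element-wise array comparison, as introduced in the diff above.
const equals = (a: any[], b: any[]): boolean =>
  a.length === b.length &&
  a.every((v, i) => v === b[i]);

// Hypothetical column value (JSON text) vs. freshly parsed pools.json value.
const storedAddresses = '["bc1qexample1", "bc1qexample2"]';
const parsedAddresses = ['bc1qexample1', 'bc1qexample2'];

console.log(equals(JSON.parse(storedAddresses), parsedAddresses));  // true  -> no UPDATE queued
console.log(equals(JSON.parse(storedAddresses), ['bc1qexample1'])); // false -> UPDATE queued
```

Note that the comparison is order-sensitive: the same addresses in a different order count as a change.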
@@ -140,40 +145,51 @@ class PoolsParser
       return;
     }
 
-    logger.debug(`Update pools table now`);
+    if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) {
+      logger.debug(`Update pools table now`);
 
-    // Add new mining pools into the database
-    let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
-    for (let i = 0; i < finalPoolDataAdd.length; ++i) {
-      queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
-      '${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
-      ${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
-    }
-    queryAdd = queryAdd.slice(0, -1) + ';';
+      // Add new mining pools into the database
+      let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
+      for (let i = 0; i < finalPoolDataAdd.length; ++i) {
+        queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
+        '${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
+        ${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
+      }
+      queryAdd = queryAdd.slice(0, -1) + ';';
 
-    // Updated existing mining pools in the database
-    const updateQueries: string[] = [];
-    for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
-      updateQueries.push(`
-        UPDATE pools
-        SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
-        regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
-        slug='${finalPoolDataUpdate[i].slug}'
-        WHERE name='${finalPoolDataUpdate[i].name}'
-      ;`);
-    }
+      // Update existing mining pools in the database
+      const updateQueries: string[] = [];
+      for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
+        updateQueries.push(`
+          UPDATE pools
+          SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
+          regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
+          slug='${finalPoolDataUpdate[i].slug}'
+          WHERE name='${finalPoolDataUpdate[i].name}'
+        ;`);
+      }
 
-    try {
-      if (finalPoolDataAdd.length > 0) {
-        await DB.query({ sql: queryAdd, timeout: 120000 });
-      }
-      for (const query of updateQueries) {
-        await DB.query({ sql: query, timeout: 120000 });
-      }
-      await this.insertUnknownPool();
-      logger.info('Mining pools.json import completed');
-    } catch (e) {
-      logger.err(`Cannot import pools in the database`);
-      throw e;
-    }
+      try {
+        await this.$deleteBlocksToReindex(finalPoolDataUpdate);
+
+        if (finalPoolDataAdd.length > 0) {
+          await DB.query({ sql: queryAdd, timeout: 120000 });
+        }
+        for (const query of updateQueries) {
+          await DB.query({ sql: query, timeout: 120000 });
+        }
+        await this.insertUnknownPool();
+        logger.info('Mining pools.json import completed');
+      } catch (e) {
+        logger.err(`Cannot import pools in the database`);
+        throw e;
+      }
+    } else {
+      try {
+        await this.insertUnknownPool();
+      } catch (e) {
+        logger.err(`Cannot insert unknown pool in the database`);
+        throw e;
+      }
+    }
   }
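Both `queryAdd` and `updateQueries` above are assembled by interpolating pool data straight into the SQL text, so a pool name containing a single quote would break the statement. For comparison, a minimal sketch of the same bulk insert with driver-side placeholders, assuming the mysql2 driver is used directly (an assumption: the project's own `DB.query` wrapper is only shown taking `{ sql, timeout }`):

```typescript
import mysql from 'mysql2/promise';

interface Pool {
  name: string;
  link: string;
  regexes: string[];
  addresses: string[];
  slug: string;
}

// Bulk insert with placeholders: the driver handles quoting and escaping.
async function insertPools(connection: mysql.Connection, pools: Pool[]): Promise<void> {
  if (pools.length === 0) {
    return;
  }
  const values = pools.map((pool) => [
    pool.name,
    pool.link,
    JSON.stringify(pool.regexes),
    JSON.stringify(pool.addresses),
    pool.slug,
  ]);
  // mysql2 expands a nested array after `VALUES ?` into one row per inner array.
  await connection.query('INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ?', [values]);
}
```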
@@ -201,6 +217,32 @@ class PoolsParser
       logger.err('Unable to insert "Unknown" mining pool');
     }
   }
+
+  /**
+   * Delete blocks which need to be reindexed
+   */
+  private async $deleteBlocksToReindex(finalPoolDataUpdate: any[]) {
+    const blockCount = await BlocksRepository.$blockCount(null, null);
+    if (blockCount === 0) {
+      return;
+    }
+
+    for (const updatedPool of finalPoolDataUpdate) {
+      const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
+      if (pool.length > 0) {
+        logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`);
+        await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
+      }
+    }
+
+    // Ignore the early days of Bitcoin as there were no mining pools yet
+    logger.notice('Deleting blocks with unknown mining pool from height 130635 for future re-indexing');
+    const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
+    await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
+
+    logger.notice('Truncating hashrates for future re-indexing');
+    await DB.query(`DELETE FROM hashrates`);
+  }
 }
 
 export default new PoolsParser();
@@ -183,6 +183,9 @@ case $OS in
         TOR_PKG=tor
         TOR_USER=_tor
        TOR_GROUP=_tor
+        NGINX_USER=www
+        NGINX_ETC_FOLDER=/usr/local/etc/nginx
+        NGINX_CONFIGURATION=/usr/local/etc/nginx/nginx.conf
         CERTBOT_PKG=py39-certbot
         ;;
 
@@ -197,6 +200,7 @@ case $OS in
         TOR_USER=debian-tor
         TOR_GROUP=debian-tor
         CERTBOT_PKG=python3-certbot-nginx
+        NGINX_USER=www-data
         NGINX_ETC_FOLDER=/etc/nginx
         NGINX_CONFIGURATION=/etc/nginx/nginx.conf
         ;;
@@ -1534,26 +1538,17 @@ NGINX_BISQ_ONION=$(cat "${TOR_RESOURCES}/bisq/hostname")
 NGINX_LIQUID_ONION=$(cat "${TOR_RESOURCES}/liquid/hostname")
 
 echo "[*] Adding Nginx configuration"
-case $OS in
-
-FreeBSD)
-    echo "[*] FIXME: nginx must be configured manually on FreeBSD"
-    ;;
-
-Debian)
-    osSudo "${ROOT_USER}" install -c -o "${ROOT_USER}" -g "${ROOT_GROUP}" -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/nginx/nginx.conf" "${NGINX_CONFIGURATION}"
-    mkdir -p /var/cache/nginx/services /var/cache/nginx/api
-    chown www-data: /var/cache/nginx/services /var/cache/nginx/api
-    ln -s /mempool/mempool /etc/nginx/mempool
-    osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_USER__!www-data!" "${NGINX_CONFIGURATION}"
-    osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_ETC_FOLDER__!${NGINX_ETC_FOLDER}!" "${NGINX_CONFIGURATION}"
-    osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_MEMPOOL_ONION__!${NGINX_MEMPOOL_ONION%.onion}!" "${NGINX_CONFIGURATION}"
-    osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_BISQ_ONION__!${NGINX_BISQ_ONION%.onion}!" "${NGINX_CONFIGURATION}"
-    osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_LIQUID_ONION__!${NGINX_LIQUID_ONION%.onion}!" "${NGINX_CONFIGURATION}"
-    echo "[*] Restarting Nginx"
-    osSudo "${ROOT_USER}" service nginx restart
-    ;;
-esac
+osSudo "${ROOT_USER}" install -c -o "${ROOT_USER}" -g "${ROOT_GROUP}" -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/nginx/nginx.conf" "${NGINX_CONFIGURATION}"
+mkdir -p /var/cache/nginx/services /var/cache/nginx/api
+chown ${NGINX_USER}: /var/cache/nginx/services /var/cache/nginx/api
+ln -s /mempool/mempool /etc/nginx/mempool
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_USER__!${NGINX_USER}!" "${NGINX_CONFIGURATION}"
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_ETC_FOLDER__!${NGINX_ETC_FOLDER}!" "${NGINX_CONFIGURATION}"
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_MEMPOOL_ONION__!${NGINX_MEMPOOL_ONION%.onion}!" "${NGINX_CONFIGURATION}"
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_BISQ_ONION__!${NGINX_BISQ_ONION%.onion}!" "${NGINX_CONFIGURATION}"
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_LIQUID_ONION__!${NGINX_LIQUID_ONION%.onion}!" "${NGINX_CONFIGURATION}"
+echo "[*] Restarting Nginx"
+osSudo "${ROOT_USER}" service nginx restart
 
 ##### OS systemd