From 4723a9d41b17cdd8b8d31a7c9215aa4cedd16b3a Mon Sep 17 00:00:00 2001
From: nymkappa
Date: Thu, 7 Jul 2022 13:41:09 +0200
Subject: [PATCH 1/2] Re-index related blocks when mining pool.json changes

---
 backend/src/api/pools-parser.ts | 106 ++++++++++++++++++++++----------
 1 file changed, 74 insertions(+), 32 deletions(-)

diff --git a/backend/src/api/pools-parser.ts b/backend/src/api/pools-parser.ts
index 15a4fe7be..29d916376 100644
--- a/backend/src/api/pools-parser.ts
+++ b/backend/src/api/pools-parser.ts
@@ -1,6 +1,7 @@
 import DB from '../database';
 import logger from '../logger';
 import config from '../config';
+import BlocksRepository from '../repositories/BlocksRepository';
 
 interface Pool {
   name: string;
@@ -32,7 +33,6 @@ class PoolsParser {
     // First we save every entries without paying attention to pool duplication
     const poolsDuplicated: Pool[] = [];
 
-    logger.debug('Parse coinbase_tags');
     const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
     for (let i = 0; i < coinbaseTags.length; ++i) {
       poolsDuplicated.push({
@@ -43,7 +43,6 @@ class PoolsParser {
         'slug': ''
       });
     }
-    logger.debug('Parse payout_addresses');
     const addressesTags = Object.entries(poolsJson['payout_addresses']);
     for (let i = 0; i < addressesTags.length; ++i) {
       poolsDuplicated.push({
@@ -56,7 +55,6 @@ class PoolsParser {
     }
 
     // Then, we find unique mining pool names
-    logger.debug('Identify unique mining pools');
     const poolNames: string[] = [];
     for (let i = 0; i < poolsDuplicated.length; ++i) {
       if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
@@ -119,8 +117,15 @@ class PoolsParser {
         'slug': slug
       };
 
-      if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) {
-        finalPoolDataUpdate.push(poolObj);
+      const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
+      if (existingPool !== undefined) {
+        // Check if any data was actually updated
+        const equals = (a, b) =>
+          a.length === b.length &&
+          a.every((v, i) => v === b[i]);
+        if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
+          finalPoolDataUpdate.push(poolObj);
+        }
       } else {
         logger.debug(`Add '${finalPoolName}' mining pool`);
         finalPoolDataAdd.push(poolObj);
@@ -140,40 +145,51 @@ class PoolsParser {
       return;
     }
 
-    logger.debug(`Update pools table now`);
+    if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) {
+      logger.debug(`Update pools table now`);
 
-    // Add new mining pools into the database
-    let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
-    for (let i = 0; i < finalPoolDataAdd.length; ++i) {
-      queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
-      '${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
-      ${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
-    }
-    queryAdd = queryAdd.slice(0, -1) + ';';
+      // Add new mining pools into the database
+      let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
+      for (let i = 0; i < finalPoolDataAdd.length; ++i) {
+        queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
+        '${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
+        ${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
+      }
+      queryAdd = queryAdd.slice(0, -1) + ';';
 
-    // Updated existing mining pools in the database
-    const updateQueries: string[] = [];
-    for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
-      updateQueries.push(`
-        UPDATE pools
-        SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
-        regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
-        slug='${finalPoolDataUpdate[i].slug}'
-        WHERE name='${finalPoolDataUpdate[i].name}'
-      ;`);
+      // Updated existing mining pools in the database
+      const updateQueries: string[] = [];
+      for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
+        updateQueries.push(`
+          UPDATE pools
+          SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
+          regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
+          slug='${finalPoolDataUpdate[i].slug}'
+          WHERE name='${finalPoolDataUpdate[i].name}'
+        ;`);
+      }
+
+      try {
+        await this.$deleteBlocksToReindex(finalPoolDataUpdate);
+
+        if (finalPoolDataAdd.length > 0) {
+          await DB.query({ sql: queryAdd, timeout: 120000 });
+        }
+        for (const query of updateQueries) {
+          await DB.query({ sql: query, timeout: 120000 });
+        }
+        await this.insertUnknownPool();
+        logger.info('Mining pools.json import completed');
+      } catch (e) {
+        logger.err(`Cannot import pools in the database`);
+        throw e;
+      }
     }
 
     try {
-      if (finalPoolDataAdd.length > 0) {
-        await DB.query({ sql: queryAdd, timeout: 120000 });
-      }
-      for (const query of updateQueries) {
-        await DB.query({ sql: query, timeout: 120000 });
-      }
       await this.insertUnknownPool();
-      logger.info('Mining pools.json import completed');
     } catch (e) {
-      logger.err(`Cannot import pools in the database`);
+      logger.err(`Cannot insert unknown pool in the database`);
       throw e;
     }
   }
@@ -201,6 +217,32 @@ class PoolsParser {
       logger.err('Unable to insert "Unknown" mining pool');
     }
   }
+
+  /**
+   * Delete blocks which need to be re-indexed
+   */
+  private async $deleteBlocksToReindex(finalPoolDataUpdate: any[]) {
+    const blockCount = await BlocksRepository.$blockCount(null, null);
+    if (blockCount === 0) {
+      return;
+    }
+
+    for (const updatedPool of finalPoolDataUpdate) {
+      const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
+      if (pool.length > 0) {
+        logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`);
+        await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
+      }
+    }
+
+    // Ignore early days of Bitcoin as there were no mining pools yet
+    logger.notice('Deleting blocks with unknown mining pool from height 130635 for future re-indexing');
+    const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
+    await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
+
+    logger.notice('Truncating hashrates for future re-indexing');
+    await DB.query(`DELETE FROM hashrates`);
+  }
 }
 
 export default new PoolsParser();

From d0381e7850ee20de1a67bd58f4936bf257879b38 Mon Sep 17 00:00:00 2001
From: Stephan Oeste
Date: Fri, 8 Jul 2022 15:59:39 +0200
Subject: [PATCH 2/2] Install nginx config also for freebsd on prod install

---
 production/install | 35 +++++++++++++++--------------------
 1 file changed, 15 insertions(+), 20 deletions(-)

diff --git a/production/install b/production/install
index 9a48b50c5..23cb0e634 100755
--- a/production/install
+++ b/production/install
@@ -183,6 +183,9 @@ case $OS in
         TOR_PKG=tor
         TOR_USER=_tor
         TOR_GROUP=_tor
+        NGINX_USER=www
+        NGINX_ETC_FOLDER=/usr/local/etc/nginx
+        NGINX_CONFIGURATION=/usr/local/etc/nginx/nginx.conf
         CERTBOT_PKG=py39-certbot
         ;;
 
@@ -197,6 +200,7 @@ case $OS in
         TOR_USER=debian-tor
         TOR_GROUP=debian-tor
         CERTBOT_PKG=python3-certbot-nginx
+        NGINX_USER=www-data
         NGINX_ETC_FOLDER=/etc/nginx
         NGINX_CONFIGURATION=/etc/nginx/nginx.conf
         ;;
@@ -1536,26 +1540,17 @@ NGINX_BISQ_ONION=$(cat "${TOR_RESOURCES}/bisq/hostname")
 NGINX_LIQUID_ONION=$(cat "${TOR_RESOURCES}/liquid/hostname")
 
 echo "[*] Adding Nginx configuration"
-case $OS in
-
-    FreeBSD)
-        echo "[*] FIXME: nginx must be configured manually on FreeBSD"
-        ;;
-
-    Debian)
-        osSudo "${ROOT_USER}" install -c -o "${ROOT_USER}" -g "${ROOT_GROUP}" -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/nginx/nginx.conf" "${NGINX_CONFIGURATION}"
-        mkdir -p /var/cache/nginx/services /var/cache/nginx/api
-        chown www-data: /var/cache/nginx/services /var/cache/nginx/api
-        ln -s /mempool/mempool /etc/nginx/mempool
-        osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_USER__!www-data!" "${NGINX_CONFIGURATION}"
-        osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_ETC_FOLDER__!${NGINX_ETC_FOLDER}!" "${NGINX_CONFIGURATION}"
-        osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_MEMPOOL_ONION__!${NGINX_MEMPOOL_ONION%.onion}!" "${NGINX_CONFIGURATION}"
-        osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_BISQ_ONION__!${NGINX_BISQ_ONION%.onion}!" "${NGINX_CONFIGURATION}"
-        osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_LIQUID_ONION__!${NGINX_LIQUID_ONIONi%.onion}!" "${NGINX_CONFIGURATION}"
-        echo "[*] Restarting Nginx"
-        osSudo "${ROOT_USER}" service nginx restart
-        ;;
-esac
+osSudo "${ROOT_USER}" install -c -o "${ROOT_USER}" -g "${ROOT_GROUP}" -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/nginx/nginx.conf" "${NGINX_CONFIGURATION}"
+mkdir -p /var/cache/nginx/services /var/cache/nginx/api
+chown ${NGINX_USER}: /var/cache/nginx/services /var/cache/nginx/api
+ln -s /mempool/mempool /etc/nginx/mempool
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_USER__!${NGINX_USER}!" "${NGINX_CONFIGURATION}"
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_ETC_FOLDER__!${NGINX_ETC_FOLDER}!" "${NGINX_CONFIGURATION}"
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_MEMPOOL_ONION__!${NGINX_MEMPOOL_ONION%.onion}!" "${NGINX_CONFIGURATION}"
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_BISQ_ONION__!${NGINX_BISQ_ONION%.onion}!" "${NGINX_CONFIGURATION}"
+osSudo "${ROOT_USER}" sed -i.orig "s!__NGINX_LIQUID_ONION__!${NGINX_LIQUID_ONION%.onion}!" "${NGINX_CONFIGURATION}"
+echo "[*] Restarting Nginx"
+osSudo "${ROOT_USER}" service nginx restart
 
 ##### OS systemd
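
A note on the $deleteBlocksToReindex helper added in PATCH 1/2: it splices the pool slug and id straight into its SELECT and DELETE statements. A minimal sketch of the same lookup-and-delete flow written with bound parameters is shown below. It assumes the backend's DB.query wrapper forwards a values array to the underlying mysql2 pool, as the repository classes do; the standalone function name deleteBlocksForUpdatedPools is illustrative only and is not part of the patch.

import DB from '../database';
import logger from '../logger';

// Sketch: delete blocks attributed to pools whose definition changed, passing
// the slug and id as bound parameters rather than interpolating them into SQL.
async function deleteBlocksForUpdatedPools(updatedPools: { slug: string }[]): Promise<void> {
  for (const updatedPool of updatedPools) {
    const [pool]: any[] = await DB.query('SELECT id, name FROM pools WHERE slug = ?', [updatedPool.slug]);
    if (pool.length > 0) {
      logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`);
      await DB.query('DELETE FROM blocks WHERE pool_id = ?', [pool[0].id]);
    }
  }
}

The behaviour mirrors the patch (look up each updated pool by slug, then drop its blocks so they can be re-indexed); only the query construction differs.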