Cleanup logs

nymkappa 2022-12-01 15:52:06 +01:00
parent 5ff5275b36
commit efb48271f9
No known key found for this signature in database
GPG key ID: E155910B16E8BD04
16 changed files with 146 additions and 145 deletions
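In short: the Logger class gains a tags map (Mining, Lightning), every log method accepts an optional tag parameter, and msg() now prefixes the tag to both console and syslog output. Call sites across the mining and lightning code pass logger.tags.mining or logger.tags.ln, several noisy info logs are demoted to debug, and hardcoded 10-second progress-log throttles are replaced by config.LIGHTNING.LOGGER_UPDATE_INTERVAL.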

View file

@@ -296,7 +296,7 @@ class Blocks {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`);
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
}
@@ -309,12 +309,12 @@ class Blocks {
newlyIndexed++;
}
if (newlyIndexed > 0) {
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
} else {
logger.debug(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
logger.debug(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
}
} catch (e) {
logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
throw e;
}
}
@@ -385,7 +385,7 @@ class Blocks {
const lastBlockToIndex = Math.max(0, currentBlockHeight - indexingBlockAmount + 1);
logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);
logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`, logger.tags.mining);
loadingIndicators.setProgress('block-indexing', 0);
const chunkSize = 10000;
@@ -405,7 +405,7 @@ class Blocks {
continue;
}
logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);
logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`, logger.tags.mining);
for (const blockHeight of missingBlockHeights) {
if (blockHeight < lastBlockToIndex) {
@@ -418,7 +418,7 @@ class Blocks {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`);
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('block-indexing', progress, false);
@@ -435,13 +435,13 @@ class Blocks {
currentBlockHeight -= chunkSize;
}
if (newlyIndexed > 0) {
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`);
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
} else {
logger.debug(`Block indexing completed: indexed ${newlyIndexed} blocks`);
logger.debug(`Block indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
}
loadingIndicators.setProgress('block-indexing', 100);
} catch (e) {
logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
loadingIndicators.setProgress('block-indexing', 100);
throw e;
}
@@ -537,7 +537,7 @@ class Blocks {
priceId: lastestPriceId,
}]);
} else {
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`)
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`, logger.tags.mining);
setTimeout(() => {
indexer.runSingleTask('blocksPrices');
}, 10000);

View file

@@ -670,9 +670,7 @@ class ChannelsApi {
AND status != 2
`);
if (result[0].changedRows ?? 0 > 0) {
logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
} else {
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`, logger.tags.ln);
}
} catch (e) {
logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));

View file

@@ -681,9 +681,7 @@ class NodesApi {
)
`);
if (result[0].changedRows ?? 0 > 0) {
logger.info(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
} else {
logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`, logger.tags.ln);
}
} catch (e) {
logger.err('$setNodesInactive() error: ' + (e instanceof Error ? e.message : e));

View file

@@ -141,13 +141,13 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
// main data directory provided, default to using the bitcoin mainnet subdirectory
// to be removed in v0.2.0
else if (fExists(rpcPath, 'bitcoin', 'lightning-rpc')) {
logger.warn(`[CLightningClient] ${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`)
logger.warn(`[CLightningClient] specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`)
logger.warn(`${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`, logger.tags.ln)
logger.warn(`specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`, logger.tags.ln)
rpcPath = path.join(rpcPath, 'bitcoin', 'lightning-rpc')
}
}
logger.debug(`[CLightningClient] Connecting to ${rpcPath}`);
logger.debug(`Connecting to ${rpcPath}`, logger.tags.ln);
super();
this.rpcPath = rpcPath;
@@ -172,19 +172,19 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
this.clientConnectionPromise = new Promise<void>(resolve => {
_self.client.on('connect', () => {
logger.info(`[CLightningClient] Lightning client connected`);
logger.info(`CLightning client connected`, logger.tags.ln);
_self.reconnectWait = 1;
resolve();
});
_self.client.on('end', () => {
logger.err('[CLightningClient] Lightning client connection closed, reconnecting');
logger.err(`CLightning client connection closed, reconnecting`, logger.tags.ln);
_self.increaseWaitTime();
_self.reconnect();
});
_self.client.on('error', error => {
logger.err(`[CLightningClient] Lightning client connection error: ${error}`);
logger.err(`CLightning client connection error: ${error}`, logger.tags.ln);
_self.increaseWaitTime();
_self.reconnect();
});
@@ -196,7 +196,6 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
return;
}
const data = JSON.parse(line);
// logger.debug(`[CLightningClient] #${data.id} <-- ${JSON.stringify(data.error || data.result)}`);
_self.emit('res:' + data.id, data);
});
}
@@ -217,7 +216,7 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
}
this.reconnectTimeout = setTimeout(() => {
logger.debug('[CLightningClient] Trying to reconnect...');
logger.debug(`Trying to reconnect...`, logger.tags.ln);
_self.client.connect(_self.rpcPath);
_self.reconnectTimeout = null;
@@ -235,7 +234,6 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
id: '' + callInt
};
// logger.debug(`[CLightningClient] #${callInt} --> ${method} ${args}`);
// Wait for the client to connect
return this.clientConnectionPromise

View file

@@ -2,6 +2,7 @@ import { ILightningApi } from '../lightning-api.interface';
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
import logger from '../../../logger';
import { Common } from '../../common';
import config from '../../../config';
/**
* Convert a clightning "listnode" entry to a lnd node entry
@@ -40,7 +41,7 @@ export function convertNode(clNode: any): ILightningApi.Node {
* Convert clightning "listchannels" response to lnd "describegraph.edges" format
*/
export async function convertAndmergeBidirectionalChannels(clChannels: any[]): Promise<ILightningApi.Channel[]> {
logger.info('Converting clightning nodes and channels to lnd graph format');
logger.debug(`Converting clightning nodes and channels to lnd graph format`, logger.tags.ln);
let loggerTimer = new Date().getTime() / 1000;
let channelProcessed = 0;
@@ -62,8 +63,8 @@ export async function convertAndmergeBidirectionalChannels(clChannels: any[]): P
}
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`, logger.tags.ln);
loggerTimer = new Date().getTime() / 1000;
}
@@ -76,7 +77,7 @@ export async function convertAndmergeBidirectionalChannels(clChannels: any[]): P
consolidatedChannelList.push(await buildIncompleteChannel(clChannelsDict[short_channel_id]));
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
loggerTimer = new Date().getTime() / 1000;
}

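Note: the hunks above swap the hardcoded 10-second throttle for config.LIGHTNING.LOGGER_UPDATE_INTERVAL; the same substitution recurs in the sync tasks below. The config definition itself is not part of this diff. A minimal sketch of the assumed addition, with an illustrative default:

// Assumed shape of the new backend setting; only the key name appears in this diff.
LIGHTNING: {
  LOGGER_UPDATE_INTERVAL: 30, // seconds between throttled progress log lines (default assumed)
},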
View file

@@ -265,9 +265,9 @@ class Mining {
}
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', new Date().getUTCDate());
if (newlyIndexed > 0) {
logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
} else {
logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`, logger.tags.mining);
}
loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
} catch (e) {
@@ -370,14 +370,14 @@ class Mining {
await HashratesRepository.$setLatestRun('last_hashrates_indexing', new Date().getUTCDate());
if (newlyIndexed > 0) {
logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
} else {
logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
}
loadingIndicators.setProgress('daily-hashrate-indexing', 100);
} catch (e) {
loadingIndicators.setProgress('daily-hashrate-indexing', 100);
logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
throw e;
}
}
@@ -449,9 +449,9 @@ class Mining {
}
if (totalIndexed > 0) {
logger.notice(`Indexed ${totalIndexed} difficulty adjustments`);
logger.notice(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
} else {
logger.debug(`Indexed ${totalIndexed} difficulty adjustments`);
logger.debug(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
}
}

View file

@@ -61,7 +61,7 @@ class PoolsParser {
poolNames.push(poolsDuplicated[i].name);
}
}
logger.debug(`Found ${poolNames.length} unique mining pools`);
logger.debug(`Found ${poolNames.length} unique mining pools`, logger.tags.mining);
// Get existing pools from the db
let existingPools;
@@ -72,7 +72,7 @@ class PoolsParser {
existingPools = [];
}
} catch (e) {
logger.err('Cannot get existing pools from the database, skipping pools.json import');
logger.err('Cannot get existing pools from the database, skipping pools.json import', logger.tags.mining);
return;
}
@@ -99,7 +99,7 @@ class PoolsParser {
slug = poolsJson['slugs'][poolNames[i]];
} catch (e) {
if (this.slugWarnFlag === false) {
logger.warn(`pools.json does not seem to contain the 'slugs' object`);
logger.warn(`pools.json does not seem to contain the 'slugs' object`, logger.tags.mining);
this.slugWarnFlag = true;
}
}
@@ -107,7 +107,7 @@ class PoolsParser {
if (slug === undefined) {
// Only keep alphanumerical
slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`, logger.tags.mining);
}
const poolObj = {
@@ -143,9 +143,9 @@ class PoolsParser {
'addresses': allAddresses,
'slug': slug
});
logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`);
logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`, logger.tags.mining);
} else {
logger.debug(`Add '${finalPoolName}' mining pool`);
logger.debug(`Add '${finalPoolName}' mining pool`, logger.tags.mining);
finalPoolDataAdd.push(poolObj);
}
}
@@ -160,14 +160,14 @@ class PoolsParser {
}
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools.json import completed (no database)');
logger.info('Mining pools.json import completed (no database)', logger.tags.mining);
return;
}
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
finalPoolDataRename.length > 0
) {
logger.debug(`Update pools table now`);
logger.debug(`Update pools table now`, logger.tags.mining);
// Add new mining pools into the database
let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
@@ -217,9 +217,9 @@ class PoolsParser {
await DB.query({ sql: query, timeout: 120000 });
}
await this.insertUnknownPool();
logger.info('Mining pools.json import completed');
logger.info('Mining pools.json import completed', logger.tags.mining);
} catch (e) {
logger.err(`Cannot import pools in the database`);
logger.err(`Cannot import pools in the database`, logger.tags.mining);
throw e;
}
}
@@ -227,7 +227,7 @@ class PoolsParser {
try {
await this.insertUnknownPool();
} catch (e) {
logger.err(`Cannot insert unknown pool in the database`);
logger.err(`Cannot insert unknown pool in the database`, logger.tags.mining);
throw e;
}
}
@@ -252,7 +252,7 @@ class PoolsParser {
`);
}
} catch (e) {
logger.err('Unable to insert "Unknown" mining pool');
logger.err('Unable to insert "Unknown" mining pool', logger.tags.mining);
}
}
@@ -272,17 +272,17 @@ class PoolsParser {
for (const updatedPool of finalPoolDataUpdate) {
const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
if (pool.length > 0) {
logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`);
logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`, logger.tags.mining);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
}
}
// Ignore the early days of Bitcoin as there were no mining pools yet
logger.notice('Deleting blocks with unknown mining pool from height 130635 for future re-indexing');
logger.notice(`Deleting blocks with unknown mining pool from height 130635 for future re-indexing`, logger.tags.mining);
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
logger.notice('Truncating hashrates for future re-indexing');
logger.notice(`Truncating hashrates for future re-indexing`, logger.tags.mining);
await DB.query(`DELETE FROM hashrates`);
}
}

View file

@@ -32,22 +32,27 @@ class Logger {
local7: 23
};
public tags = {
mining: 'Mining',
ln: 'Lightning',
};
// @ts-ignore
public emerg: ((msg: string) => void);
public emerg: ((msg: string, tag?: string) => void);
// @ts-ignore
public alert: ((msg: string) => void);
public alert: ((msg: string, tag?: string) => void);
// @ts-ignore
public crit: ((msg: string) => void);
public crit: ((msg: string, tag?: string) => void);
// @ts-ignore
public err: ((msg: string) => void);
public err: ((msg: string, tag?: string) => void);
// @ts-ignore
public warn: ((msg: string) => void);
public warn: ((msg: string, tag?: string) => void);
// @ts-ignore
public notice: ((msg: string) => void);
public notice: ((msg: string, tag?: string) => void);
// @ts-ignore
public info: ((msg: string) => void);
public info: ((msg: string, tag?: string) => void);
// @ts-ignore
public debug: ((msg: string) => void);
public debug: ((msg: string, tag?: string) => void);
private name = 'mempool';
private client: dgram.Socket;
@@ -66,8 +71,8 @@ class Logger {
private addprio(prio): void {
this[prio] = (function(_this) {
return function(msg) {
return _this.msg(prio, msg);
return function(msg, tag?: string) {
return _this.msg(prio, msg, tag);
};
})(this);
}
@@ -85,7 +90,7 @@ class Logger {
return '';
}
private msg(priority, msg) {
private msg(priority, msg, tag?: string) {
let consolemsg, prionum, syslogmsg;
if (typeof msg === 'string' && msg.length > 0) {
while (msg[msg.length - 1].charCodeAt(0) === 10) {
@@ -94,10 +99,10 @@ class Logger {
}
const network = this.network ? ' <' + this.network + '>' : '';
prionum = Logger.priorities[priority] || Logger.priorities.info;
consolemsg = `${this.ts()} [${process.pid}] ${priority.toUpperCase()}:${network} ${msg}`;
consolemsg = `${this.ts()} [${process.pid}] ${priority.toUpperCase()}:${network} ${tag ? '[' + tag + '] ' : ''}${msg}`;
if (config.SYSLOG.ENABLED && Logger.priorities[priority] <= Logger.priorities[config.SYSLOG.MIN_PRIORITY]) {
syslogmsg = `<${(Logger.facilities[config.SYSLOG.FACILITY] * 8 + prionum)}> ${this.name}[${process.pid}]: ${priority.toUpperCase()}${network} ${msg}`;
syslogmsg = `<${(Logger.facilities[config.SYSLOG.FACILITY] * 8 + prionum)}> ${this.name}[${process.pid}]: ${priority.toUpperCase()}${network} ${tag ? '[' + tag + '] ' : ''}${msg}`;
this.syslog(syslogmsg);
}
if (Logger.priorities[priority] > Logger.priorities[config.MEMPOOL.STDOUT_LOG_MIN_PRIORITY]) {

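With the signature changes above, call sites attach a tag and log lines gain a bracketed label. A minimal usage sketch (timestamp, pid and network name are illustrative):

logger.info('Indexing blocks', logger.tags.mining);
// -> 2022-12-01 15:52:06 [1234] INFO: <mainnet> [Mining] Indexing blocks
logger.debug('Updating channel 5/100', logger.tags.ln);
// -> 2022-12-01 15:52:06 [1234] DEBUG: <mainnet> [Lightning] Updating channel 5/100
logger.warn('low disk space'); // tag omitted: output format is unchanged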
View file

@@ -23,7 +23,7 @@ class NetworkSyncService {
constructor() {}
public async $startService(): Promise<void> {
logger.info('Starting lightning network sync service');
logger.info(`Starting lightning network sync service`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
@@ -33,11 +33,11 @@ class NetworkSyncService {
private async $runTasks(): Promise<void> {
const taskStartTime = Date.now();
try {
logger.info(`Updating nodes and channels`);
logger.debug(`Updating nodes and channels`, logger.tags.ln);
const networkGraph = await lightningApi.$getNetworkGraph();
if (networkGraph.nodes.length === 0 || networkGraph.edges.length === 0) {
logger.info(`LN Network graph is empty, retrying in 10 seconds`);
logger.info(`LN Network graph is empty, retrying in 10 seconds`, logger.tags.ln);
setTimeout(() => { this.$runTasks(); }, 10000);
return;
}
@@ -55,7 +55,7 @@ class NetworkSyncService {
}
} catch (e) {
logger.err('$runTasks() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$runTasks() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
setTimeout(() => { this.$runTasks(); }, Math.max(1, (1000 * config.LIGHTNING.GRAPH_REFRESH_INTERVAL) - (Date.now() - taskStartTime)));
@@ -79,8 +79,8 @@ class NetworkSyncService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating node ${progress}/${nodes.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating node ${progress}/${nodes.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
@@ -106,7 +106,7 @@ class NetworkSyncService {
deletedRecords += await NodeRecordsRepository.$deleteUnusedRecords(node.pub_key, customRecordTypes);
}
}
logger.info(`${progress} nodes updated. ${deletedSockets} sockets deleted. ${deletedRecords} custom records deleted.`);
logger.debug(`${progress} nodes updated. ${deletedSockets} sockets deleted. ${deletedRecords} custom records deleted.`);
// If a node is not present in the graph, mark it as inactive
await nodesApi.$setNodesInactive(graphNodesPubkeys);
@@ -138,18 +138,18 @@ class NetworkSyncService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating channel ${progress}/${channels.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating channel ${progress}/${channels.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`${progress} channels updated`);
logger.debug(`${progress} channels updated`, logger.tags.ln);
// If a channel is not present in the graph, mark it as inactive
await channelsApi.$setChannelsInactive(graphChannelsIds);
} catch (e) {
logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.ln);
}
}
@@ -184,26 +184,28 @@ class NetworkSyncService {
if (lowest < node.first_seen) {
const query = `UPDATE nodes SET first_seen = FROM_UNIXTIME(?) WHERE public_key = ?`;
const params = [lowest, node.public_key];
++updated;
await DB.query(query, params);
}
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating node first seen date ${progress}/${nodes.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating node first seen date ${progress}/${nodes.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
++updated;
}
}
logger.info(`Updated ${updated} node first seen dates`);
if (updated > 0) {
logger.debug(`Updated ${updated} node first seen dates`, logger.tags.ln);
}
} catch (e) {
logger.err('$updateNodeFirstSeen() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$updateNodeFirstSeen() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}
private async $lookUpCreationDateFromChain(): Promise<void> {
let progress = 0;
logger.info(`Running channel creation date lookup`);
logger.debug(`Running channel creation date lookup`, logger.tags.ln);
try {
const channels = await channelsApi.$getChannelsWithoutCreatedDate();
for (const channel of channels) {
@@ -214,14 +216,17 @@ class NetworkSyncService {
);
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating channel creation date ${progress}/${channels.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating channel creation date ${progress}/${channels.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`Updated ${channels.length} channels' creation date`);
if (channels.length > 0) {
logger.debug(`Updated ${channels.length} channels' creation date`, logger.tags.ln);
}
} catch (e) {
logger.err('$lookUpCreationDateFromChain() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$lookUpCreationDateFromChain() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}
@@ -230,7 +235,7 @@ class NetworkSyncService {
* mark that channel as inactive
*/
private async $deactivateChannelsWithoutActiveNodes(): Promise<void> {
logger.info(`Find channels which nodes are offline`);
logger.debug(`Find channels which nodes are offline`, logger.tags.ln);
try {
const result = await DB.query<ResultSetHeader>(`
@@ -253,12 +258,10 @@ class NetworkSyncService {
`);
if (result[0].changedRows ?? 0 > 0) {
logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
} else {
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`, logger.tags.ln);
}
} catch (e) {
logger.err('$deactivateChannelsWithoutActiveNodes() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$deactivateChannelsWithoutActiveNodes() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}
@@ -277,13 +280,13 @@ class NetworkSyncService {
} else {
log += ` for the first time`;
}
logger.info(log);
logger.info(`${log}`, logger.tags.ln);
const channels = await channelsApi.$getChannelsByStatus([0, 1]);
for (const channel of channels) {
const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout);
if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
logger.debug('Marking channel: ' + channel.id + ' as closed.');
logger.debug(`Marking channel: ${channel.id} as closed.`, logger.tags.ln);
await DB.query(`UPDATE channels SET status = 2, closing_date = FROM_UNIXTIME(?) WHERE id = ?`,
[spendingTx.status.block_time, channel.id]);
if (spendingTx.txid && !channel.closing_transaction_id) {
@@ -293,16 +296,16 @@ class NetworkSyncService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Checking if channel has been closed ${progress}/${channels.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Checking if channel has been closed ${progress}/${channels.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
}
this.closedChannelsScanBlock = blocks.getCurrentBlockHeight();
logger.info(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`);
logger.debug(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`, logger.tags.ln);
} catch (e) {
logger.err('$scanForClosedChannels() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$scanForClosedChannels() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}
}

View file

@@ -6,7 +6,7 @@ import { Common } from '../../api/common';
class LightningStatsUpdater {
public async $startService(): Promise<void> {
logger.info('Starting Lightning Stats service');
logger.info(`Starting Lightning Stats service`, logger.tags.ln);
await this.$runTasks();
LightningStatsImporter.$run();
@@ -27,7 +27,7 @@ class LightningStatsUpdater {
const networkGraph = await lightningApi.$getNetworkGraph();
await LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);
logger.info(`Updated latest network stats`);
logger.debug(`Updated latest network stats`, logger.tags.ln);
}
}

View file

@@ -21,10 +21,10 @@ class FundingTxFetcher {
try {
this.fundingTxCache = JSON.parse(await fsPromises.readFile(CACHE_FILE_NAME, 'utf-8'));
} catch (e) {
logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`);
logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`, logger.tags.ln);
this.fundingTxCache = {};
}
logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amount from the disk cache`);
logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amount from the disk cache`, logger.tags.ln);
}
}
@@ -44,26 +44,27 @@ class FundingTxFetcher {
++channelProcessed;
let elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
elapsedSeconds = Math.round((new Date().getTime() / 1000) - globalTimer);
logger.info(`Indexing channels funding tx ${channelProcessed + 1} of ${channelIds.length} ` +
`(${Math.floor(channelProcessed / channelIds.length * 10000) / 100}%) | ` +
`elapsed: ${elapsedSeconds} seconds`
`elapsed: ${elapsedSeconds} seconds`,
logger.tags.ln
);
loggerTimer = new Date().getTime() / 1000;
}
elapsedSeconds = Math.round((new Date().getTime() / 1000) - cacheTimer);
if (elapsedSeconds > 60) {
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`, logger.tags.ln);
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
cacheTimer = new Date().getTime() / 1000;
}
}
if (this.channelNewlyProcessed > 0) {
logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`);
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`, logger.tags.ln);
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`, logger.tags.ln);
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
}

View file

@@ -14,7 +14,7 @@ export async function $lookupNodeLocation(): Promise<void> {
let nodesUpdated = 0;
let geoNamesInserted = 0;
logger.info(`Running node location updater using Maxmind`);
logger.debug(`Running node location updater using Maxmind`, logger.tags.ln);
try {
const nodes = await nodesApi.$getAllNodes();
const lookupCity = await maxmind.open<CityResponse>(config.MAXMIND.GEOLITE2_CITY);
@@ -152,8 +152,8 @@ export async function $lookupNodeLocation(): Promise<void> {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating node location data ${progress}/${nodes.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating node location data ${progress}/${nodes.length}`);
loggerTimer = new Date().getTime() / 1000;
}
}
@@ -161,9 +161,7 @@ export async function $lookupNodeLocation(): Promise<void> {
}
if (nodesUpdated > 0) {
logger.info(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
} else {
logger.debug(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
logger.debug(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`, logger.tags.ln);
}
} catch (e) {
logger.err('$lookupNodeLocation() error: ' + (e instanceof Error ? e.message : e));

View file

@@ -8,7 +8,6 @@ import { isIP } from 'net';
import { Common } from '../../../api/common';
import channelsApi from '../../../api/explorer/channels.api';
import nodesApi from '../../../api/explorer/nodes.api';
import { ResultSetHeader } from 'mysql2';
const fsPromises = promises;
@@ -17,7 +16,7 @@ class LightningStatsImporter {
async $run(): Promise<void> {
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
logger.info('Caching funding txs for currently existing channels');
logger.info(`Caching funding txs for currently existing channels`, logger.tags.ln);
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
@@ -108,7 +107,7 @@ class LightningStatsImporter {
const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id);
if (!tx) {
logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date is unknown. Skipping channel.`);
logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date is unknown. Skipping channel.`, logger.tags.ln);
continue;
}
@@ -316,7 +315,7 @@ class LightningStatsImporter {
try {
fileList = await fsPromises.readdir(this.topologiesFolder);
} catch (e) {
logger.err(`Unable to open topology folder at ${this.topologiesFolder}`);
logger.err(`Unable to open topology folder at ${this.topologiesFolder}`, logger.tags.ln);
throw e;
}
// Insert history from the most recent to the oldest
@@ -354,7 +353,7 @@ class LightningStatsImporter {
continue;
}
logger.debug(`Reading ${this.topologiesFolder}/${filename}`);
logger.debug(`Reading ${this.topologiesFolder}/${filename}`, logger.tags.ln);
let fileContent = '';
try {
fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');
@@ -363,7 +362,7 @@ class LightningStatsImporter {
totalProcessed++;
continue;
}
logger.err(`Unable to open ${this.topologiesFolder}/${filename}`);
logger.err(`Unable to open ${this.topologiesFolder}/${filename}`, logger.tags.ln);
totalProcessed++;
continue;
}
@@ -373,7 +372,7 @@ class LightningStatsImporter {
graph = JSON.parse(fileContent);
graph = await this.cleanupTopology(graph);
} catch (e) {
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content. Reason: ${e instanceof Error ? e.message : e}`);
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
totalProcessed++;
continue;
}
@@ -385,20 +384,20 @@ class LightningStatsImporter {
}
if (!logStarted) {
logger.info(`Found a topology file that we did not import. Importing historical lightning stats now.`);
logger.info(`Found a topology file that we did not import. Importing historical lightning stats now.`, logger.tags.ln);
logStarted = true;
}
const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);
logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`, logger.tags.ln);
totalProcessed++;
if (processed > 10) {
logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`, logger.tags.ln);
processed = 0;
} else {
logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`, logger.tags.ln);
}
await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2)));
const stat = await this.computeNetworkStats(timestamp, graph, true);
@@ -407,10 +406,10 @@ class LightningStatsImporter {
}
if (totalProcessed > 0) {
logger.info(`Lightning network stats historical import completed`);
logger.notice(`Lightning network stats historical import completed`, logger.tags.ln);
}
} catch (e) {
logger.err(`Lightning network stats historical import failed. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Lightning network stats historical import failed. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}

View file

@@ -32,9 +32,9 @@ class PoolsUpdater {
this.lastRun = now;
if (config.SOCKS5PROXY.ENABLED) {
logger.info(`Updating latest mining pools from ${this.poolsUrl} over the Tor network`);
logger.info(`Updating latest mining pools from ${this.poolsUrl} over the Tor network`, logger.tags.mining);
} else {
logger.info(`Updating latest mining pools from ${this.poolsUrl} over clearnet`);
logger.info(`Updating latest mining pools from ${this.poolsUrl} over clearnet`, logger.tags.mining);
}
try {
@@ -53,9 +53,9 @@ class PoolsUpdater {
}
if (this.currentSha === undefined) {
logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`);
logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`, logger.tags.mining);
} else {
logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`);
logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`, logger.tags.mining);
}
const poolsJson = await this.query(this.poolsUrl);
if (poolsJson === undefined) {
@@ -63,11 +63,11 @@ class PoolsUpdater {
}
await poolsParser.migratePoolsJson(poolsJson);
await this.updateDBSha(githubSha);
logger.notice('PoolsUpdater completed');
logger.notice(`PoolsUpdater completed`, logger.tags.mining);
} catch (e) {
this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
logger.err('PoolsUpdater failed. Will try again in 24h. Reason: ' + (e instanceof Error ? e.message : e));
logger.err(`PoolsUpdater failed. Will try again in 24h. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
}
}
@@ -81,7 +81,7 @@ class PoolsUpdater {
await DB.query('DELETE FROM state where name="pools_json_sha"');
await DB.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
} catch (e) {
logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
}
}
}
@@ -94,7 +94,7 @@ class PoolsUpdater {
const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
return (rows.length > 0 ? rows[0].string : undefined);
} catch (e) {
logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
return undefined;
}
}
@@ -113,7 +113,7 @@ class PoolsUpdater {
}
}
logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`);
logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`, logger.tags.mining);
return undefined;
}

View file

@@ -91,7 +91,7 @@ class KrakenApi implements PriceFeed {
}
if (Object.keys(priceHistory).length > 0) {
logger.notice(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`);
logger.notice(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`, logger.tags.mining);
}
}
}

View file

@@ -82,7 +82,7 @@ class PriceUpdater {
await this.$updatePrice();
}
} catch (e) {
logger.err(`Cannot save BTC prices in db. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Cannot save BTC prices in db. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
}
this.running = false;
@@ -115,14 +115,14 @@ class PriceUpdater {
if (price > 0) {
prices.push(price);
}
logger.debug(`${feed.name} BTC/${currency} price: ${price}`);
logger.debug(`${feed.name} BTC/${currency} price: ${price}`, logger.tags.mining);
} catch (e) {
logger.debug(`Could not fetch BTC/${currency} price at ${feed.name}. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.debug(`Could not fetch BTC/${currency} price at ${feed.name}. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
}
}
}
if (prices.length === 1) {
logger.debug(`Only ${prices.length} feed available for BTC/${currency} price`);
logger.debug(`Only ${prices.length} feed available for BTC/${currency} price`, logger.tags.mining);
}
// Compute average price, non weighted
@@ -175,9 +175,9 @@ class PriceUpdater {
++insertedCount;
}
if (insertedCount > 0) {
logger.notice(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
logger.notice(`Inserted ${insertedCount} MtGox USD weekly price history into db`, logger.tags.mining);
} else {
logger.debug(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
logger.debug(`Inserted ${insertedCount} MtGox USD weekly price history into db`, logger.tags.mining);
}
// Insert Kraken weekly prices
@@ -198,7 +198,7 @@ class PriceUpdater {
private async $insertMissingRecentPrices(type: 'hour' | 'day'): Promise<void> {
const existingPriceTimes = await PricesRepository.$getPricesTimes();
logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database, this may take a while`);
logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
const historicalPrices: PriceHistory[] = [];
@@ -207,7 +207,7 @@ class PriceUpdater {
try {
historicalPrices.push(await feed.$fetchRecentPrice(this.currencies, type));
} catch (e) {
logger.err(`Cannot fetch hourly historical price from ${feed.name}. Ignoring this feed. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Cannot fetch hourly historical price from ${feed.name}. Ignoring this feed. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
}
}
@@ -252,9 +252,9 @@ class PriceUpdater {
}
if (totalInserted > 0) {
logger.notice(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
logger.notice(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`, logger.tags.mining);
} else {
logger.debug(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
logger.debug(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`, logger.tags.mining);
}
}
}