Mirror of https://github.com/mempool/mempool.git (synced 2025-03-03 09:39:17 +01:00)

Cleanup some log

commit 321161ede9 (parent b5ad0895ac)
7 changed files with 17 additions and 14 deletions
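At a glance, the commit demotes routine progress messages from logger.info to logger.debug, raises a caught parse error in DiskCache from logger.info to logger.err, and makes a few messages more precise. Below is a condensed, hypothetical sketch of the level split the hunks converge on; only the logger calls and logger.tags.mining are taken from the diff, while the function, its name, and its messages are illustrative only:

  // Hypothetical example (not from the repository): only the logger API usage
  // (logger.debug / logger.info / logger.err, logger.tags.mining) mirrors the hunks below.
  // Assumes the backend's logger singleton, e.g. `import logger from './logger'`.
  async function runIndexingPass(items: number[]): Promise<void> {
    try {
      if (!items.length) {
        return; // nothing to do: stay quiet instead of logging "0 items"
      }
      logger.debug(`Indexing pass started for ${items.length} items`, logger.tags.mining); // routine progress -> debug
      // ... indexing work ...
      logger.info(`Indexing pass completed. Indexed ${items.length}`, logger.tags.mining); // meaningful outcome -> info
    } catch (e) {
      logger.err('runIndexingPass() error: ' + (e instanceof Error ? e.message : e)); // real failures -> err
    }
  }

The DiskCache hunk applies the same idea in the other direction: a caught parse error is a real failure, so it moves from info to err.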
@@ -410,12 +410,13 @@ class Blocks {
     try {
       // Get all indexed block hash
       const unindexedBlockHeights = await blocksRepository.$getCPFPUnindexedBlocks();
-      logger.info(`Indexing cpfp data for ${unindexedBlockHeights.length} blocks`);
 
       if (!unindexedBlockHeights?.length) {
         return;
       }
 
+      logger.info(`Indexing cpfp data for ${unindexedBlockHeights.length} blocks`);
+
       // Logging
       let count = 0;
       let countThisRun = 0;
@@ -616,7 +617,7 @@ class Blocks {
           priceId: lastestPriceId,
         }]);
       } else {
-        logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasnt completed yet. Trying again in 10 seconds.`, logger.tags.mining);
+        logger.debug(`Cannot save block price for ${blockExtended.height} because the price updater hasnt completed yet. Trying again in 10 seconds.`, logger.tags.mining);
         setTimeout(() => {
           indexer.runSingleTask('blocksPrices');
         }, 10000);
@@ -164,7 +164,7 @@ class DiskCache {
         }
       }
     } catch (e) {
-      logger.info('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
+      logger.err('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
     }
   }
 
@@ -452,7 +452,7 @@ class Mining {
       const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
       if (elapsedSeconds > 5) {
         const progress = Math.round(totalBlockChecked / blocks.length * 100);
-        logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
+        logger.debug(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
         timer = new Date().getTime() / 1000;
       }
     }
@@ -558,8 +558,10 @@ class Mining {
       currentBlockHeight -= 10000;
     }
 
-    if (totalIndexed) {
-      logger.info(`Indexing missing coinstatsindex data completed`, logger.tags.mining);
+    if (totalIndexed > 0) {
+      logger.info(`Indexing missing coinstatsindex data completed. Indexed ${totalIndexed}`, logger.tags.mining);
+    } else {
+      logger.debug(`Indexing missing coinstatsindex data completed. Indexed 0.`, logger.tags.mining);
     }
   }
 
@@ -27,7 +27,7 @@ class ForensicsService {
 
   private async $runTasks(): Promise<void> {
     try {
-      logger.info(`Running forensics scans`);
+      logger.debug(`Running forensics scans`);
 
       if (config.MEMPOOL.BACKEND === 'esplora') {
         await this.$runClosedChannelsForensics(false);
@@ -73,7 +73,7 @@ class ForensicsService {
     let progress = 0;
 
     try {
-      logger.info(`Started running closed channel forensics...`);
+      logger.debug(`Started running closed channel forensics...`);
       let channels;
       if (onlyNewChannels) {
         channels = await channelsApi.$getClosedChannelsWithoutReason();
@@ -156,7 +156,7 @@ class ForensicsService {
           this.loggerTimer = new Date().getTime() / 1000;
         }
       }
-      logger.info(`Closed channels forensics scan complete.`);
+      logger.debug(`Closed channels forensics scan complete.`);
     } catch (e) {
       logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
     }
@@ -217,7 +217,7 @@ class ForensicsService {
     let progress = 0;
 
     try {
-      logger.info(`Started running open channel forensics...`);
+      logger.debug(`Started running open channel forensics...`);
       const channels = await channelsApi.$getChannelsWithoutSourceChecked();
 
       for (const openChannel of channels) {
@@ -266,7 +266,7 @@ class ForensicsService {
         }
       }
 
-      logger.info(`Open channels forensics scan complete.`);
+      logger.debug(`Open channels forensics scan complete.`);
     } catch (e) {
       logger.err('$runOpenedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
     } finally {
@@ -283,7 +283,7 @@ class NetworkSyncService {
       } else {
         log += ` for the first time`;
       }
-      logger.info(`${log}`, logger.tags.ln);
+      logger.debug(`${log}`, logger.tags.ln);
 
       const channels = await channelsApi.$getChannelsByStatus([0, 1]);
       for (const channel of channels) {
@@ -62,7 +62,7 @@ class PoolsUpdater {
       if (this.currentSha === null) {
         logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl} over ${network}`, logger.tags.mining);
       } else {
-        logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
+        logger.warn(`pools-v2.json is outdated, fetching latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
       }
       const poolsJson = await this.query(this.poolsUrl);
       if (poolsJson === undefined) {
@@ -222,7 +222,7 @@ class PriceUpdater {
   private async $insertMissingRecentPrices(type: 'hour' | 'day'): Promise<void> {
     const existingPriceTimes = await PricesRepository.$getPricesTimes();
 
-    logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
+    logger.debug(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
 
     const historicalPrices: PriceHistory[] = [];
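For reference, a per-class tally of the hunks above (class names are taken from the hunk headers; file names are not shown on this page). The totals agree with the stated 17 additions and 14 deletions:

  Blocks              +3 / -2
  DiskCache           +1 / -1
  Mining              +5 / -3
  ForensicsService    +5 / -5
  NetworkSyncService  +1 / -1
  PoolsUpdater        +1 / -1
  PriceUpdater        +1 / -1
  Total              +17 / -14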