mirror of https://github.com/mempool/mempool.git
Merge pull request #2231 from mempool/nymkappa/feature/ln-historical-import

Import LN historical statistics (network wide + per node)

commit feda827860

8 changed files with 506 additions and 315 deletions
backend/package-lock.json (generated file, 38 additions)

@@ -31,6 +31,7 @@
         "@typescript-eslint/parser": "^5.30.5",
         "eslint": "^8.19.0",
         "eslint-config-prettier": "^8.5.0",
+        "fast-xml-parser": "^4.0.9",
         "prettier": "^2.7.1"
       }
     },
@@ -1496,6 +1497,22 @@
       "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
       "dev": true
     },
+    "node_modules/fast-xml-parser": {
+      "version": "4.0.9",
+      "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.0.9.tgz",
+      "integrity": "sha512-4G8EzDg2Nb1Qurs3f7BpFV4+jpMVsdgLVuG1Uv8O2OHJfVCg7gcA53obuKbmVqzd4Y7YXVBK05oJG7hzGIdyzg==",
+      "dev": true,
+      "dependencies": {
+        "strnum": "^1.0.5"
+      },
+      "bin": {
+        "fxparser": "src/cli/cli.js"
+      },
+      "funding": {
+        "type": "paypal",
+        "url": "https://paypal.me/naturalintelligence"
+      }
+    },
     "node_modules/fastq": {
       "version": "1.13.0",
       "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz",
@@ -2665,6 +2682,12 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
+    "node_modules/strnum": {
+      "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz",
+      "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==",
+      "dev": true
+    },
     "node_modules/text-table": {
       "version": "0.2.0",
       "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
@@ -3973,6 +3996,15 @@
       "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
       "dev": true
     },
+    "fast-xml-parser": {
+      "version": "4.0.9",
+      "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.0.9.tgz",
+      "integrity": "sha512-4G8EzDg2Nb1Qurs3f7BpFV4+jpMVsdgLVuG1Uv8O2OHJfVCg7gcA53obuKbmVqzd4Y7YXVBK05oJG7hzGIdyzg==",
+      "dev": true,
+      "requires": {
+        "strnum": "^1.0.5"
+      }
+    },
     "fastq": {
       "version": "1.13.0",
       "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz",
@@ -4817,6 +4849,12 @@
       "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
       "dev": true
     },
+    "strnum": {
+      "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz",
+      "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==",
+      "dev": true
+    },
     "text-table": {
       "version": "0.2.0",
       "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
backend/package.json

@@ -37,6 +37,7 @@
     "bitcoinjs-lib": "6.0.1",
     "crypto-js": "^4.0.0",
     "express": "^4.18.0",
+    "fast-xml-parser": "^4.0.9",
    "maxmind": "^4.3.6",
    "mysql2": "2.3.3",
    "node-worker-threads-pool": "^1.5.1",
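fast-xml-parser (with its strnum dependency above) is added for the new stats importer, which parses GraphML topology snapshots. A minimal sketch of the XMLParser API as it is used there; the XML string is illustrative, not from the PR:

import { XMLParser } from 'fast-xml-parser';

// Illustrative two-field GraphML fragment, shaped like the topology files
// handled by stats-importer.ts below.
const xml = '<graphml><graph><node><data>02abcd</data><data>1546300800</data></node></graph></graphml>';

const graph = new XMLParser().parse(xml);
// Repeated <data> tags come back as an array; a single child would not,
// which is why the importer re-wraps lone node/edge entries in arrays.
console.log(graph.graphml.graph.node.data); // [ '02abcd', 1546300800 ]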
backend/src/api/database-migration.ts

@@ -4,7 +4,7 @@ import logger from '../logger';
 import { Common } from './common';

 class DatabaseMigration {
-  private static currentVersion = 33;
+  private static currentVersion = 34;
   private queryTimeout = 120000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];
@@ -311,6 +311,10 @@ class DatabaseMigration {
     if (databaseSchemaVersion < 33 && isBitcoin == true) {
       await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization", "country_iso_code") NOT NULL');
     }
+
+    if (databaseSchemaVersion < 34 && isBitcoin == true) {
+      await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_tor_nodes int(11) NOT NULL DEFAULT "0"');
+    }
   }

   /**
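The new clearnet_tor_nodes column backs the node counts produced by the importer below (nodes announcing both a clearnet and a Tor address). Schema changes follow the backend's version-gated pattern, so re-running the updater is a no-op once the stored version reaches currentVersion. A minimal sketch of that pattern; the $executeQuery stub stands in for DatabaseMigration's MySQL call:

// Minimal sketch of the version-gated migration pattern used above.
// $executeQuery is a stand-in for DatabaseMigration.$executeQuery().
async function $executeQuery(query: string): Promise<void> {
  console.log('executing:', query); // the real method runs this against MySQL
}

async function migrateToV34(databaseSchemaVersion: number, isBitcoin: boolean): Promise<void> {
  // Each block runs at most once: the stored schema version is bumped afterwards.
  if (databaseSchemaVersion < 34 && isBitcoin === true) {
    await $executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_tor_nodes int(11) NOT NULL DEFAULT "0"');
  }
}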
backend/src/config.ts

@@ -31,6 +31,7 @@ interface IConfig {
   LIGHTNING: {
     ENABLED: boolean;
     BACKEND: 'lnd' | 'cln' | 'ldk';
+    TOPOLOGY_FOLDER: string;
   };
   LND: {
     TLS_CERT_PATH: string;
@@ -177,7 +178,8 @@ const defaults: IConfig = {
   },
   'LIGHTNING': {
     'ENABLED': false,
-    'BACKEND': 'lnd'
+    'BACKEND': 'lnd',
+    'TOPOLOGY_FOLDER': '',
   },
   'LND': {
     'TLS_CERT_PATH': '',
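TOPOLOGY_FOLDER points the backend at a directory of historical network-graph snapshots that the new importer replays; filenames are expected to carry a unix timestamp after the first underscore (see $importHistoricalLightningStats below). A sketch of the wired-through setting and the filename convention; the folder path and filename here are illustrative:

// Illustrative values for the new setting (shape matches the IConfig change above).
const LIGHTNING = {
  ENABLED: true,
  BACKEND: 'lnd' as 'lnd' | 'cln' | 'ldk',
  TOPOLOGY_FOLDER: '/mempool/ln-topologies', // hypothetical path
};

// The importer derives each snapshot's timestamp from its filename:
const filename = 'topology_1556316000.xml'; // hypothetical snapshot file
const timestamp = parseInt(filename.split('_')[1], 10); // 1556316000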
backend/src/index.ts

@@ -29,11 +29,11 @@ import channelsRoutes from './api/explorer/channels.routes';
 import generalLightningRoutes from './api/explorer/general.routes';
 import lightningStatsUpdater from './tasks/lightning/stats-updater.service';
 import nodeSyncService from './tasks/lightning/node-sync.service';
-import statisticsRoutes from "./api/statistics/statistics.routes";
-import miningRoutes from "./api/mining/mining-routes";
-import bisqRoutes from "./api/bisq/bisq.routes";
-import liquidRoutes from "./api/liquid/liquid.routes";
-import bitcoinRoutes from "./api/bitcoin/bitcoin.routes";
+import statisticsRoutes from './api/statistics/statistics.routes';
+import miningRoutes from './api/mining/mining-routes';
+import bisqRoutes from './api/bisq/bisq.routes';
+import liquidRoutes from './api/liquid/liquid.routes';
+import bitcoinRoutes from './api/bitcoin/bitcoin.routes';

 class Server {
   private wss: WebSocket.Server | undefined;
backend/src/tasks/lightning/stats-updater.service.ts

@@ -1,35 +1,14 @@
 import DB from '../../database';
 import logger from '../../logger';
 import lightningApi from '../../api/lightning/lightning-api-factory';
-import channelsApi from '../../api/explorer/channels.api';
-import * as net from 'net';
+import LightningStatsImporter from './sync-tasks/stats-importer';

 class LightningStatsUpdater {
-  hardCodedStartTime = '2018-01-12';
-
-  public async $startService() {
+  public async $startService(): Promise<void> {
     logger.info('Starting Lightning Stats service');
-    let isInSync = false;
-    let error: any;
-    try {
-      error = null;
-      isInSync = await this.$lightningIsSynced();
-    } catch (e) {
-      error = e;
-    }
-    if (!isInSync) {
-      if (error) {
-        logger.warn('Was not able to fetch Lightning Node status: ' + (error instanceof Error ? error.message : error) + '. Retrying in 1 minute...');
-      } else {
-        logger.notice('The Lightning graph is not yet in sync. Retrying in 1 minute...');
-      }
-      setTimeout(() => this.$startService(), 60 * 1000);
-      return;
-    }

-    await this.$populateHistoricalStatistics();
-    await this.$populateHistoricalNodeStatistics();
+    LightningStatsImporter.$run();

     setTimeout(() => {
       this.$runTasks();
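Both the start-up path above and $runTasks in the next hunk re-arm themselves with this.timeUntilMidnight(), which is referenced but not part of this diff. A plausible sketch of that helper, assuming it mirrors the UTC handling of setDateMidnight():

// Plausible sketch of the scheduling helper referenced above (not shown in this diff).
function timeUntilMidnight(): number {
  const date = new Date();
  // Jump to the next UTC midnight and return the remaining delay in ms,
  // so setTimeout() fires the daily task once per UTC day.
  date.setUTCHours(24, 0, 0, 0);
  return date.getTime() - Date.now();
}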
@@ -50,298 +29,22 @@
     date.setUTCMilliseconds(0);
   }

-  private async $lightningIsSynced(): Promise<boolean> {
-    const nodeInfo = await lightningApi.$getInfo();
-    return nodeInfo.synced_to_chain && nodeInfo.synced_to_graph;
-  }
-
   private async $runTasks(): Promise<void> {
-    await this.$logLightningStatsDaily();
-    await this.$logNodeStatsDaily();
+    await this.$logStatsDaily();

     setTimeout(() => {
       this.$runTasks();
     }, this.timeUntilMidnight());
   }

-  private async $logLightningStatsDaily() {
-    try {
+  private async $logStatsDaily(): Promise<void> {
+    const date = new Date();
+    this.setDateMidnight(date);
+    date.setUTCHours(24);
+
     logger.info(`Running lightning daily stats log...`);

     const networkGraph = await lightningApi.$getNetworkGraph();
-      let total_capacity = 0;
-      for (const channel of networkGraph.edges) {
-        if (channel.capacity) {
-          total_capacity += parseInt(channel.capacity);
-        }
-      }
-
-      let clearnetNodes = 0;
-      let torNodes = 0;
-      let unannouncedNodes = 0;
-      for (const node of networkGraph.nodes) {
-        for (const socket of node.addresses) {
-          const hasOnion = socket.addr.indexOf('.onion') !== -1;
-          if (hasOnion) {
-            torNodes++;
-          }
-          const hasClearnet = [4, 6].includes(net.isIP(socket.addr.split(':')[0]));
-          if (hasClearnet) {
-            clearnetNodes++;
-          }
-        }
-        if (node.addresses.length === 0) {
-          unannouncedNodes++;
-        }
-      }
-
-      const channelStats = await channelsApi.$getChannelsStats();
-
-      const query = `INSERT INTO lightning_stats(
-        added,
-        channel_count,
-        node_count,
-        total_capacity,
-        tor_nodes,
-        clearnet_nodes,
-        unannounced_nodes,
-        avg_capacity,
-        avg_fee_rate,
-        avg_base_fee_mtokens,
-        med_capacity,
-        med_fee_rate,
-        med_base_fee_mtokens
-      )
-      VALUES (NOW() - INTERVAL 1 DAY, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
-
-      await DB.query(query, [
-        networkGraph.edges.length,
-        networkGraph.nodes.length,
-        total_capacity,
-        torNodes,
-        clearnetNodes,
-        unannouncedNodes,
-        channelStats.avgCapacity,
-        channelStats.avgFeeRate,
-        channelStats.avgBaseFee,
-        channelStats.medianCapacity,
-        channelStats.medianFeeRate,
-        channelStats.medianBaseFee,
-      ]);
-      logger.info(`Lightning daily stats done.`);
-    } catch (e) {
-      logger.err('$logLightningStatsDaily() error: ' + (e instanceof Error ? e.message : e));
-    }
-  }
-
-  private async $logNodeStatsDaily() {
-    try {
-      logger.info(`Running daily node stats update...`);
-
-      const query = `
-        SELECT nodes.public_key, c1.channels_count_left, c2.channels_count_right, c1.channels_capacity_left,
-          c2.channels_capacity_right
-        FROM nodes
-        LEFT JOIN (
-          SELECT node1_public_key, COUNT(id) AS channels_count_left, SUM(capacity) AS channels_capacity_left
-          FROM channels
-          WHERE channels.status = 1
-          GROUP BY node1_public_key
-        ) c1 ON c1.node1_public_key = nodes.public_key
-        LEFT JOIN (
-          SELECT node2_public_key, COUNT(id) AS channels_count_right, SUM(capacity) AS channels_capacity_right
-          FROM channels WHERE channels.status = 1 GROUP BY node2_public_key
-        ) c2 ON c2.node2_public_key = nodes.public_key
-      `;
-
-      const [nodes]: any = await DB.query(query);
-
-      for (const node of nodes) {
-        await DB.query(
-          `INSERT INTO node_stats(public_key, added, capacity, channels) VALUES (?, NOW() - INTERVAL 1 DAY, ?, ?)`,
-          [node.public_key, (parseInt(node.channels_capacity_left || 0, 10)) + (parseInt(node.channels_capacity_right || 0, 10)),
-            node.channels_count_left + node.channels_count_right]);
-      }
-      logger.info('Daily node stats has updated.');
-    } catch (e) {
-      logger.err('$logNodeStatsDaily() error: ' + (e instanceof Error ? e.message : e));
-    }
-  }
-
-  // We only run this on first launch
-  private async $populateHistoricalStatistics() {
-    try {
-      const [rows]: any = await DB.query(`SELECT COUNT(*) FROM lightning_stats`);
-      // Only run if table is empty
-      if (rows[0]['COUNT(*)'] > 0) {
-        return;
-      }
-      logger.info(`Running historical stats population...`);
-
-      const [channels]: any = await DB.query(`SELECT capacity, created, closing_date FROM channels ORDER BY created ASC`);
-      const [nodes]: any = await DB.query(`SELECT first_seen, sockets FROM nodes ORDER BY first_seen ASC`);
-
-      const date: Date = new Date(this.hardCodedStartTime);
-      const currentDate = new Date();
-      this.setDateMidnight(currentDate);
-
-      while (date < currentDate) {
-        let totalCapacity = 0;
-        let channelsCount = 0;
-
-        for (const channel of channels) {
-          if (new Date(channel.created) > date) {
-            break;
-          }
-          if (channel.closing_date === null || new Date(channel.closing_date) > date) {
-            totalCapacity += channel.capacity;
-            channelsCount++;
-          }
-        }
-
-        let nodeCount = 0;
-        let clearnetNodes = 0;
-        let torNodes = 0;
-        let unannouncedNodes = 0;
-
-        for (const node of nodes) {
-          if (new Date(node.first_seen) > date) {
-            break;
-          }
-          nodeCount++;
-
-          const sockets = node.sockets.split(',');
-          let isUnnanounced = true;
-          for (const socket of sockets) {
-            const hasOnion = socket.indexOf('.onion') !== -1;
-            if (hasOnion) {
-              torNodes++;
-              isUnnanounced = false;
-            }
-            const hasClearnet = [4, 6].includes(net.isIP(socket.substring(0, socket.lastIndexOf(':'))));
-            if (hasClearnet) {
-              clearnetNodes++;
-              isUnnanounced = false;
-            }
-          }
-          if (isUnnanounced) {
-            unannouncedNodes++;
-          }
-        }
-
-        const query = `INSERT INTO lightning_stats(
-          added,
-          channel_count,
-          node_count,
-          total_capacity,
-          tor_nodes,
-          clearnet_nodes,
-          unannounced_nodes,
-          avg_capacity,
-          avg_fee_rate,
-          avg_base_fee_mtokens,
-          med_capacity,
-          med_fee_rate,
-          med_base_fee_mtokens
-        )
-        VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
-
-        const rowTimestamp = date.getTime() / 1000; // Save timestamp for the row insertion down below
-
-        date.setUTCDate(date.getUTCDate() + 1);
-
-        // Last iteration, save channels stats
-        const channelStats = (date >= currentDate ? await channelsApi.$getChannelsStats() : undefined);
-
-        await DB.query(query, [
-          rowTimestamp,
-          channelsCount,
-          nodeCount,
-          totalCapacity,
-          torNodes,
-          clearnetNodes,
-          unannouncedNodes,
-          channelStats?.avgCapacity ?? 0,
-          channelStats?.avgFeeRate ?? 0,
-          channelStats?.avgBaseFee ?? 0,
-          channelStats?.medianCapacity ?? 0,
-          channelStats?.medianFeeRate ?? 0,
-          channelStats?.medianBaseFee ?? 0,
-        ]);
-      }
-
-      logger.info('Historical stats populated.');
-    } catch (e) {
-      logger.err('$populateHistoricalData() error: ' + (e instanceof Error ? e.message : e));
-    }
-  }
-
-  private async $populateHistoricalNodeStatistics() {
-    try {
-      const [rows]: any = await DB.query(`SELECT COUNT(*) FROM node_stats`);
-      // Only run if table is empty
-      if (rows[0]['COUNT(*)'] > 0) {
-        return;
-      }
-      logger.info(`Running historical node stats population...`);
-
-      const [nodes]: any = await DB.query(`SELECT public_key, first_seen, alias FROM nodes ORDER BY first_seen ASC`);
-
-      for (const node of nodes) {
-        const [channels]: any = await DB.query(`SELECT capacity, created, closing_date FROM channels WHERE node1_public_key = ? OR node2_public_key = ? ORDER BY created ASC`, [node.public_key, node.public_key]);
-
-        const date: Date = new Date(this.hardCodedStartTime);
-        const currentDate = new Date();
-        this.setDateMidnight(currentDate);
-
-        let lastTotalCapacity = 0;
-        let lastChannelsCount = 0;
-
-        while (date < currentDate) {
-          let totalCapacity = 0;
-          let channelsCount = 0;
-          for (const channel of channels) {
-            if (new Date(channel.created) > date) {
-              break;
-            }
-            if (channel.closing_date !== null && new Date(channel.closing_date) < date) {
-              date.setUTCDate(date.getUTCDate() + 1);
-              continue;
-            }
-            totalCapacity += channel.capacity;
-            channelsCount++;
-          }
-
-          if (lastTotalCapacity === totalCapacity && lastChannelsCount === channelsCount) {
-            date.setUTCDate(date.getUTCDate() + 1);
-            continue;
-          }
-
-          lastTotalCapacity = totalCapacity;
-          lastChannelsCount = channelsCount;
-
-          const query = `INSERT INTO node_stats(
-            public_key,
-            added,
-            capacity,
-            channels
-          )
-          VALUES (?, FROM_UNIXTIME(?), ?, ?)`;
-
-          await DB.query(query, [
-            node.public_key,
-            date.getTime() / 1000,
-            totalCapacity,
-            channelsCount,
-          ]);
-          date.setUTCDate(date.getUTCDate() + 1);
-        }
-        logger.debug('Updated node_stats for: ' + node.alias);
-      }
-      logger.info('Historical stats populated.');
-    } catch (e) {
-      logger.err('$populateHistoricalNodeData() error: ' + (e instanceof Error ? e.message : e));
-    }
+    LightningStatsImporter.computeNetworkStats(date.getTime(), networkGraph);
   }
 }
backend/src/tasks/lightning/sync-tasks/funding-tx-fetcher.ts (new file, 113 lines)

import { existsSync, promises } from 'fs';
import bitcoinApiFactory from '../../../api/bitcoin/bitcoin-api-factory';
import bitcoinClient from '../../../api/bitcoin/bitcoin-client';
import config from '../../../config';
import DB from '../../../database';
import logger from '../../../logger';

const fsPromises = promises;

const BLOCKS_CACHE_MAX_SIZE = 100;
const CACHE_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/ln-funding-txs-cache.json';

class FundingTxFetcher {
  private running = false;
  private blocksCache = {};
  private channelNewlyProcessed = 0;
  public fundingTxCache = {};

  async $fetchChannelsFundingTxs(channelIds: string[]): Promise<void> {
    if (this.running) {
      return;
    }
    this.running = true;

    // Load funding tx disk cache
    if (Object.keys(this.fundingTxCache).length === 0 && existsSync(CACHE_FILE_NAME)) {
      try {
        this.fundingTxCache = JSON.parse(await fsPromises.readFile(CACHE_FILE_NAME, 'utf-8'));
      } catch (e) {
        logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`);
        this.fundingTxCache = {};
      }
      logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amount from the disk cache`);
    }

    const globalTimer = new Date().getTime() / 1000;
    let cacheTimer = new Date().getTime() / 1000;
    let loggerTimer = new Date().getTime() / 1000;
    let channelProcessed = 0;
    this.channelNewlyProcessed = 0;
    for (const channelId of channelIds) {
      await this.$fetchChannelOpenTx(channelId);
      ++channelProcessed;

      let elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
      if (elapsedSeconds > 10) {
        elapsedSeconds = Math.round((new Date().getTime() / 1000) - globalTimer);
        logger.debug(`Indexing channels funding tx ${channelProcessed + 1} of ${channelIds.length} ` +
          `(${Math.floor(channelProcessed / channelIds.length * 10000) / 100}%) | ` +
          `elapsed: ${elapsedSeconds} seconds`
        );
        loggerTimer = new Date().getTime() / 1000;
      }

      elapsedSeconds = Math.round((new Date().getTime() / 1000) - cacheTimer);
      if (elapsedSeconds > 60) {
        logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
        fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
        cacheTimer = new Date().getTime() / 1000;
      }
    }

    if (this.channelNewlyProcessed > 0) {
      logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`);
      logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
      fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
    }

    this.running = false;
  }

  public async $fetchChannelOpenTx(channelId: string): Promise<any> {
    if (this.fundingTxCache[channelId]) {
      return this.fundingTxCache[channelId];
    }

    const parts = channelId.split('x');
    const blockHeight = parts[0];
    const txIdx = parts[1];
    const outputIdx = parts[2];

    let block = this.blocksCache[blockHeight];
    // Fetch it from core
    if (!block) {
      const blockHash = await bitcoinClient.getBlockHash(parseInt(blockHeight, 10));
      block = await bitcoinClient.getBlock(blockHash, 1);
    }
    this.blocksCache[block.height] = block;

    const blocksCacheHashes = Object.keys(this.blocksCache).sort((a, b) => parseInt(b) - parseInt(a)).reverse();
    if (blocksCacheHashes.length > BLOCKS_CACHE_MAX_SIZE) {
      for (let i = 0; i < 10; ++i) {
        delete this.blocksCache[blocksCacheHashes[i]];
      }
    }

    const txid = block.tx[txIdx];
    const rawTx = await bitcoinClient.getRawTransaction(txid);
    const tx = await bitcoinClient.decodeRawTransaction(rawTx);

    this.fundingTxCache[channelId] = {
      timestamp: block.time,
      txid: txid,
      value: tx.vout[outputIdx].value,
    };

    ++this.channelNewlyProcessed;

    return this.fundingTxCache[channelId];
  }
}

export default new FundingTxFetcher;
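The channel ids handled here are short channel ids in the <blockHeight>x<txIndex>x<outputIndex> form, which is why the funding output can be located from on-chain data alone. A small worked example; the scid value is illustrative:

// Illustrative short channel id: block 700000, 2nd tx in the block, output 1.
const scid = '700000x1x1';
const [blockHeight, txIdx, outputIdx] = scid.split('x');
// The fetcher resolves this to block 700000's tx list, takes tx[1], and reads
// vout[1].value as the channel capacity (Core returns BTC, hence the
// Math.round(tx.value * 100000000) sat conversion in stats-importer.ts).
console.log(blockHeight, txIdx, outputIdx); // 700000 1 1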
backend/src/tasks/lightning/sync-tasks/stats-importer.ts (new file, 330 lines)

import DB from '../../../database';
import { promises } from 'fs';
import { XMLParser } from 'fast-xml-parser';
import logger from '../../../logger';
import fundingTxFetcher from './funding-tx-fetcher';
import config from '../../../config';

const fsPromises = promises;

interface Node {
  id: string;
  timestamp: number;
  features: string;
  rgb_color: string;
  alias: string;
  addresses: string;
  out_degree: number;
  in_degree: number;
}

interface Channel {
  scid: string;
  source: string;
  destination: string;
  timestamp: number;
  features: string;
  fee_base_msat: number;
  fee_proportional_millionths: number;
  htlc_minimim_msat: number;
  cltv_expiry_delta: number;
  htlc_maximum_msat: number;
}

class LightningStatsImporter {
  topologiesFolder = config.LIGHTNING.TOPOLOGY_FOLDER;
  parser = new XMLParser();

  async $run(): Promise<void> {
    logger.info(`Importing historical lightning stats`);

    const [channels]: any[] = await DB.query('SELECT short_id from channels;');
    logger.info('Caching funding txs for currently existing channels');
    await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));

    await this.$importHistoricalLightningStats();
  }

  /**
   * Generate LN network stats for one day
   */
  public async computeNetworkStats(timestamp: number, networkGraph): Promise<unknown> {
    // Node counts and network shares
    let clearnetNodes = 0;
    let torNodes = 0;
    let clearnetTorNodes = 0;
    let unannouncedNodes = 0;

    for (const node of networkGraph.nodes) {
      let hasOnion = false;
      let hasClearnet = false;
      let isUnnanounced = true;

      const sockets = node.addresses.split(',');
      for (const socket of sockets) {
        hasOnion = hasOnion || (socket.indexOf('torv3://') !== -1);
        hasClearnet = hasClearnet || (socket.indexOf('ipv4://') !== -1 || socket.indexOf('ipv6://') !== -1);
      }
      if (hasOnion && hasClearnet) {
        clearnetTorNodes++;
        isUnnanounced = false;
      } else if (hasOnion) {
        torNodes++;
        isUnnanounced = false;
      } else if (hasClearnet) {
        clearnetNodes++;
        isUnnanounced = false;
      }
      if (isUnnanounced) {
        unannouncedNodes++;
      }
    }

    // Channels and node historical stats
    const nodeStats = {};
    let capacity = 0;
    let avgFeeRate = 0;
    let avgBaseFee = 0;
    const capacities: number[] = [];
    const feeRates: number[] = [];
    const baseFees: number[] = [];
    const alreadyCountedChannels = {};

    for (const channel of networkGraph.channels) {
      const short_id = channel.scid.slice(0, -2);

      const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id);
      if (!tx) {
        logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date is unknown. Skipping channel.`);
        continue;
      }

      if (!nodeStats[channel.source]) {
        nodeStats[channel.source] = {
          capacity: 0,
          channels: 0,
        };
      }
      if (!nodeStats[channel.destination]) {
        nodeStats[channel.destination] = {
          capacity: 0,
          channels: 0,
        };
      }

      nodeStats[channel.source].capacity += Math.round(tx.value * 100000000);
      nodeStats[channel.source].channels++;
      nodeStats[channel.destination].capacity += Math.round(tx.value * 100000000);
      nodeStats[channel.destination].channels++;

      if (!alreadyCountedChannels[short_id]) {
        capacity += Math.round(tx.value * 100000000);
        capacities.push(Math.round(tx.value * 100000000));
        alreadyCountedChannels[short_id] = true;
      }

      if (channel.fee_proportional_millionths < 5000) {
        avgFeeRate += channel.fee_proportional_millionths;
        feeRates.push(channel.fee_proportional_millionths);
      }

      if (channel.fee_base_msat < 5000) {
        avgBaseFee += channel.fee_base_msat;
        baseFees.push(channel.fee_base_msat);
      }
    }

    avgFeeRate /= networkGraph.channels.length;
    avgBaseFee /= networkGraph.channels.length;
    const medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)];
    const medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)];
    const medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)];
    const avgCapacity = Math.round(capacity / capacities.length);

    let query = `INSERT INTO lightning_stats(
      added,
      channel_count,
      node_count,
      total_capacity,
      tor_nodes,
      clearnet_nodes,
      unannounced_nodes,
      clearnet_tor_nodes,
      avg_capacity,
      avg_fee_rate,
      avg_base_fee_mtokens,
      med_capacity,
      med_fee_rate,
      med_base_fee_mtokens
    )
    VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;

    await DB.query(query, [
      timestamp,
      capacities.length,
      networkGraph.nodes.length,
      capacity,
      torNodes,
      clearnetNodes,
      unannouncedNodes,
      clearnetTorNodes,
      avgCapacity,
      avgFeeRate,
      avgBaseFee,
      medCapacity,
      medFeeRate,
      medBaseFee,
    ]);

    for (const public_key of Object.keys(nodeStats)) {
      query = `INSERT INTO node_stats(
        public_key,
        added,
        capacity,
        channels
      )
      VALUES (?, FROM_UNIXTIME(?), ?, ?)`;

      await DB.query(query, [
        public_key,
        timestamp,
        nodeStats[public_key].capacity,
        nodeStats[public_key].channels,
      ]);
    }

    return {
      added: timestamp,
      node_count: networkGraph.nodes.length
    };
  }

  async $importHistoricalLightningStats(): Promise<void> {
    let latestNodeCount = 1;

    const fileList = await fsPromises.readdir(this.topologiesFolder);
    fileList.sort().reverse();

    const [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(added) as added, node_count FROM lightning_stats');
    const existingStatsTimestamps = {};
    for (const row of rows) {
      existingStatsTimestamps[row.added] = row;
    }

    for (const filename of fileList) {
      const timestamp = parseInt(filename.split('_')[1], 10);

      // Stats exist already, don't calculate/insert them
      if (existingStatsTimestamps[timestamp] !== undefined) {
        latestNodeCount = existingStatsTimestamps[timestamp].node_count;
        continue;
      }

      logger.debug(`Processing ${this.topologiesFolder}/${filename}`);
      const fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');

      let graph;
      if (filename.indexOf('.json') !== -1) {
        try {
          graph = JSON.parse(fileContent);
        } catch (e) {
          logger.debug(`Invalid topology file, cannot parse the content`);
          continue;
        }
      } else {
        graph = this.parseFile(fileContent);
        if (!graph) {
          logger.debug(`Invalid topology file, cannot parse the content`);
          continue;
        }
        await fsPromises.writeFile(`${this.topologiesFolder}/${filename}.json`, JSON.stringify(graph));
      }

      if (timestamp > 1556316000) {
        // "No, the reason most likely is just that I started collection in 2019,
        // so what I had before that is just the survivors from before, which weren't that many"
        const diffRatio = graph.nodes.length / latestNodeCount;
        if (diffRatio < 0.9) {
          // Ignore a drop of more than 10% of the node count as it's probably a missing data point
          continue;
        }
      }
      latestNodeCount = graph.nodes.length;

      const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
      logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.channels.length} channels`);

      // Cache funding txs
      logger.debug(`Caching funding txs for ${datestr}`);
      await fundingTxFetcher.$fetchChannelsFundingTxs(graph.channels.map(channel => channel.scid.slice(0, -2)));

      logger.debug(`Generating LN network stats for ${datestr}`);
      const stat = await this.computeNetworkStats(timestamp, graph);

      existingStatsTimestamps[timestamp] = stat;
    }

    logger.info(`Lightning network stats historical import completed`);
  }

  /**
   * Parse an XML topology file and return its nodes and channels
   */
  private parseFile(fileContent): any {
    const graph = this.parser.parse(fileContent);
    if (Object.keys(graph).length === 0) {
      return null;
    }

    const nodes: Node[] = [];
    const channels: Channel[] = [];

    // If there is only one entry, the parser does not return an array, so we override this
    if (!Array.isArray(graph.graphml.graph.node)) {
      graph.graphml.graph.node = [graph.graphml.graph.node];
    }
    if (!Array.isArray(graph.graphml.graph.edge)) {
      graph.graphml.graph.edge = [graph.graphml.graph.edge];
    }

    for (const node of graph.graphml.graph.node) {
      if (!node.data) {
        continue;
      }
      nodes.push({
        id: node.data[0],
        timestamp: node.data[1],
        features: node.data[2],
        rgb_color: node.data[3],
        alias: node.data[4],
        addresses: node.data[5],
        out_degree: node.data[6],
        in_degree: node.data[7],
      });
    }

    for (const channel of graph.graphml.graph.edge) {
      if (!channel.data) {
        continue;
      }
      channels.push({
        scid: channel.data[0],
        source: channel.data[1],
        destination: channel.data[2],
        timestamp: channel.data[3],
        features: channel.data[4],
        fee_base_msat: channel.data[5],
        fee_proportional_millionths: channel.data[6],
        htlc_minimim_msat: channel.data[7],
        cltv_expiry_delta: channel.data[8],
        htlc_maximum_msat: channel.data[9],
      });
    }

    return {
      nodes: nodes,
      channels: channels,
    };
  }
}

export default new LightningStatsImporter;
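parseFile() relies on the positional encoding used by these topology archives: each <node> carries eight <data> children and each <edge> ten, in exactly the order of the Node and Channel interfaces above, and addresses is a comma-separated list with ipv4://, ipv6:// and torv3:// prefixes, matching the checks in computeNetworkStats(). A sketch of a snapshot the importer would accept; all values are illustrative, and the trailing /0 on the scid is assumed here to be a direction suffix, which would explain the scid.slice(0, -2) calls:

import { XMLParser } from 'fast-xml-parser';

// Illustrative one-node, one-channel snapshot; <data> order matches the
// positional reads node.data[0..7] and channel.data[0..9] in parseFile().
const snapshot = `<graphml><graph>
  <node>
    <data>02aabb</data><data>1546300800</data><data>8882</data><data>#ff9900</data>
    <data>example-node</data><data>ipv4://203.0.113.1:9735,torv3://examplev3addr.onion:9735</data>
    <data>1</data><data>1</data>
  </node>
  <edge>
    <data>700000x1x1/0</data><data>02aabb</data><data>03ccdd</data><data>1546300800</data>
    <data>0000</data><data>1000</data><data>1</data><data>1000</data><data>40</data><data>16777215</data>
  </edge>
</graph></graphml>`;

const graph = new XMLParser().parse(snapshot);
console.log(graph.graphml.graph.node.data[4]); // 'example-node'

Note also the median convention in computeNetworkStats(): values are sorted descending and indexed at Math.round(length / 2 - 1), so for four capacities [4, 3, 2, 1] the reported median is the element at index 1, i.e. 3, rather than the midpoint average.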