mirror of https://github.com/mempool/mempool.git
Merge branch 'master' into fix/difficulty-api
Commit 738d1f8007
15 changed files with 193 additions and 111 deletions
@@ -4,7 +4,7 @@ import logger from '../logger';
 import { Common } from './common';

 class DatabaseMigration {
-  private static currentVersion = 39;
+  private static currentVersion = 40;
   private queryTimeout = 120000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];
@@ -342,6 +342,12 @@ class DatabaseMigration {
       await this.$executeQuery('ALTER TABLE `nodes` ADD alias_search TEXT NULL DEFAULT NULL AFTER `alias`');
       await this.$executeQuery('ALTER TABLE nodes ADD FULLTEXT(alias_search)');
     }
+
+    if (databaseSchemaVersion < 40 && isBitcoin === true) {
+      await this.$executeQuery('ALTER TABLE `nodes` ADD capacity bigint(20) unsigned DEFAULT NULL');
+      await this.$executeQuery('ALTER TABLE `nodes` ADD channels int(11) unsigned DEFAULT NULL');
+      await this.$executeQuery('ALTER TABLE `nodes` ADD INDEX `capacity` (`capacity`);');
+    }
   }

   /**
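For orientation: steps like the two above are presumably applied by a migration runner that compares the persisted schema version against each step's target, which is why `currentVersion` is bumped from 39 to 40 in the first hunk. A minimal sketch of that pattern; the class shape and helper wiring here are assumptions, only `$executeQuery` and the guard condition come from the diff:

```typescript
// Minimal sketch of a guarded, versioned migration step (assumed pattern).
class MigrationRunnerSketch {
  private static currentVersion = 40; // bumped whenever a new step is added

  async $migrate(databaseSchemaVersion: number, isBitcoin: boolean): Promise<void> {
    // Each step runs at most once: it is skipped as soon as the stored
    // schema version reaches the step's target version.
    if (databaseSchemaVersion < 40 && isBitcoin === true) {
      await this.$executeQuery('ALTER TABLE `nodes` ADD capacity bigint(20) unsigned DEFAULT NULL');
      await this.$executeQuery('ALTER TABLE `nodes` ADD channels int(11) unsigned DEFAULT NULL');
      await this.$executeQuery('ALTER TABLE `nodes` ADD INDEX `capacity` (`capacity`)');
    }
    // ...after all steps, the stored version would be updated to currentVersion.
  }

  private async $executeQuery(query: string): Promise<void> {
    // stand-in for DB.query(query) in the real class
  }
}
```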
@@ -288,21 +288,36 @@ class ChannelsApi {

     const channels: any[] = []
     for (const row of allChannels) {
-      const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
-      channels.push({
-        status: row.status,
-        closing_reason: row.closing_reason,
-        capacity: row.capacity ?? 0,
-        short_id: row.short_id,
-        id: row.id,
-        fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
-        node: {
-          alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
-          public_key: row.public_key,
-          channels: activeChannelsStats.active_channel_count ?? 0,
-          capacity: activeChannelsStats.capacity ?? 0,
-        }
-      });
+      let channel;
+      if (index >= 0) {
+        const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
+        channel = {
+          status: row.status,
+          closing_reason: row.closing_reason,
+          capacity: row.capacity ?? 0,
+          short_id: row.short_id,
+          id: row.id,
+          fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
+          node: {
+            alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
+            public_key: row.public_key,
+            channels: activeChannelsStats.active_channel_count ?? 0,
+            capacity: activeChannelsStats.capacity ?? 0,
+          }
+        };
+      } else if (index === -1) {
+        channel = {
+          capacity: row.capacity ?? 0,
+          short_id: row.short_id,
+          id: row.id,
+          node: {
+            alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
+            public_key: row.public_key,
+          }
+        };
+      }
+
+      channels.push(channel);
     }

     return channels;
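The rewrite gives this method two response shapes: when a page index (`index >= 0`) is requested, each row is enriched with the peer's stats via `$getActiveChannelsStats`; when `index === -1`, that per-row query is skipped and a slimmer object is returned, avoiding one extra query per channel in the bulk case. A sketch of the two shapes as TypeScript interfaces (the interface names are mine, not from the codebase; the fields are transcribed from the loop above):

```typescript
// Hypothetical names; shapes transcribed from the loop above.
interface ChannelPeerFull {
  alias: string;
  public_key: string;
  channels: number; // peer's active channel count
  capacity: number; // peer's total capacity
}

// index >= 0: paged listing, enriched per row
interface NodeChannelFull {
  status: any;
  closing_reason: any;
  capacity: number;
  short_id: string;
  id: number;
  fee_rate: number;
  node: ChannelPeerFull;
}

// index === -1: bulk listing, no per-peer stats query
interface NodeChannelSlim {
  capacity: number;
  short_id: string;
  id: number;
  node: Pick<ChannelPeerFull, 'alias' | 'public_key'>;
}
```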
@@ -47,8 +47,17 @@ class ChannelsRoutes {
       res.status(400).send('Missing parameter: public_key');
       return;
     }

+    const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
+    const status: string = typeof req.query.status === 'string' ? req.query.status : '';
+
+    if (index < -1) {
+      res.status(400).send('Invalid index');
+    }
+    if (['open', 'active', 'closed'].includes(status) === false) {
+      res.status(400).send('Invalid status');
+    }

     const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, 10, status);
     const channelsCount = await channelsApi.$getChannelsCountForNode(req.query.public_key, status);
     res.header('Pragma', 'public');
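One thing worth flagging in this hunk: both validation branches send a 400 but do not `return`, so as written the handler falls through, runs the queries, and then tries to set headers on a response that has already been sent. A defensive variant of the same parsing; the early returns are an editorial suggestion, not part of the commit:

```typescript
// Same query-string parsing as above, with early returns added
// (editorial suggestion, not part of this commit).
const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
const status: string = typeof req.query.status === 'string' ? req.query.status : '';

if (index < -1) {
  res.status(400).send('Invalid index');
  return; // without this, execution falls through to the queries
}
if (!['open', 'active', 'closed'].includes(status)) {
  res.status(400).send('Invalid status');
  return;
}
```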
@@ -115,17 +115,13 @@ class NodesApi {

   public async $getTopCapacityNodes(full: boolean): Promise<ITopNodesPerCapacity[]> {
     try {
-      let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
-      const latestDate = rows[0].maxAdded;
-
+      let rows: any;
       let query: string;
       if (full === false) {
         query = `
           SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
-            node_stats.capacity
-          FROM node_stats
-          JOIN nodes ON nodes.public_key = node_stats.public_key
-          WHERE added = FROM_UNIXTIME(${latestDate})
+            nodes.capacity
+          FROM nodes
           ORDER BY capacity DESC
           LIMIT 100
         `;
@@ -133,16 +129,14 @@ class NodesApi {
         [rows] = await DB.query(query);
       } else {
         query = `
-          SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
-            CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
-            CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
+          SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
+            CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
+            CAST(COALESCE(nodes.channels, 0) as INT) as channels,
             UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
             geo_names_city.names as city, geo_names_country.names as country
-          FROM node_stats
-          RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
+          FROM nodes
           LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
           LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
-          WHERE added = FROM_UNIXTIME(${latestDate})
           ORDER BY capacity DESC
           LIMIT 100
         `;
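The pattern repeated across these NodesApi hunks: capacity and channel counts used to live only in the `node_stats` time series, so every read had to find the latest snapshot timestamp and join on it; migration step 40 denormalizes both values onto `nodes`, and the queries collapse to a single indexed read. Side by side, as a sketch (parameterized here rather than interpolated, as an illustration):

```typescript
// Before (sketch): two round trips plus a JOIN pinned to the latest snapshot.
const [ts]: any[] = await DB.query(
  'SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
const [before]: any[] = await DB.query(`
  SELECT nodes.public_key, node_stats.capacity
  FROM node_stats
  JOIN nodes ON nodes.public_key = node_stats.public_key
  WHERE added = FROM_UNIXTIME(?)
  ORDER BY capacity DESC LIMIT 100`, [ts[0].maxAdded]);

// After (sketch): one query against the denormalized columns, served by
// the `capacity` index added in migration step 40.
const [after]: any[] = await DB.query(`
  SELECT public_key, capacity FROM nodes ORDER BY capacity DESC LIMIT 100`);
```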
@@ -163,17 +157,13 @@ class NodesApi {

   public async $getTopChannelsNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
     try {
-      let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
-      const latestDate = rows[0].maxAdded;
-
+      let rows: any;
       let query: string;
       if (full === false) {
         query = `
           SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
-            node_stats.channels
-          FROM node_stats
-          JOIN nodes ON nodes.public_key = node_stats.public_key
-          WHERE added = FROM_UNIXTIME(${latestDate})
+            nodes.channels
+          FROM nodes
           ORDER BY channels DESC
           LIMIT 100;
         `;
@@ -181,16 +171,14 @@ class NodesApi {
         [rows] = await DB.query(query);
       } else {
         query = `
-          SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
-            CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
-            CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
+          SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
+            CAST(COALESCE(nodes.channels, 0) as INT) as channels,
+            CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
             UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
             geo_names_city.names as city, geo_names_country.names as country
-          FROM node_stats
-          RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
+          FROM nodes
           LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
           LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
-          WHERE added = FROM_UNIXTIME(${latestDate})
           ORDER BY channels DESC
           LIMIT 100
         `;
@@ -260,8 +248,8 @@ class NodesApi {
   public async $searchNodeByPublicKeyOrAlias(search: string) {
     try {
       const publicKeySearch = search.replace('%', '') + '%';
-      const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
-      const query = `SELECT nodes.public_key, nodes.alias, node_stats.capacity FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key WHERE nodes.public_key LIKE ? OR MATCH nodes.alias_search AGAINST (? IN BOOLEAN MODE) GROUP BY nodes.public_key ORDER BY node_stats.capacity DESC LIMIT 10`;
+      const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
+      const query = `SELECT public_key, alias, capacity, channels FROM nodes WHERE public_key LIKE ? OR MATCH alias_search AGAINST (? IN BOOLEAN MODE) ORDER BY capacity DESC LIMIT 10`;
       const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]);
       return rows;
     } catch (e) {
|
|||
|
||||
// List all channels and the two linked ISP
|
||||
query = `
|
||||
SELECT short_id, capacity,
|
||||
SELECT short_id, channels.capacity,
|
||||
channels.node1_public_key AS node1PublicKey, isp1.names AS isp1, isp1.id as isp1ID,
|
||||
channels.node2_public_key AS node2PublicKey, isp2.names AS isp2, isp2.id as isp2ID
|
||||
FROM channels
|
||||
|
@ -391,17 +379,11 @@ class NodesApi {
|
|||
public async $getNodesPerCountry(countryId: string) {
|
||||
try {
|
||||
const query = `
|
||||
SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
|
||||
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
|
||||
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
|
||||
geo_names_city.names as city, geo_names_country.names as country,
|
||||
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
|
||||
FROM node_stats
|
||||
JOIN (
|
||||
SELECT public_key, MAX(added) as last_added
|
||||
FROM node_stats
|
||||
GROUP BY public_key
|
||||
) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
|
||||
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
|
@@ -426,17 +408,10 @@ class NodesApi {
   public async $getNodesPerISP(ISPId: string) {
     try {
       const query = `
-        SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
+        SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
           nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
           geo_names_city.names as city, geo_names_country.names as country,
           geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
-        FROM node_stats
-        JOIN (
-          SELECT public_key, MAX(added) as last_added
-          FROM node_stats
-          GROUP BY public_key
-        ) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
-        RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
+        FROM nodes
         LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
         LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
         LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
@@ -464,7 +439,6 @@ class NodesApi {
         FROM nodes
         JOIN geo_names ON geo_names.id = nodes.country_id AND geo_names.type = 'country'
         JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
-        JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
         GROUP BY country_id
         ORDER BY COUNT(DISTINCT nodes.public_key) DESC
       `;
|
|||
}
|
||||
|
||||
private aliasToSearchText(str: string): string {
|
||||
return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z ]/g, '');
|
||||
return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '');
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -7,6 +7,7 @@ import { ILightningApi } from '../../../api/lightning/lightning-api.interface';
 import { isIP } from 'net';
 import { Common } from '../../../api/common';
 import channelsApi from '../../../api/explorer/channels.api';
+import nodesApi from '../../../api/explorer/nodes.api';

 const fsPromises = promises;
@@ -32,7 +33,26 @@ class LightningStatsImporter {
     let clearnetTorNodes = 0;
     let unannouncedNodes = 0;

+    const [nodesInDbRaw]: any[] = await DB.query(`SELECT public_key FROM nodes`);
+    const nodesInDb = {};
+    for (const node of nodesInDbRaw) {
+      nodesInDb[node.public_key] = node;
+    }
+
     for (const node of networkGraph.nodes) {
+      // If we don't know about this node, insert it in db
+      if (isHistorical === true && !nodesInDb[node.pub_key]) {
+        await nodesApi.$saveNode({
+          last_update: node.last_update,
+          pub_key: node.pub_key,
+          alias: node.alias,
+          addresses: node.addresses,
+          color: node.color,
+          features: node.features,
+        });
+        nodesInDb[node.pub_key] = node;
+      }
+
       let hasOnion = false;
       let hasClearnet = false;
       let isUnnanounced = true;
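Loading every known public key into a plain object up front turns the per-node "is this node already in the db?" check into an O(1) in-memory lookup instead of one SELECT per graph node, which matters because the historical importer replays many topology snapshots. Reduced to its core (a sketch; `$saveNode` takes the fuller field mapping shown above):

```typescript
// Sketch: build the lookup once, probe it per graph node.
const [rowsInDb]: any[] = await DB.query(`SELECT public_key FROM nodes`);
const known: Record<string, boolean> = {};
for (const row of rowsInDb) {
  known[row.public_key] = true;
}

for (const node of networkGraph.nodes) {
  if (isHistorical === true && !known[node.pub_key]) {
    await nodesApi.$saveNode(node as any); // field mapping as in the hunk above
    known[node.pub_key] = true;            // later snapshots skip the insert
  }
}
```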
@@ -69,7 +89,7 @@ class LightningStatsImporter {
     const baseFees: number[] = [];
     const alreadyCountedChannels = {};

-    const [channelsInDbRaw]: any[] = await DB.query(`SELECT short_id, created FROM channels`);
+    const [channelsInDbRaw]: any[] = await DB.query(`SELECT short_id FROM channels`);
     const channelsInDb = {};
     for (const channel of channelsInDbRaw) {
       channelsInDb[channel.short_id] = channel;
@@ -84,29 +104,19 @@ class LightningStatsImporter {
         continue;
       }

-      // Channel is already in db, check if we need to update 'created' field
-      if (isHistorical === true) {
-        //@ts-ignore
-        if (channelsInDb[short_id] && channel.timestamp < channel.created) {
-          await DB.query(`
-            UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.short_id = ?`,
-            //@ts-ignore
-            [channel.timestamp, short_id]
-          );
-        } else if (!channelsInDb[short_id]) {
-          await channelsApi.$saveChannel({
-            channel_id: short_id,
-            chan_point: `${tx.txid}:${short_id.split('x')[2]}`,
-            //@ts-ignore
-            last_update: channel.timestamp,
-            node1_pub: channel.node1_pub,
-            node2_pub: channel.node2_pub,
-            capacity: (tx.value * 100000000).toString(),
-            node1_policy: null,
-            node2_policy: null,
-          }, 0);
-          channelsInDb[channel.channel_id] = channel;
-        }
-      }
+      // If we don't know about this channel, insert it in db
+      if (isHistorical === true && !channelsInDb[short_id]) {
+        await channelsApi.$saveChannel({
+          channel_id: short_id,
+          chan_point: `${tx.txid}:${short_id.split('x')[2]}`,
+          last_update: channel.last_update,
+          node1_pub: channel.node1_pub,
+          node2_pub: channel.node2_pub,
+          capacity: (tx.value * 100000000).toString(),
+          node1_policy: null,
+          node2_policy: null,
+        }, 0);
+        channelsInDb[channel.channel_id] = channel;
+      }

       if (!nodeStats[channel.node1_pub]) {
@@ -269,6 +279,17 @@ class LightningStatsImporter {
         nodeStats[public_key].capacity,
         nodeStats[public_key].channels,
       ]);
+
+      if (!isHistorical) {
+        await DB.query(
+          `UPDATE nodes SET capacity = ?, channels = ? WHERE public_key = ?`,
+          [
+            nodeStats[public_key].capacity,
+            nodeStats[public_key].channels,
+            public_key,
+          ]
+        );
+      }
     }

     return {
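This is the write side of the denormalization the NodesApi hunks read from: on live (non-historical) runs, the freshly computed per-node totals are copied onto `nodes.capacity` and `nodes.channels`; historical imports skip the update so an old snapshot cannot overwrite current values with stale numbers. The contract, spelled out as a sketch:

```typescript
// Writer (stats importer, live runs only) keeps the columns fresh:
if (!isHistorical) {
  await DB.query(
    `UPDATE nodes SET capacity = ?, channels = ? WHERE public_key = ?`,
    [nodeStats[public_key].capacity, nodeStats[public_key].channels, public_key],
  );
}

// Readers (top lists, search, per-country/per-ISP queries) rely on it:
const [topNodes]: any[] = await DB.query(
  `SELECT public_key, capacity, channels FROM nodes ORDER BY capacity DESC LIMIT 100`,
);
```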
@@ -281,6 +302,7 @@ class LightningStatsImporter {
    * Import topology files LN historical data into the database
    */
   async $importHistoricalLightningStats(): Promise<void> {
+    logger.debug('Run the historical importer');
     try {
       let fileList: string[] = [];
       try {
@@ -294,7 +316,7 @@ class LightningStatsImporter {
       fileList.sort().reverse();

       const [rows]: any[] = await DB.query(`
-        SELECT UNIX_TIMESTAMP(added) AS added, node_count
+        SELECT UNIX_TIMESTAMP(added) AS added
         FROM lightning_stats
         ORDER BY added DESC
       `);
@@ -391,12 +413,16 @@ class LightningStatsImporter {
         });
       }

+      let rgb = node.rgb_color ?? '#000000';
+      if (rgb.indexOf('#') === -1) {
+        rgb = `#${rgb}`;
+      }
       newGraph.nodes.push({
         last_update: node.timestamp ?? 0,
         pub_key: node.id ?? null,
-        alias: node.alias ?? null,
+        alias: node.alias ?? node.id.slice(0, 20),
         addresses: addresses,
-        color: node.rgb_color ?? null,
+        color: rgb,
         features: {},
       });
     }
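Two small robustness fixes for imported topology files: a node with no alias now falls back to a truncated public key instead of `null`, and a color value missing its leading `#` is normalized before being stored. The color rule in isolation, as a sketch:

```typescript
// Sketch: normalize an imported rgb_color to '#rrggbb' form.
function normalizeRgb(rgbColor: string | undefined): string {
  let rgb = rgbColor ?? '#000000'; // default when the node has no color
  if (rgb.indexOf('#') === -1) {   // e.g. '3399ff' -> '#3399ff'
    rgb = `#${rgb}`;
  }
  return rgb;
}
```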
@@ -1,7 +1,7 @@
 <form [formGroup]="searchForm" (submit)="searchForm.valid && search()" novalidate>
   <div class="d-flex">
     <div class="search-box-container mr-2">
-      <input (focus)="focus$.next($any($event).target.value)" (click)="click$.next($any($event).target.value)" formControlName="searchText" type="text" class="form-control" i18n-placeholder="search-form.searchbar-placeholder" placeholder="TXID, block height, hash or address">
+      <input (focus)="focus$.next($any($event).target.value)" (click)="click$.next($any($event).target.value)" formControlName="searchText" type="text" class="form-control" i18n-placeholder="search-form.searchbar-placeholder" placeholder="Search the full Bitcoin ecosystem">

       <app-search-results #searchResults [results]="typeAhead$ | async" [searchTerm]="searchForm.get('searchText').value" (selectedResult)="selectedResult($event)"></app-search-results>
@@ -105,10 +105,10 @@ export class TransactionsListComponent implements OnInit, OnChanges {
       }, 10);
     }

-    this.transactions.forEach((tx, i) => {
+    this.transactions.forEach((tx) => {
       tx['@voutLimit'] = true;
       tx['@vinLimit'] = true;
-      if (this.outspends[i]) {
+      if (tx['addressValue'] !== undefined) {
         return;
       }
@@ -119,20 +119,30 @@
 </div>

 <div *ngIf="!error">
-  <div class="row">
+  <div class="row" *ngIf="node.as_number">
     <div class="col-sm">
-      <app-nodes-channels-map [style]="'nodepage'" [publicKey]="node.public_key"></app-nodes-channels-map>
+      <app-nodes-channels-map [style]="'nodepage'" [publicKey]="node.public_key" [hasLocation]="!!node.as_number"></app-nodes-channels-map>
     </div>
     <div class="col-sm">
       <app-node-statistics-chart [publicKey]="node.public_key"></app-node-statistics-chart>
     </div>
   </div>
+  <div *ngIf="!node.as_number">
+    <app-node-statistics-chart [publicKey]="node.public_key"></app-node-statistics-chart>
+  </div>

   <h2 i18n="lightning.active-channels-map">Active channels map</h2>
   <app-node-channels style="display:block;margin-bottom: 40px" [publicKey]="node.public_key"></app-node-channels>

   <div class="d-flex justify-content-between">
-    <h2>Channels ({{ channelsListStatus === 'open' ? node.opened_channel_count : node.closed_channel_count }})</h2>
+    <h2 *ngIf="channelsListStatus === 'open'">
+      <span i18n="lightning.open-channels">Open channels</span>
+      <span> ({{ node.opened_channel_count }})</span>
+    </h2>
+    <h2 *ngIf="channelsListStatus === 'closed'">
+      <span i18n="lightning.open-channels">Closed channels</span>
+      <span> ({{ node.closed_channel_count }})</span>
+    </h2>
   </div>

   <app-channels-list [publicKey]="node.public_key"
@@ -105,6 +105,9 @@
   top: 50%;
   left: calc(50% - 15px);
   z-index: 100;
+  @media (max-width: 767.98px) {
+    top: 550px;
+  }
 }
 .loading-spinner.widget {
   position: absolute;
@@ -115,4 +118,22 @@
     @media (max-width: 767.98px) {
       top: 250px;
     }
   }
 }
+.loading-spinner.nodepage {
+  position: absolute;
+  top: 200px;
+  z-index: 100;
+  width: 100%;
+  left: 0;
+}
+
+.loading-spinner.channelpage {
+  position: absolute;
+  top: 400px;
+  z-index: 100;
+  width: 100%;
+  left: 0;
+  @media (max-width: 767.98px) {
+    top: 450px;
+  }
+}
@@ -21,6 +21,7 @@ export class NodesChannelsMap implements OnInit {
   @Input() publicKey: string | undefined;
   @Input() channel: any[] = [];
   @Input() fitContainer = false;
+  @Input() hasLocation = true;
   @Output() readyEvent = new EventEmitter();

   channelsObservable: Observable<any>;
@@ -32,7 +33,7 @@ export class NodesChannelsMap implements OnInit {
   channelColor = '#466d9d';
   channelCurve = 0;
   nodeSize = 4;
-  isLoading = true;
+  isLoading = false;

   chartInstance = undefined;
   chartOptions: EChartsOption = {};
@@ -73,6 +74,11 @@ export class NodesChannelsMap implements OnInit {
     this.channelsObservable = this.activatedRoute.paramMap
       .pipe(
         switchMap((params: ParamMap) => {
+          this.isLoading = true;
+          if (this.style === 'channelpage' && this.channel.length === 0 || !this.hasLocation) {
+            this.isLoading = false;
+          }
+
           return zip(
             this.assetsService.getWorldMapJson$,
             this.style !== 'channelpage' ? this.apiService.getChannelsGeo$(params.get('public_key') ?? undefined, this.style) : [''],
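The map now starts with `isLoading = false` and only shows the spinner once a route change actually kicks off work, switching it off immediately when there is nothing to draw. Note the operator precedence in the condition: `&&` binds tighter than `||`, so the spinner is skipped when either clause holds, as this equivalent sketch makes explicit:

```typescript
// Equivalent condition with the precedence made explicit (sketch).
const emptyChannelPage = this.style === 'channelpage' && this.channel.length === 0;
if (emptyChannelPage || !this.hasLocation) {
  this.isLoading = false; // nothing to fetch or draw for this route
}
```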
@@ -121,7 +121,7 @@ export class NodesNetworksChartComponent implements OnInit {
           left: 'center',
           top: 'center',
         };
-      } else if (data.tor_nodes.length > 0) {
+      } else if (this.widget && data.tor_nodes.length > 0) {
         title = {
           textStyle: {
             color: 'grey',
@@ -113,7 +113,7 @@ export class LightningStatisticsChartComponent implements OnInit {
           left: 'center',
           top: 'center'
         };
-      } else if (data.channel_count.length > 0) {
+      } else if (this.widget && data.channel_count.length > 0) {
         title = {
           textStyle: {
             color: 'grey',
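The same one-line guard lands in both chart components: the alternate grey title block now renders only when the chart is embedded as a widget, so the full-size charts keep their default title even when data is present. The shape of the branch, as a sketch:

```typescript
// Sketch of the guard shared by both chart components.
let title: object = {};
if (data.channel_count.length === 0) {
  title = { /* centered placeholder title */ };
} else if (this.widget && data.channel_count.length > 0) {
  title = { textStyle: { color: 'grey' } }; // widget-only styling
}
```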
@@ -385,7 +385,7 @@ DEBIAN_UNFURL_PKG+=(libxdamage-dev libxrandr-dev libgbm-dev libpango1.0-dev liba

 # packages needed for mempool ecosystem
 FREEBSD_PKG=()
-FREEBSD_PKG+=(zsh sudo git screen curl wget calc neovim)
+FREEBSD_PKG+=(zsh sudo git git-lfs screen curl wget calc neovim)
 FREEBSD_PKG+=(openssh-portable py39-pip rust llvm90 jq base64 libzmq4)
 FREEBSD_PKG+=(boost-libs autoconf automake gmake gcc libevent libtool pkgconf)
 FREEBSD_PKG+=(nginx rsync py39-certbot-nginx mariadb105-server keybase)
@@ -976,15 +976,28 @@ osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-kill-all stop
 osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-start-all start
 osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-restart-all restart

-echo "[*] Installing syslog configuration"
-osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/syslog.d
-osSudo "${ROOT_USER}" install -c -m 755 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool-logger" /usr/local/bin/mempool-logger
-osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/syslog.conf" /usr/local/etc/syslog.d/mempool.conf
-
-echo "[*] Installing newsyslog configuration"
-osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/newsyslog.conf.d
-osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-backend.conf" /usr/local/etc/syslog.d/newsyslog-mempool-backend.conf
-osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-nginx.conf" /usr/local/etc/syslog.d/newsyslog-mempool-nginx.conf
+case $OS in
+  FreeBSD)
+    echo "[*] Installing syslog configuration"
+    osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/syslog.d
+    osSudo "${ROOT_USER}" install -c -m 755 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool-logger" /usr/local/bin/mempool-logger
+    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/syslog.conf" /usr/local/etc/syslog.d/mempool.conf
+
+    echo "[*] Installing newsyslog configuration"
+    osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/newsyslog.conf.d
+    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-backend.conf" /usr/local/etc/newsyslog.conf.d/newsyslog-mempool-backend.conf
+    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-nginx.conf" /usr/local/etc/newsyslog.conf.d/newsyslog-mempool-nginx.conf
+
+    echo "[*] Creating log files"
+    osSudo "${ROOT_USER}" newsyslog -C
+    ;;
+  Debian)
+    echo "[*] Installing syslog configuration"
+    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/linux/rsyslog.conf" /etc/rsyslog.d/10-mempool.conf
+    osSudo "${ROOT_USER}" sed -i.orig -e 's/^\*\.\*;auth,authpriv\.none/*\.*;auth,authpriv\.none,local7\.none/' /etc/rsyslog.d/50-default.conf
+    ;;
+esac

 echo "[*] Installing Mempool crontab"
 osSudo "${ROOT_USER}" crontab -u "${MEMPOOL_USER}" "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool.crontab"
@@ -1267,7 +1280,7 @@ case $OS in
         osGroupCreate "${CLN_GROUP}"
         osUserCreate "${CLN_USER}" "${CLN_HOME}" "${CLN_GROUP}"
         osSudo "${ROOT_USER}" chsh -s `which zsh` "${CLN_USER}"
         osSudo "${CLN_USER}" touch "${CLN_HOME}/.zshrc"
         echo "export PATH=$PATH:$HOME/.local/bin" >> "${CLN_HOME}/.zshrc"
         osSudo "${ROOT_USER}" chown -R "${CLN_USER}:${CLN_GROUP}" "${CLN_HOME}"

         echo "[*] Installing Core Lightning package"
production/linux/rsyslog.conf (new file, +2)
@@ -0,0 +1,2 @@
+local7.info /var/log/mempool
+local7.* /var/log/mempool.debug
production/syslog.conf
@@ -1,4 +1,4 @@
-local7.>=notice |/usr/local/bin/sudo -u mempool /usr/local/bin/mempool-logger mempool.ops alerts
+local7.>=err |/usr/local/bin/sudo -u mempool /usr/local/bin/mempool-logger mempool.ops alerts
 local7.>=info |/usr/local/bin/sudo -u mempool /usr/local/bin/mempool-logger mempool.ops node100
 local7.>=info /var/log/mempool
 local7.* /var/log/mempool.debug