Merge branch 'master' into mononaut/zero-value-tx-diagrams

Commit 1348e2318d by wiz, committed via GitHub on 2022-12-26 12:24:09 +09:00
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
20 changed files with 357 additions and 218 deletions

View File

@@ -1,20 +1,32 @@
 version: 2
 updates:
   - package-ecosystem: npm
     directory: "/backend"
     schedule:
       interval: daily
     open-pull-requests-limit: 10
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major"]
   - package-ecosystem: npm
     directory: "/frontend"
     schedule:
       interval: daily
     open-pull-requests-limit: 10
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major"]
   - package-ecosystem: docker
     directory: "/docker/backend"
     schedule:
-      interval: weekly
+      interval: daily
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major"]
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: daily
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major"]

View File

@@ -2,7 +2,7 @@ name: Cypress Tests
 on:
   pull_request:
-    types: [ opened, review_requested, synchronize ]
+    types: [opened, review_requested, synchronize]

 jobs:
   cypress:
     if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
@@ -28,32 +28,32 @@ jobs:
     name: E2E tests for ${{ matrix.module }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           path: ${{ matrix.module }}
       - name: Setup node
-        uses: actions/setup-node@v2
+        uses: actions/setup-node@v3
        with:
          node-version: 16.15.0
-          cache: 'npm'
+          cache: "npm"
          cache-dependency-path: ${{ matrix.module }}/frontend/package-lock.json
       - name: Chrome browser tests (${{ matrix.module }})
-        uses: cypress-io/github-action@v4
+        uses: cypress-io/github-action@v5
        with:
          tag: ${{ github.event_name }}
          working-directory: ${{ matrix.module }}/frontend
          build: npm run config:defaults:${{ matrix.module }}
          start: npm run start:local-staging
-          wait-on: 'http://localhost:4200'
+          wait-on: "http://localhost:4200"
          wait-on-timeout: 120
          record: true
          parallel: true
          spec: ${{ matrix.spec }}
          group: Tests on Chrome (${{ matrix.module }})
          browser: "chrome"
-          ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
+          ci-build-id: "${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}"
        env:
          COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}

View File

@@ -85,7 +85,8 @@
     "STATS_REFRESH_INTERVAL": 600,
     "GRAPH_REFRESH_INTERVAL": 600,
     "LOGGER_UPDATE_INTERVAL": 30,
-    "FORENSICS_INTERVAL": 43200
+    "FORENSICS_INTERVAL": 43200,
+    "FORENSICS_RATE_LIMIT": 20
   },
   "LND": {
     "TLS_CERT_PATH": "tls.cert",

View File

@@ -101,7 +101,8 @@
     "STATS_REFRESH_INTERVAL": 600,
     "GRAPH_REFRESH_INTERVAL": 600,
     "LOGGER_UPDATE_INTERVAL": 30,
-    "FORENSICS_INTERVAL": 43200
+    "FORENSICS_INTERVAL": 43200,
+    "FORENSICS_RATE_LIMIT": "__FORENSICS_RATE_LIMIT__"
   },
   "LND": {
     "TLS_CERT_PATH": "",

View File

@@ -742,7 +742,7 @@ class Blocks {
   public async $indexCPFP(hash: string, height: number): Promise<void> {
     let transactions;
-    if (false/*Common.blocksSummariesIndexingEnabled()*/) {
+    if (Common.blocksSummariesIndexingEnabled()) {
       transactions = await this.$getStrippedBlockTransactions(hash);
       const rawBlock = await bitcoinApi.$getRawBlock(hash);
       const block = Block.fromBuffer(rawBlock);
@@ -751,10 +751,11 @@
         txMap[tx.getId()] = tx;
       }
       for (const tx of transactions) {
+        // convert from bitcoinjs to esplora vin format
         if (txMap[tx.txid]?.ins) {
           tx.vin = txMap[tx.txid].ins.map(vin => {
             return {
-              txid: vin.hash
+              txid: vin.hash.slice().reverse().toString('hex')
             };
           });
         }
@@ -763,6 +764,7 @@
       const block = await bitcoinClient.getBlock(hash, 2);
       transactions = block.tx.map(tx => {
         tx.vsize = tx.weight / 4;
+        tx.fee *= 100_000_000;
         return tx;
       });
     }
@@ -778,9 +780,9 @@
         totalFee += tx?.fee || 0;
         totalVSize += tx.vsize;
       });
-      const effectiveFeePerVsize = (totalFee * 100_000_000) / totalVSize;
+      const effectiveFeePerVsize = totalFee / totalVSize;
       if (cluster.length > 1) {
-        await cpfpRepository.$saveCluster(height, cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: (tx.fee || 0) * 100_000_000 }; }), effectiveFeePerVsize);
+        await cpfpRepository.$saveCluster(height, cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: tx.fee || 0 }; }), effectiveFeePerVsize);
         for (const tx of cluster) {
           await transactionRepository.$setCluster(tx.txid, cluster[0].txid);
         }
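
The vin conversion above works because bitcoinjs-lib stores each input's previous-output hash as raw little-endian bytes, while Esplora-style data uses the byte-reversed hex txid; likewise, tx.fee *= 100_000_000 converts the BTC-denominated fee returned by Bitcoin Core's verbose getblock into satoshis, so both code paths feed the same unit into the CPFP math below. A minimal sketch of the byte-order conversion (prevTxidOf is a hypothetical helper, not part of the codebase):

import { Transaction } from 'bitcoinjs-lib';

// bitcoinjs-lib keeps the spent outpoint's txid as a little-endian Buffer;
// reversing the byte order yields the txid hex that explorers and the Esplora API display.
function prevTxidOf(tx: Transaction, index: number): string {
  return Buffer.from(tx.ins[index].hash).reverse().toString('hex');
}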

View File

@@ -538,6 +538,10 @@ class NodesApi {
       const IPSIds = ISPId.split(',');
       const [rows]: any = await DB.query(query, [IPSIds, IPSIds]);
+      if (!rows || rows.length === 0) {
+        return [];
+      }
+
       const nodes = {};
       const intISPIds: number[] = [];

View File

@@ -1,17 +1,14 @@
 import logger from '../logger';
-import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces';
+import { MempoolBlock, TransactionExtended, ThreadTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces';
 import { Common } from './common';
 import config from '../config';
-import { StaticPool } from 'node-worker-threads-pool';
+import { Worker } from 'worker_threads';
 import path from 'path';

 class MempoolBlocks {
   private mempoolBlocks: MempoolBlockWithTransactions[] = [];
   private mempoolBlockDeltas: MempoolBlockDelta[] = [];
-  private makeTemplatesPool = new StaticPool({
-    size: 1,
-    task: path.resolve(__dirname, './tx-selection-worker.js'),
-  });
+  private txSelectionWorker: Worker | null = null;

   constructor() {}
@@ -146,27 +143,159 @@ class MempoolBlocks {
     return mempoolBlockDeltas;
   }

-  public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): Promise<void> {
-    const { mempool, blocks } = await this.makeTemplatesPool.exec({ mempool: newMempool, blockLimit, weightLimit, condenseRest });
-    const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
-
-    // copy CPFP info across to main thread's mempool
-    Object.keys(newMempool).forEach((txid) => {
-      if (newMempool[txid] && mempool[txid]) {
-        newMempool[txid].effectiveFeePerVsize = mempool[txid].effectiveFeePerVsize;
-        newMempool[txid].ancestors = mempool[txid].ancestors;
-        newMempool[txid].descendants = mempool[txid].descendants;
-        newMempool[txid].bestDescendant = mempool[txid].bestDescendant;
-        newMempool[txid].cpfpChecked = mempool[txid].cpfpChecked;
-      }
-    });
-
-    this.mempoolBlocks = blocks;
+  public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }): Promise<void> {
+    // prepare a stripped down version of the mempool with only the minimum necessary data
+    // to reduce the overhead of passing this data to the worker thread
+    const strippedMempool: { [txid: string]: ThreadTransaction } = {};
+    Object.values(newMempool).forEach(entry => {
+      strippedMempool[entry.txid] = {
+        txid: entry.txid,
+        fee: entry.fee,
+        weight: entry.weight,
+        feePerVsize: entry.fee / (entry.weight / 4),
+        effectiveFeePerVsize: entry.fee / (entry.weight / 4),
+        vin: entry.vin.map(v => v.txid),
+      };
+    });
+
+    // (re)initialize tx selection worker thread
+    if (!this.txSelectionWorker) {
+      this.txSelectionWorker = new Worker(path.resolve(__dirname, './tx-selection-worker.js'));
+      // if the thread throws an unexpected error, or exits for any other reason,
+      // reset worker state so that it will be re-initialized on the next run
+      this.txSelectionWorker.once('error', () => {
+        this.txSelectionWorker = null;
+      });
+      this.txSelectionWorker.once('exit', () => {
+        this.txSelectionWorker = null;
+      });
+    }
+
+    // run the block construction algorithm in a separate thread, and wait for a result
+    let threadErrorListener;
+    try {
+      const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
+        threadErrorListener = reject;
+        this.txSelectionWorker?.once('message', (result): void => {
+          resolve(result);
+        });
+        this.txSelectionWorker?.once('error', reject);
+      });
+      this.txSelectionWorker.postMessage({ type: 'set', mempool: strippedMempool });
+      const { blocks, clusters } = await workerResultPromise;
+
+      this.processBlockTemplates(newMempool, blocks, clusters);
+
+      // clean up thread error listener
+      this.txSelectionWorker?.removeListener('error', threadErrorListener);
+    } catch (e) {
+      logger.err('makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
+    }
+  }
+
+  public async updateBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, added: TransactionExtended[], removed: string[]): Promise<void> {
+    if (!this.txSelectionWorker) {
+      // need to reset the worker
+      return this.makeBlockTemplates(newMempool);
+    }
+
+    // prepare a stripped down version of the mempool with only the minimum necessary data
+    // to reduce the overhead of passing this data to the worker thread
+    const addedStripped: ThreadTransaction[] = added.map(entry => {
+      return {
+        txid: entry.txid,
+        fee: entry.fee,
+        weight: entry.weight,
+        feePerVsize: entry.fee / (entry.weight / 4),
+        effectiveFeePerVsize: entry.fee / (entry.weight / 4),
+        vin: entry.vin.map(v => v.txid),
+      };
+    });
+
+    // run the block construction algorithm in a separate thread, and wait for a result
+    let threadErrorListener;
+    try {
+      const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
+        threadErrorListener = reject;
+        this.txSelectionWorker?.once('message', (result): void => {
+          resolve(result);
+        });
+        this.txSelectionWorker?.once('error', reject);
+      });
+      this.txSelectionWorker.postMessage({ type: 'update', added: addedStripped, removed });
+      const { blocks, clusters } = await workerResultPromise;
+
+      this.processBlockTemplates(newMempool, blocks, clusters);
+
+      // clean up thread error listener
+      this.txSelectionWorker?.removeListener('error', threadErrorListener);
+    } catch (e) {
+      logger.err('updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
+    }
+  }
+
+  private processBlockTemplates(mempool, blocks, clusters): void {
+    // update this thread's mempool with the results
+    blocks.forEach(block => {
+      block.forEach(tx => {
+        if (tx.txid in mempool) {
+          if (tx.effectiveFeePerVsize != null) {
+            mempool[tx.txid].effectiveFeePerVsize = tx.effectiveFeePerVsize;
+          }
+          if (tx.cpfpRoot && tx.cpfpRoot in clusters) {
+            const ancestors: Ancestor[] = [];
+            const descendants: Ancestor[] = [];
+            const cluster = clusters[tx.cpfpRoot];
+            let matched = false;
+            cluster.forEach(txid => {
+              if (txid === tx.txid) {
+                matched = true;
+              } else {
+                const relative = {
+                  txid: txid,
+                  fee: mempool[txid].fee,
+                  weight: mempool[txid].weight,
+                };
+                if (matched) {
+                  descendants.push(relative);
+                } else {
+                  ancestors.push(relative);
+                }
+              }
+            });
+            mempool[tx.txid].ancestors = ancestors;
+            mempool[tx.txid].descendants = descendants;
+            mempool[tx.txid].bestDescendant = null;
+          }
+          mempool[tx.txid].cpfpChecked = tx.cpfpChecked;
+        }
+      });
+    });
+
+    // unpack the condensed blocks into proper mempool blocks
+    const mempoolBlocks = blocks.map((transactions, blockIndex) => {
+      return this.dataToMempoolBlocks(transactions.map(tx => {
+        return mempool[tx.txid] || null;
+      }).filter(tx => !!tx), undefined, undefined, blockIndex);
+    });
+
+    const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, mempoolBlocks);
+    this.mempoolBlocks = mempoolBlocks;
     this.mempoolBlockDeltas = deltas;
   }

   private dataToMempoolBlocks(transactions: TransactionExtended[],
-    blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
+    blockSize: number | undefined, blockWeight: number | undefined, blocksIndex: number): MempoolBlockWithTransactions {
+    let totalSize = blockSize || 0;
+    let totalWeight = blockWeight || 0;
+    if (blockSize === undefined && blockWeight === undefined) {
+      totalSize = 0;
+      totalWeight = 0;
+      transactions.forEach(tx => {
+        totalSize += tx.size;
+        totalWeight += tx.weight;
+      });
+    }
     let rangeLength = 4;
     if (blocksIndex === 0) {
       rangeLength = 8;
@@ -177,8 +306,8 @@ class MempoolBlocks {
       rangeLength = 8;
     }
     return {
-      blockSize: blockSize,
-      blockVSize: blockWeight / 4,
+      blockSize: totalSize,
+      blockVSize: totalWeight / 4,
       nTx: transactions.length,
       totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
       medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
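
The refactor above replaces the one-shot worker pool with a single long-lived worker thread: the main thread sends the whole stripped-down mempool once ('set') and afterwards only the delta of added and removed transactions ('update'), and the worker answers each message with the projected blocks plus the CPFP clusters it found. A minimal sketch of that message flow, assuming a compiled tx-selection-worker.js next to the calling module (illustrative only, not part of the diff):

import { Worker } from 'worker_threads';
import path from 'path';

// shape mirrors the ThreadTransaction interface added in this commit
interface ThreadTransaction {
  txid: string;
  fee: number;
  weight: number;
  feePerVsize: number;
  effectiveFeePerVsize?: number;
  vin: string[];
}

const worker = new Worker(path.resolve(__dirname, './tx-selection-worker.js'));

worker.on('message', (result: { blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }) => {
  // one array of stripped transactions per projected block, plus CPFP clusters keyed by root txid
  console.log(`${result.blocks.length} projected blocks, ${Object.keys(result.clusters).length} clusters`);
});

// first run: hand over the full stripped-down mempool
const initialMempool: { [txid: string]: ThreadTransaction } = {};
worker.postMessage({ type: 'set', mempool: initialMempool });

// subsequent runs: only what changed since the last template build
const added: ThreadTransaction[] = [];
const removed: string[] = [];
worker.postMessage({ type: 'update', added, removed });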

View File

@@ -21,7 +21,7 @@ class Mempool {
   private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
     deletedTransactions: TransactionExtended[]) => void) | undefined;
   private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
-    deletedTransactions: TransactionExtended[]) => void) | undefined;
+    deletedTransactions: TransactionExtended[]) => Promise<void>) | undefined;

   private txPerSecondArray: number[] = [];
   private txPerSecond: number = 0;

View File

@@ -1,17 +1,30 @@
 import config from '../config';
 import logger from '../logger';
-import { TransactionExtended, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
+import { ThreadTransaction, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
 import { PairingHeap } from '../utils/pairing-heap';
 import { Common } from './common';
 import { parentPort } from 'worker_threads';

+let mempool: { [txid: string]: ThreadTransaction } = {};
+
 if (parentPort) {
-  parentPort.on('message', (params: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null, condenseRest: boolean}) => {
-    const { mempool, blocks } = makeBlockTemplates(params);
+  parentPort.on('message', (params) => {
+    if (params.type === 'set') {
+      mempool = params.mempool;
+    } else if (params.type === 'update') {
+      params.added.forEach(tx => {
+        mempool[tx.txid] = tx;
+      });
+      params.removed.forEach(txid => {
+        delete mempool[txid];
+      });
+    }
+
+    const { blocks, clusters } = makeBlockTemplates(mempool);

     // return the result to main thread.
     if (parentPort) {
-      parentPort.postMessage({ mempool, blocks });
+      parentPort.postMessage({ blocks, clusters });
     }
   });
 }
@@ -19,35 +32,24 @@ if (parentPort) {
 /*
 * Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
 * (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
-*
-* blockLimit: number of blocks to build in total.
-* weightLimit: maximum weight of transactions to consider using the selection algorithm.
-*              if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
-* condenseRest: whether to ignore excess transactions or append them to the final block.
 */
-function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit?: number | null, condenseRest?: boolean | null })
-  : { mempool: { [txid: string]: TransactionExtended }, blocks: MempoolBlockWithTransactions[] } {
+function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
+  : { blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } } {
   const start = Date.now();
   const auditPool: { [txid: string]: AuditTransaction } = {};
   const mempoolArray: AuditTransaction[] = [];
-  const restOfArray: TransactionExtended[] = [];
-
-  let weight = 0;
-  const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
+  const restOfArray: ThreadTransaction[] = [];
+  const cpfpClusters: { [root: string]: string[] } = {};
+
   // grab the top feerate txs up to maxWeight
   Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
-    weight += tx.weight;
-    if (weight >= maxWeight) {
-      restOfArray.push(tx);
-      return;
-    }
     // initializing everything up front helps V8 optimize property access later
     auditPool[tx.txid] = {
       txid: tx.txid,
       fee: tx.fee,
-      size: tx.size,
       weight: tx.weight,
       feePerVsize: tx.feePerVsize,
+      effectiveFeePerVsize: tx.feePerVsize,
       vin: tx.vin,
       relativesSet: false,
       ancestorMap: new Map<string, AuditTransaction>(),
@@ -74,7 +76,7 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
   // Build blocks by greedily choosing the highest feerate package
   // (i.e. the package rooted in the transaction with the best ancestor score)
-  const blocks: MempoolBlockWithTransactions[] = [];
+  const blocks: ThreadTransaction[][] = [];
   let blockWeight = 4000;
   let blockSize = 0;
   let transactions: AuditTransaction[] = [];
@@ -82,7 +84,7 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
   let overflow: AuditTransaction[] = [];
   let failures = 0;
   let top = 0;
-  while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
+  while ((top < mempoolArray.length || !modified.isEmpty())) {
     // skip invalid transactions
     while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
       top++;
@@ -106,44 +108,36 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
     if (nextTx && !nextTx?.used) {
       // Check if the package fits into this block
       if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
-        blockWeight += nextTx.ancestorWeight;
         const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
-        const descendants: AuditTransaction[] = [];
         // sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
         const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
+        let isCluster = false;
+        if (sortedTxSet.length > 1) {
+          cpfpClusters[nextTx.txid] = sortedTxSet.map(tx => tx.txid);
+          isCluster = true;
+        }
         const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
+        const used: AuditTransaction[] = [];
         while (sortedTxSet.length) {
           const ancestor = sortedTxSet.pop();
           const mempoolTx = mempool[ancestor.txid];
-          if (ancestor && !ancestor?.used) {
-            ancestor.used = true;
-            // update original copy of this tx with effective fee rate & relatives data
-            mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
-            mempoolTx.ancestors = sortedTxSet.map((a) => {
-              return {
-                txid: a.txid,
-                fee: a.fee,
-                weight: a.weight,
-              };
-            }).reverse();
-            mempoolTx.descendants = descendants.map((a) => {
-              return {
-                txid: a.txid,
-                fee: a.fee,
-                weight: a.weight,
-              };
-            });
-            descendants.push(ancestor);
-            mempoolTx.cpfpChecked = true;
-            transactions.push(ancestor);
-            blockSize += ancestor.size;
-          }
+          ancestor.used = true;
+          ancestor.usedBy = nextTx.txid;
+          // update original copy of this tx with effective fee rate & relatives data
+          mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
+          if (isCluster) {
+            mempoolTx.cpfpRoot = nextTx.txid;
+          }
+          mempoolTx.cpfpChecked = true;
+          transactions.push(ancestor);
+          blockSize += ancestor.size;
+          blockWeight += ancestor.weight;
+          used.push(ancestor);
         }
         // remove these as valid package ancestors for any descendants remaining in the mempool
-        if (sortedTxSet.length) {
-          sortedTxSet.forEach(tx => {
+        if (used.length) {
+          used.forEach(tx => {
             updateDescendants(tx, auditPool, modified);
           });
         }
@@ -159,10 +153,10 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
       // this block is full
       const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
       const queueEmpty = top >= mempoolArray.length && modified.isEmpty();
-      if ((exceededPackageTries || queueEmpty) && (!condenseRest || blocks.length < blockLimit - 1)) {
+      if ((exceededPackageTries || queueEmpty) && blocks.length < 7) {
         // construct this block
         if (transactions.length) {
-          blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
+          blocks.push(transactions.map(t => mempool[t.txid]));
         }
         // reset for the next block
         transactions = [];
@@ -181,55 +175,40 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
       overflow = [];
     }
   }
-  if (condenseRest) {
-    // pack any leftover transactions into the last block
-    for (const tx of overflow) {
-      if (!tx || tx?.used) {
-        continue;
-      }
-      blockWeight += tx.weight;
-      blockSize += tx.size;
-      const mempoolTx = mempool[tx.txid];
-      // update original copy of this tx with effective fee rate & relatives data
-      mempoolTx.effectiveFeePerVsize = tx.score;
-      mempoolTx.ancestors = (Array.from(tx.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
-        return {
-          txid: a.txid,
-          fee: a.fee,
-          weight: a.weight,
-        };
-      });
-      mempoolTx.bestDescendant = null;
-      mempoolTx.cpfpChecked = true;
-      transactions.push(tx);
-      tx.used = true;
-    }
-    const blockTransactions = transactions.map(t => mempool[t.txid]);
-    restOfArray.forEach(tx => {
-      blockWeight += tx.weight;
-      blockSize += tx.size;
-      tx.effectiveFeePerVsize = tx.feePerVsize;
-      tx.cpfpChecked = false;
-      tx.ancestors = [];
-      tx.bestDescendant = null;
-      blockTransactions.push(tx);
-    });
-    if (blockTransactions.length) {
-      blocks.push(dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
-    }
-    transactions = [];
-  } else if (transactions.length) {
-    blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
+  // pack any leftover transactions into the last block
+  for (const tx of overflow) {
+    if (!tx || tx?.used) {
+      continue;
+    }
+    blockWeight += tx.weight;
+    const mempoolTx = mempool[tx.txid];
+    // update original copy of this tx with effective fee rate & relatives data
+    mempoolTx.effectiveFeePerVsize = tx.score;
+    if (tx.ancestorMap.size > 0) {
+      cpfpClusters[tx.txid] = Array.from(tx.ancestorMap?.values()).map(a => a.txid);
+      mempoolTx.cpfpRoot = tx.txid;
+    }
+    mempoolTx.cpfpChecked = true;
+    transactions.push(tx);
+    tx.used = true;
   }
+  const blockTransactions = transactions.map(t => mempool[t.txid]);
+  restOfArray.forEach(tx => {
+    blockWeight += tx.weight;
+    tx.effectiveFeePerVsize = tx.feePerVsize;
+    tx.cpfpChecked = false;
+    blockTransactions.push(tx);
+  });
+  if (blockTransactions.length) {
+    blocks.push(blockTransactions);
+  }
+  transactions = [];

   const end = Date.now();
   const time = end - start;
   logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');

-  return {
-    mempool,
-    blocks
-  };
+  return { blocks, clusters: cpfpClusters };
 }

 // traverse in-mempool ancestors
@@ -239,9 +218,9 @@ function setRelatives(
   mempool: { [txid: string]: AuditTransaction },
 ): void {
   for (const parent of tx.vin) {
-    const parentTx = mempool[parent.txid];
-    if (parentTx && !tx.ancestorMap?.has(parent.txid)) {
-      tx.ancestorMap.set(parent.txid, parentTx);
+    const parentTx = mempool[parent];
+    if (parentTx && !tx.ancestorMap?.has(parent)) {
+      tx.ancestorMap.set(parent, parentTx);
       parentTx.children.add(tx);
       // visit each node only once
       if (!parentTx.relativesSet) {
@@ -313,26 +292,3 @@ function updateDescendants(
     }
   }
 }
-
-function dataToMempoolBlocks(transactions: TransactionExtended[],
-  blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
-  let rangeLength = 4;
-  if (blocksIndex === 0) {
-    rangeLength = 8;
-  }
-  if (transactions.length > 4000) {
-    rangeLength = 6;
-  } else if (transactions.length > 10000) {
-    rangeLength = 8;
-  }
-  return {
-    blockSize: blockSize,
-    blockVSize: blockWeight / 4,
-    nTx: transactions.length,
-    totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
-    medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
-    feeRange: Common.getFeesInRange(transactions, rangeLength),
-    transactionIds: transactions.map((tx) => tx.txid),
-    transactions: transactions.map((tx) => Common.stripTransaction(tx)),
-  };
-}
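
The selection loop above scores each candidate package by its ancestor fee rate, effectiveFeeRate = ancestorFee / (ancestorWeight / 4), i.e. the total fees of a transaction plus its unconfirmed ancestors divided by their combined virtual size, mirroring Bitcoin Core's BlockAssembler. A toy worked example of why a low-fee parent gets pulled in by a high-fee child (numbers are made up for illustration):

// toy CPFP package
const parent = { fee: 200, weight: 800 };   // 200 sat / 200 vB  = 1 sat/vB on its own
const child  = { fee: 2000, weight: 400 };  // 2000 sat / 100 vB = 20 sat/vB on its own

const ancestorFee = parent.fee + child.fee;             // 2200 sat
const ancestorWeight = parent.weight + child.weight;    // 1200 WU = 300 vB
const effectiveFeeRate = ancestorFee / (ancestorWeight / 4); // ~7.33 sat/vB

// both cluster members are credited with the package rate, and the cluster is reported
// back to the main thread keyed by its root txid (the child that pulled the package in)
console.log(effectiveFeeRate.toFixed(2)); // "7.33"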

View File

@@ -251,7 +251,7 @@ class WebsocketHandler {
     }

     if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
-      await mempoolBlocks.makeBlockTemplates(newMempool, 8, null, true);
+      await mempoolBlocks.updateBlockTemplates(newMempool, newTransactions, deletedTransactions.map(tx => tx.txid));
     } else {
       mempoolBlocks.updateMempoolBlocks(newMempool);
     }
@@ -419,7 +419,7 @@ class WebsocketHandler {
     const _memPool = memPool.getMempool();

     if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
-      await mempoolBlocks.makeBlockTemplates(_memPool, 2);
+      await mempoolBlocks.makeBlockTemplates(_memPool);
     } else {
       mempoolBlocks.updateMempoolBlocks(_memPool);
     }
@@ -439,7 +439,7 @@ class WebsocketHandler {
         };
       }) : [];

-      BlocksSummariesRepository.$saveSummary({
+      BlocksSummariesRepository.$saveTemplate({
         height: block.height,
         template: {
           id: block.id,
@@ -462,13 +462,15 @@ class WebsocketHandler {
       }
     }

+    const removed: string[] = [];
     // Update mempool to remove transactions included in the new block
     for (const txId of txIds) {
       delete _memPool[txId];
+      removed.push(txId);
     }

     if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
-      await mempoolBlocks.makeBlockTemplates(_memPool, 2);
+      await mempoolBlocks.updateBlockTemplates(_memPool, [], removed);
     } else {
       mempoolBlocks.updateMempoolBlocks(_memPool);
     }

View File

@@ -44,6 +44,7 @@ interface IConfig {
     GRAPH_REFRESH_INTERVAL: number;
     LOGGER_UPDATE_INTERVAL: number;
     FORENSICS_INTERVAL: number;
+    FORENSICS_RATE_LIMIT: number;
   };
   LND: {
     TLS_CERT_PATH: string;
@@ -205,6 +206,7 @@ const defaults: IConfig = {
     'GRAPH_REFRESH_INTERVAL': 600,
     'LOGGER_UPDATE_INTERVAL': 30,
     'FORENSICS_INTERVAL': 43200,
+    'FORENSICS_RATE_LIMIT': 20,
   },
   'LND': {
     'TLS_CERT_PATH': '',

View File

@@ -81,10 +81,10 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
 export interface AuditTransaction {
   txid: string;
   fee: number;
-  size: number;
   weight: number;
   feePerVsize: number;
-  vin: IEsploraApi.Vin[];
+  effectiveFeePerVsize: number;
+  vin: string[];
   relativesSet: boolean;
   ancestorMap: Map<string, AuditTransaction>;
   children: Set<AuditTransaction>;
@@ -96,6 +96,17 @@ export interface AuditTransaction {
   modifiedNode: HeapNode<AuditTransaction>;
 }

+export interface ThreadTransaction {
+  txid: string;
+  fee: number;
+  weight: number;
+  feePerVsize: number;
+  effectiveFeePerVsize?: number;
+  vin: string[];
+  cpfpRoot?: string;
+  cpfpChecked?: boolean;
+}
+
 export interface Ancestor {
   txid: string;
   weight: number;

View File

@@ -17,19 +17,16 @@ class BlocksSummariesRepository {
     return undefined;
   }

-  public async $saveSummary(params: { height: number, mined?: BlockSummary, template?: BlockSummary}) {
-    const blockId = params.mined?.id ?? params.template?.id;
+  public async $saveSummary(params: { height: number, mined?: BlockSummary}) {
+    const blockId = params.mined?.id;
     try {
-      const [dbSummary]: any[] = await DB.query(`SELECT * FROM blocks_summaries WHERE id = "${blockId}"`);
-      if (dbSummary.length === 0) { // First insertion
-        await DB.query(`INSERT INTO blocks_summaries VALUE (?, ?, ?, ?)`, [
-          params.height, blockId, JSON.stringify(params.mined?.transactions ?? []), JSON.stringify(params.template?.transactions ?? [])
-        ]);
-      } else if (params.mined !== undefined) { // Update mined block summary
-        await DB.query(`UPDATE blocks_summaries SET transactions = ? WHERE id = "${params.mined.id}"`, [JSON.stringify(params.mined.transactions)]);
-      } else if (params.template !== undefined) { // Update template block summary
-        await DB.query(`UPDATE blocks_summaries SET template = ? WHERE id = "${params.template.id}"`, [JSON.stringify(params.template?.transactions)]);
-      }
+      const transactions = JSON.stringify(params.mined?.transactions || []);
+      await DB.query(`
+        INSERT INTO blocks_summaries (height, id, transactions, template)
+        VALUE (?, ?, ?, ?)
+        ON DUPLICATE KEY UPDATE
+          transactions = ?
+      `, [params.height, blockId, transactions, '[]', transactions]);
     } catch (e: any) {
       if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
         logger.debug(`Cannot save block summary for ${blockId} because it has already been indexed, ignoring`);
@@ -40,6 +37,26 @@ class BlocksSummariesRepository {
     }
   }

+  public async $saveTemplate(params: { height: number, template: BlockSummary}) {
+    const blockId = params.template?.id;
+    try {
+      const transactions = JSON.stringify(params.template?.transactions || []);
+      await DB.query(`
+        INSERT INTO blocks_summaries (height, id, transactions, template)
+        VALUE (?, ?, ?, ?)
+        ON DUPLICATE KEY UPDATE
+          template = ?
+      `, [params.height, blockId, '[]', transactions, transactions]);
+    } catch (e: any) {
+      if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
+        logger.debug(`Cannot save block template for ${blockId} because it has already been indexed, ignoring`);
+      } else {
+        logger.debug(`Cannot save block template for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
+        throw e;
+      }
+    }
+  }
+
   public async $getIndexedSummariesId(): Promise<string[]> {
     try {
       const [rows]: any[] = await DB.query(`SELECT id from blocks_summaries`);
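
Both methods above lean on MySQL/MariaDB upsert semantics: a single INSERT ... ON DUPLICATE KEY UPDATE either creates the row (with the column it does not own defaulted to '[]') or, when a row with the same key already exists, overwrites only the column that changed. A hedged sketch of the intended call order, using the same DB.query wrapper as the diff; the height, block hash, and empty transaction lists are placeholders only:

import DB from '../database'; // assumed to be the same query wrapper used in the methods above

async function saveTemplateThenMined(): Promise<void> {
  const height = 770000; // placeholder height
  const id = '00'.repeat(32); // placeholder block hash
  const templateTxs = JSON.stringify([]); // stripped template transactions
  const minedTxs = JSON.stringify([]);    // stripped mined transactions

  // 1) the projected template is saved first: the row is created with an empty mined column
  await DB.query(`
    INSERT INTO blocks_summaries (height, id, transactions, template)
    VALUE (?, ?, ?, ?)
    ON DUPLICATE KEY UPDATE template = ?
  `, [height, id, '[]', templateTxs, templateTxs]);

  // 2) once the block is mined, the same row is matched by its key and only "transactions"
  //    is replaced, leaving the previously stored template untouched
  await DB.query(`
    INSERT INTO blocks_summaries (height, id, transactions, template)
    VALUE (?, ?, ?, ?)
    ON DUPLICATE KEY UPDATE transactions = ?
  `, [height, id, minedTxs, '[]', minedTxs]);
}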

View File

@@ -44,7 +44,9 @@ class TransactionRepository {
       const [rows]: any = await DB.query(query, [txid]);
       if (rows.length) {
         rows[0].txs = JSON.parse(rows[0].txs) as Ancestor[];
-        return this.convertCpfp(rows[0]);
+        if (rows[0]?.txs?.length) {
+          return this.convertCpfp(rows[0]);
+        }
       }
     } catch (e) {
       logger.err('Cannot get transaction cpfp info from db. Reason: ' + (e instanceof Error ? e.message : e));

View File

@@ -7,7 +7,6 @@ import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
 import { Common } from '../../api/common';
 import { ILightningApi } from '../../api/lightning/lightning-api.interface';

-const throttleDelay = 20; //ms
 const tempCacheSize = 10000;

 class ForensicsService {
@@ -91,7 +90,7 @@ class ForensicsService {
       let outspends: IEsploraApi.Outspend[] | undefined;
       try {
         outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
-        await Common.sleep$(throttleDelay);
+        await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
       } catch (e) {
         logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
         continue;
@@ -340,7 +339,7 @@ class ForensicsService {
       let outspends: IEsploraApi.Outspend[] | undefined;
       try {
         outspends = await bitcoinApi.$getOutspends(input.txid);
-        await Common.sleep$(throttleDelay);
+        await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
       } catch (e) {
         logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + input.txid + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
       }
@@ -429,7 +428,7 @@ class ForensicsService {
       if (temp) {
         this.tempCached.push(txid);
       }
-      await Common.sleep$(throttleDelay);
+      await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
     } catch (e) {
       logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + txid + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
       return null;
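
The hard-coded 20 ms pause between Esplora calls becomes the configurable LIGHTNING.FORENSICS_RATE_LIMIT (default 20), passed straight to Common.sleep$ just as the old constant was. A small sketch of the pacing pattern, assuming Common.sleep$ is a plain promise-based delay (the sleep helper and fetchOutspendsPaced below are illustrative, not part of the codebase):

// promise-based delay, equivalent to what Common.sleep$ is assumed to do
function sleep(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms));
}

async function fetchOutspendsPaced(txids: string[], delayMs: number): Promise<void> {
  for (const txid of txids) {
    // each REST call would go here, e.g. bitcoinApi.$getOutspends(txid)
    await sleep(delayMs); // delayMs would come from config.LIGHTNING.FORENSICS_RATE_LIMIT
  }
}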

View File

@@ -31,6 +31,7 @@
         "bootstrap": "~4.6.1",
         "browserify": "^17.0.0",
         "clipboard": "^2.0.11",
+        "cypress": "^12.1.0",
         "domino": "^2.1.6",
         "echarts": "~5.4.0",
         "echarts-gl": "^2.0.9",
@@ -57,8 +58,8 @@
         "typescript": "~4.6.4"
       },
       "optionalDependencies": {
-        "@cypress/schematic": "~2.3.0",
-        "cypress": "^11.2.0",
+        "@cypress/schematic": "^2.4.0",
+        "cypress": "^12.1.0",
         "cypress-fail-on-console-error": "~4.0.2",
        "cypress-wait-until": "^1.7.2",
        "mock-socket": "~9.1.5",
@@ -3225,9 +3226,9 @@
      }
    },
    "node_modules/@cypress/schematic": {
-      "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/@cypress/schematic/-/schematic-2.3.0.tgz",
-      "integrity": "sha512-LBKX20MUUYF2Xu+1+KpVbLCoMvt2Osa80yQfonduVsLJ/p8JxtLHqufuf/ryJp9Gm9R5sDfk/YhHL+rB7a+gsg==",
+      "version": "2.4.0",
+      "resolved": "https://registry.npmjs.org/@cypress/schematic/-/schematic-2.4.0.tgz",
+      "integrity": "sha512-aor8hQ+gMXqx/ASdo7CUGo/sMEWwwfSRsLr99rM2GjvW+pZnCKKTnRG4UPf8Ro9SevLJj7KRZAZWxa5MAkJzZA==",
      "optional": true,
      "dependencies": {
        "@angular-devkit/architect": "^0.1402.1",
@@ -7019,9 +7020,9 @@
      "peer": true
    },
    "node_modules/cypress": {
-      "version": "11.2.0",
-      "resolved": "https://registry.npmjs.org/cypress/-/cypress-11.2.0.tgz",
-      "integrity": "sha512-u61UGwtu7lpsNWLUma/FKNOsrjcI6wleNmda/TyKHe0dOBcVjbCPlp1N6uwFZ0doXev7f/91YDpU9bqDCFeBLA==",
+      "version": "12.1.0",
+      "resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz",
+      "integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==",
      "hasInstallScript": true,
      "optional": true,
      "dependencies": {
@@ -7072,7 +7073,7 @@
        "cypress": "bin/cypress"
      },
      "engines": {
-        "node": ">=12.0.0"
+        "node": "^14.0.0 || ^16.0.0 || >=18.0.0"
      }
    },
    "node_modules/cypress-fail-on-console-error": {
@@ -19345,9 +19346,9 @@
      }
    },
    "@cypress/schematic": {
-      "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/@cypress/schematic/-/schematic-2.3.0.tgz",
-      "integrity": "sha512-LBKX20MUUYF2Xu+1+KpVbLCoMvt2Osa80yQfonduVsLJ/p8JxtLHqufuf/ryJp9Gm9R5sDfk/YhHL+rB7a+gsg==",
+      "version": "2.4.0",
+      "resolved": "https://registry.npmjs.org/@cypress/schematic/-/schematic-2.4.0.tgz",
+      "integrity": "sha512-aor8hQ+gMXqx/ASdo7CUGo/sMEWwwfSRsLr99rM2GjvW+pZnCKKTnRG4UPf8Ro9SevLJj7KRZAZWxa5MAkJzZA==",
      "optional": true,
      "requires": {
        "@angular-devkit/architect": "^0.1402.1",
@@ -22282,9 +22283,9 @@
      "peer": true
    },
    "cypress": {
-      "version": "11.2.0",
-      "resolved": "https://registry.npmjs.org/cypress/-/cypress-11.2.0.tgz",
-      "integrity": "sha512-u61UGwtu7lpsNWLUma/FKNOsrjcI6wleNmda/TyKHe0dOBcVjbCPlp1N6uwFZ0doXev7f/91YDpU9bqDCFeBLA==",
+      "version": "12.1.0",
+      "resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz",
+      "integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==",
      "optional": true,
      "requires": {
        "@cypress/request": "^2.88.10",

View File

@@ -109,8 +109,8 @@
     "typescript": "~4.6.4"
   },
   "optionalDependencies": {
-    "@cypress/schematic": "~2.3.0",
-    "cypress": "^11.2.0",
+    "@cypress/schematic": "^2.4.0",
+    "cypress": "^12.1.0",
     "cypress-fail-on-console-error": "~4.0.2",
     "cypress-wait-until": "^1.7.2",
     "mock-socket": "~9.1.5",

View File

@@ -5,7 +5,7 @@ let PROXY_CONFIG = require('./proxy.conf');
 PROXY_CONFIG.forEach(entry => {
   entry.target = entry.target.replace("mempool.space", "mempool-staging.tk7.mempool.space");
   entry.target = entry.target.replace("liquid.network", "liquid-staging.tk7.mempool.space");
-  entry.target = entry.target.replace("bisq.markets", "bisq-staging.tk7.mempool.space");
+  entry.target = entry.target.replace("bisq.markets", "bisq-staging.fra.mempool.space");
 });

 module.exports = PROXY_CONFIG;

View File

@@ -118,7 +118,7 @@ export class NodesNetworksChartComponent implements OnInit {
         color: 'grey',
         fontSize: 15
       },
-      text: $localize`Indexing in progess`,
+      text: $localize`Indexing in progress`,
       left: 'center',
       top: 'center',
     };

View File

@@ -109,7 +109,7 @@ export class LightningStatisticsChartComponent implements OnInit {
         color: 'grey',
         fontSize: 15
       },
-      text: $localize`Indexing in progess`,
+      text: $localize`Indexing in progress`,
       left: 'center',
       top: 'center'
     };