Merge branch 'master' into mononaut/zero-value-tx-diagrams

commit 1348e2318d
Author: wiz
Date: 2022-12-26 12:24:09 +09:00 (committed by GitHub)
Signature: GPG Key ID 4AEE18F83AFDEB23 (no known key found for this signature in database)
20 changed files with 357 additions and 218 deletions

View File

@@ -1,20 +1,32 @@
version: 2
updates:
- package-ecosystem: npm
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: npm
directory: "/frontend"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: docker
directory: "/docker/backend"
schedule:
interval: weekly
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
- package-ecosystem: npm
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
- package-ecosystem: npm
directory: "/frontend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
- package-ecosystem: docker
directory: "/docker/backend"
schedule:
interval: daily
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]

View File

@@ -2,7 +2,7 @@ name: Cypress Tests
on:
pull_request:
types: [ opened, review_requested, synchronize ]
types: [opened, review_requested, synchronize]
jobs:
cypress:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
@@ -24,36 +24,36 @@ jobs:
- module: "bisq"
spec: |
cypress/e2e/bisq/bisq.spec.ts
name: E2E tests for ${{ matrix.module }}
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
path: ${{ matrix.module }}
- name: Setup node
uses: actions/setup-node@v2
uses: actions/setup-node@v3
with:
node-version: 16.15.0
cache: 'npm'
cache: "npm"
cache-dependency-path: ${{ matrix.module }}/frontend/package-lock.json
- name: Chrome browser tests (${{ matrix.module }})
uses: cypress-io/github-action@v4
uses: cypress-io/github-action@v5
with:
tag: ${{ github.event_name }}
working-directory: ${{ matrix.module }}/frontend
build: npm run config:defaults:${{ matrix.module }}
start: npm run start:local-staging
wait-on: 'http://localhost:4200'
wait-on: "http://localhost:4200"
wait-on-timeout: 120
record: true
parallel: true
spec: ${{ matrix.spec }}
group: Tests on Chrome (${{ matrix.module }})
browser: "chrome"
ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
ci-build-id: "${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}"
env:
COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}

View File

@@ -85,7 +85,8 @@
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": 20
},
"LND": {
"TLS_CERT_PATH": "tls.cert",

View File

@@ -101,7 +101,8 @@
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": "__FORENSICS_RATE_LIMIT__"
},
"LND": {
"TLS_CERT_PATH": "",

View File

@@ -742,7 +742,7 @@ class Blocks {
public async $indexCPFP(hash: string, height: number): Promise<void> {
let transactions;
if (false/*Common.blocksSummariesIndexingEnabled()*/) {
if (Common.blocksSummariesIndexingEnabled()) {
transactions = await this.$getStrippedBlockTransactions(hash);
const rawBlock = await bitcoinApi.$getRawBlock(hash);
const block = Block.fromBuffer(rawBlock);
@@ -751,10 +751,11 @@ class Blocks {
txMap[tx.getId()] = tx;
}
for (const tx of transactions) {
// convert from bitcoinjs to esplora vin format
if (txMap[tx.txid]?.ins) {
tx.vin = txMap[tx.txid].ins.map(vin => {
return {
txid: vin.hash
txid: vin.hash.slice().reverse().toString('hex')
};
});
}
@@ -763,6 +764,7 @@ class Blocks {
const block = await bitcoinClient.getBlock(hash, 2);
transactions = block.tx.map(tx => {
tx.vsize = tx.weight / 4;
tx.fee *= 100_000_000;
return tx;
});
}
@@ -778,9 +780,9 @@ class Blocks {
totalFee += tx?.fee || 0;
totalVSize += tx.vsize;
});
const effectiveFeePerVsize = (totalFee * 100_000_000) / totalVSize;
const effectiveFeePerVsize = totalFee / totalVSize;
if (cluster.length > 1) {
await cpfpRepository.$saveCluster(height, cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: (tx.fee || 0) * 100_000_000 }; }), effectiveFeePerVsize);
await cpfpRepository.$saveCluster(height, cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: tx.fee || 0 }; }), effectiveFeePerVsize);
for (const tx of cluster) {
await transactionRepository.$setCluster(tx.txid, cluster[0].txid);
}
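
Two details in this hunk are easy to miss. bitcoinjs-lib exposes each input's previous-output hash as a Buffer in internal (little-endian) byte order, so it has to be copied and reversed before hex-encoding to match the txids used everywhere else, and Core RPC amounts are denominated in BTC, so the fee needs the 100_000_000 multiplier to land in satoshis like the esplora path. A minimal sketch of the byte-order conversion, with an illustrative helper name that is not part of the diff:

// Buffer#reverse mutates in place, hence the defensive slice() before reversing.
function hashToTxid(hash: Buffer): string {
  return hash.slice().reverse().toString('hex');
}

// Example of the fee conversion applied above: 0.00012345 BTC * 100_000_000 = 12345 sats.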

View File

@@ -538,6 +538,10 @@ class NodesApi {
const IPSIds = ISPId.split(',');
const [rows]: any = await DB.query(query, [IPSIds, IPSIds]);
if (!rows || rows.length === 0) {
return [];
}
const nodes = {};
const intISPIds: number[] = [];

View File

@@ -1,17 +1,14 @@
import logger from '../logger';
import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces';
import { MempoolBlock, TransactionExtended, ThreadTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces';
import { Common } from './common';
import config from '../config';
import { StaticPool } from 'node-worker-threads-pool';
import { Worker } from 'worker_threads';
import path from 'path';
class MempoolBlocks {
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
private mempoolBlockDeltas: MempoolBlockDelta[] = [];
private makeTemplatesPool = new StaticPool({
size: 1,
task: path.resolve(__dirname, './tx-selection-worker.js'),
});
private txSelectionWorker: Worker | null = null;
constructor() {}
@@ -146,27 +143,159 @@ class MempoolBlocks {
return mempoolBlockDeltas;
}
public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): Promise<void> {
const { mempool, blocks } = await this.makeTemplatesPool.exec({ mempool: newMempool, blockLimit, weightLimit, condenseRest });
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
// copy CPFP info across to main thread's mempool
Object.keys(newMempool).forEach((txid) => {
if (newMempool[txid] && mempool[txid]) {
newMempool[txid].effectiveFeePerVsize = mempool[txid].effectiveFeePerVsize;
newMempool[txid].ancestors = mempool[txid].ancestors;
newMempool[txid].descendants = mempool[txid].descendants;
newMempool[txid].bestDescendant = mempool[txid].bestDescendant;
newMempool[txid].cpfpChecked = mempool[txid].cpfpChecked;
}
public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }): Promise<void> {
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const strippedMempool: { [txid: string]: ThreadTransaction } = {};
Object.values(newMempool).forEach(entry => {
strippedMempool[entry.txid] = {
txid: entry.txid,
fee: entry.fee,
weight: entry.weight,
feePerVsize: entry.fee / (entry.weight / 4),
effectiveFeePerVsize: entry.fee / (entry.weight / 4),
vin: entry.vin.map(v => v.txid),
};
});
this.mempoolBlocks = blocks;
// (re)initialize tx selection worker thread
if (!this.txSelectionWorker) {
this.txSelectionWorker = new Worker(path.resolve(__dirname, './tx-selection-worker.js'));
// if the thread throws an unexpected error, or exits for any other reason,
// reset worker state so that it will be re-initialized on the next run
this.txSelectionWorker.once('error', () => {
this.txSelectionWorker = null;
});
this.txSelectionWorker.once('exit', () => {
this.txSelectionWorker = null;
});
}
// run the block construction algorithm in a separate thread, and wait for a result
let threadErrorListener;
try {
const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
threadErrorListener = reject;
this.txSelectionWorker?.once('message', (result): void => {
resolve(result);
});
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'set', mempool: strippedMempool });
const { blocks, clusters } = await workerResultPromise;
this.processBlockTemplates(newMempool, blocks, clusters);
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
} catch (e) {
logger.err('makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
}
}
public async updateBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, added: TransactionExtended[], removed: string[]): Promise<void> {
if (!this.txSelectionWorker) {
// need to reset the worker
return this.makeBlockTemplates(newMempool);
}
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const addedStripped: ThreadTransaction[] = added.map(entry => {
return {
txid: entry.txid,
fee: entry.fee,
weight: entry.weight,
feePerVsize: entry.fee / (entry.weight / 4),
effectiveFeePerVsize: entry.fee / (entry.weight / 4),
vin: entry.vin.map(v => v.txid),
};
});
// run the block construction algorithm in a separate thread, and wait for a result
let threadErrorListener;
try {
const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
threadErrorListener = reject;
this.txSelectionWorker?.once('message', (result): void => {
resolve(result);
});
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'update', added: addedStripped, removed });
const { blocks, clusters } = await workerResultPromise;
this.processBlockTemplates(newMempool, blocks, clusters);
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
} catch (e) {
logger.err('updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
}
}
private processBlockTemplates(mempool, blocks, clusters): void {
// update this thread's mempool with the results
blocks.forEach(block => {
block.forEach(tx => {
if (tx.txid in mempool) {
if (tx.effectiveFeePerVsize != null) {
mempool[tx.txid].effectiveFeePerVsize = tx.effectiveFeePerVsize;
}
if (tx.cpfpRoot && tx.cpfpRoot in clusters) {
const ancestors: Ancestor[] = [];
const descendants: Ancestor[] = [];
const cluster = clusters[tx.cpfpRoot];
let matched = false;
cluster.forEach(txid => {
if (txid === tx.txid) {
matched = true;
} else {
const relative = {
txid: txid,
fee: mempool[txid].fee,
weight: mempool[txid].weight,
};
if (matched) {
descendants.push(relative);
} else {
ancestors.push(relative);
}
}
});
mempool[tx.txid].ancestors = ancestors;
mempool[tx.txid].descendants = descendants;
mempool[tx.txid].bestDescendant = null;
}
mempool[tx.txid].cpfpChecked = tx.cpfpChecked;
}
});
});
// unpack the condensed blocks into proper mempool blocks
const mempoolBlocks = blocks.map((transactions, blockIndex) => {
return this.dataToMempoolBlocks(transactions.map(tx => {
return mempool[tx.txid] || null;
}).filter(tx => !!tx), undefined, undefined, blockIndex);
});
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, mempoolBlocks);
this.mempoolBlocks = mempoolBlocks;
this.mempoolBlockDeltas = deltas;
}
private dataToMempoolBlocks(transactions: TransactionExtended[],
blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
blockSize: number | undefined, blockWeight: number | undefined, blocksIndex: number): MempoolBlockWithTransactions {
let totalSize = blockSize || 0;
let totalWeight = blockWeight || 0;
if (blockSize === undefined && blockWeight === undefined) {
totalSize = 0;
totalWeight = 0;
transactions.forEach(tx => {
totalSize += tx.size;
totalWeight += tx.weight;
});
}
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
@@ -177,8 +306,8 @@ class MempoolBlocks {
rangeLength = 8;
}
return {
blockSize: blockSize,
blockVSize: blockWeight / 4,
blockSize: totalSize,
blockVSize: totalWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
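
For reference, the one-shot request/response pattern used by makeBlockTemplates and updateBlockTemplates above can be factored out as below. This is only a sketch under the same assumptions as the diff (the worker answers each postMessage with exactly one 'message' event); the callWorkerOnce helper is illustrative and does not exist in the codebase:

import { Worker } from 'worker_threads';

// Wrap a single postMessage round-trip in a Promise, reject if the worker errors,
// and detach the one-shot listeners afterwards so they don't accumulate across runs.
function callWorkerOnce<T>(worker: Worker, payload: unknown): Promise<T> {
  return new Promise<T>((resolve, reject) => {
    const onMessage = (result: T) => { cleanup(); resolve(result); };
    const onError = (err: Error) => { cleanup(); reject(err); };
    const cleanup = () => {
      worker.removeListener('message', onMessage);
      worker.removeListener('error', onError);
    };
    worker.once('message', onMessage);
    worker.once('error', onError);
    worker.postMessage(payload);
  });
}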

View File

@@ -21,7 +21,7 @@ class Mempool {
private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
deletedTransactions: TransactionExtended[]) => Promise<void>) | undefined;
private txPerSecondArray: number[] = [];
private txPerSecond: number = 0;

View File

@@ -1,17 +1,30 @@
import config from '../config';
import logger from '../logger';
import { TransactionExtended, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
import { ThreadTransaction, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
import { PairingHeap } from '../utils/pairing-heap';
import { Common } from './common';
import { parentPort } from 'worker_threads';
let mempool: { [txid: string]: ThreadTransaction } = {};
if (parentPort) {
parentPort.on('message', (params: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null, condenseRest: boolean}) => {
const { mempool, blocks } = makeBlockTemplates(params);
parentPort.on('message', (params) => {
if (params.type === 'set') {
mempool = params.mempool;
} else if (params.type === 'update') {
params.added.forEach(tx => {
mempool[tx.txid] = tx;
});
params.removed.forEach(txid => {
delete mempool[txid];
});
}
const { blocks, clusters } = makeBlockTemplates(mempool);
// return the result to main thread.
if (parentPort) {
parentPort.postMessage({ mempool, blocks });
parentPort.postMessage({ blocks, clusters });
}
});
}
@@ -19,35 +32,24 @@ if (parentPort) {
/*
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*
* blockLimit: number of blocks to build in total.
* weightLimit: maximum weight of transactions to consider using the selection algorithm.
* if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
* condenseRest: whether to ignore excess transactions or append them to the final block.
*/
function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit?: number | null, condenseRest?: boolean | null })
: { mempool: { [txid: string]: TransactionExtended }, blocks: MempoolBlockWithTransactions[] } {
function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
: { blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } } {
const start = Date.now();
const auditPool: { [txid: string]: AuditTransaction } = {};
const mempoolArray: AuditTransaction[] = [];
const restOfArray: TransactionExtended[] = [];
const restOfArray: ThreadTransaction[] = [];
const cpfpClusters: { [root: string]: string[] } = {};
let weight = 0;
const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
// grab the top feerate txs up to maxWeight
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
weight += tx.weight;
if (weight >= maxWeight) {
restOfArray.push(tx);
return;
}
// initializing everything up front helps V8 optimize property access later
auditPool[tx.txid] = {
txid: tx.txid,
fee: tx.fee,
size: tx.size,
weight: tx.weight,
feePerVsize: tx.feePerVsize,
effectiveFeePerVsize: tx.feePerVsize,
vin: tx.vin,
relativesSet: false,
ancestorMap: new Map<string, AuditTransaction>(),
@@ -74,7 +76,7 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: MempoolBlockWithTransactions[] = [];
const blocks: ThreadTransaction[][] = [];
let blockWeight = 4000;
let blockSize = 0;
let transactions: AuditTransaction[] = [];
@@ -82,7 +84,7 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
let overflow: AuditTransaction[] = [];
let failures = 0;
let top = 0;
while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
while ((top < mempoolArray.length || !modified.isEmpty())) {
// skip invalid transactions
while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
top++;
@@ -106,44 +108,36 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
if (nextTx && !nextTx?.used) {
// Check if the package fits into this block
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
blockWeight += nextTx.ancestorWeight;
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
const descendants: AuditTransaction[] = [];
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
let isCluster = false;
if (sortedTxSet.length > 1) {
cpfpClusters[nextTx.txid] = sortedTxSet.map(tx => tx.txid);
isCluster = true;
}
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
const used: AuditTransaction[] = [];
while (sortedTxSet.length) {
const ancestor = sortedTxSet.pop();
const mempoolTx = mempool[ancestor.txid];
if (ancestor && !ancestor?.used) {
ancestor.used = true;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
mempoolTx.ancestors = sortedTxSet.map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
}).reverse();
mempoolTx.descendants = descendants.map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
});
descendants.push(ancestor);
mempoolTx.cpfpChecked = true;
transactions.push(ancestor);
blockSize += ancestor.size;
ancestor.used = true;
ancestor.usedBy = nextTx.txid;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
if (isCluster) {
mempoolTx.cpfpRoot = nextTx.txid;
}
mempoolTx.cpfpChecked = true;
transactions.push(ancestor);
blockSize += ancestor.size;
blockWeight += ancestor.weight;
used.push(ancestor);
}
// remove these as valid package ancestors for any descendants remaining in the mempool
if (sortedTxSet.length) {
sortedTxSet.forEach(tx => {
if (used.length) {
used.forEach(tx => {
updateDescendants(tx, auditPool, modified);
});
}
@@ -159,10 +153,10 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
// this block is full
const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
const queueEmpty = top >= mempoolArray.length && modified.isEmpty();
if ((exceededPackageTries || queueEmpty) && (!condenseRest || blocks.length < blockLimit - 1)) {
if ((exceededPackageTries || queueEmpty) && blocks.length < 7) {
// construct this block
if (transactions.length) {
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
blocks.push(transactions.map(t => mempool[t.txid]));
}
// reset for the next block
transactions = [];
@@ -181,55 +175,40 @@ function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }:
overflow = [];
}
}
if (condenseRest) {
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx?.used) {
continue;
}
blockWeight += tx.weight;
blockSize += tx.size;
const mempoolTx = mempool[tx.txid];
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = tx.score;
mempoolTx.ancestors = (Array.from(tx.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
return {
txid: a.txid,
fee: a.fee,
weight: a.weight,
};
});
mempoolTx.bestDescendant = null;
mempoolTx.cpfpChecked = true;
transactions.push(tx);
tx.used = true;
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx?.used) {
continue;
}
const blockTransactions = transactions.map(t => mempool[t.txid]);
restOfArray.forEach(tx => {
blockWeight += tx.weight;
blockSize += tx.size;
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.cpfpChecked = false;
tx.ancestors = [];
tx.bestDescendant = null;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
blockWeight += tx.weight;
const mempoolTx = mempool[tx.txid];
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = tx.score;
if (tx.ancestorMap.size > 0) {
cpfpClusters[tx.txid] = Array.from(tx.ancestorMap?.values()).map(a => a.txid);
mempoolTx.cpfpRoot = tx.txid;
}
transactions = [];
} else if (transactions.length) {
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
mempoolTx.cpfpChecked = true;
transactions.push(tx);
tx.used = true;
}
const blockTransactions = transactions.map(t => mempool[t.txid]);
restOfArray.forEach(tx => {
blockWeight += tx.weight;
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.cpfpChecked = false;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(blockTransactions);
}
transactions = [];
const end = Date.now();
const time = end - start;
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');
return {
mempool,
blocks
};
return { blocks, clusters: cpfpClusters };
}
// traverse in-mempool ancestors
@@ -239,9 +218,9 @@ function setRelatives(
mempool: { [txid: string]: AuditTransaction },
): void {
for (const parent of tx.vin) {
const parentTx = mempool[parent.txid];
if (parentTx && !tx.ancestorMap?.has(parent.txid)) {
tx.ancestorMap.set(parent.txid, parentTx);
const parentTx = mempool[parent];
if (parentTx && !tx.ancestorMap?.has(parent)) {
tx.ancestorMap.set(parent, parentTx);
parentTx.children.add(tx);
// visit each node only once
if (!parentTx.relativesSet) {
@@ -312,27 +291,4 @@ function updateDescendants(
});
}
}
}
function dataToMempoolBlocks(transactions: TransactionExtended[],
blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
}
if (transactions.length > 4000) {
rangeLength = 6;
} else if (transactions.length > 10000) {
rangeLength = 8;
}
return {
blockSize: blockSize,
blockVSize: blockWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactions.map((tx) => tx.txid),
transactions: transactions.map((tx) => Common.stripTransaction(tx)),
};
}
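
The selection loop above ranks candidates by ancestor score: the combined fee of a transaction and its in-mempool ancestors divided by their combined virtual size. A worked example with made-up numbers shows why a high-fee child pulls its low-fee parent into the same projected block:

// Hypothetical package: a low-fee parent and a high-fee child spending it (CPFP).
const parent = { fee: 500, weight: 800 };   // 200 vB at 2.5 sat/vB on its own
const child  = { fee: 4500, weight: 600 };  // 150 vB at 30 sat/vB on its own

// Package score as computed above: ancestorFee / (ancestorWeight / 4)
const packageFee = parent.fee + child.fee;                // 5000 sats
const packageVsize = (parent.weight + child.weight) / 4;  // 350 vB
const effectiveFeePerVsize = packageFee / packageVsize;   // ~14.3 sat/vB

// Both transactions are admitted together at ~14.3 sat/vB, and the worker records the
// pair under the child's txid in cpfpClusters so the main thread can rebuild ancestor
// and descendant lists without shipping full transaction objects across threads.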

View File

@@ -251,7 +251,7 @@ class WebsocketHandler {
}
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.makeBlockTemplates(newMempool, 8, null, true);
await mempoolBlocks.updateBlockTemplates(newMempool, newTransactions, deletedTransactions.map(tx => tx.txid));
} else {
mempoolBlocks.updateMempoolBlocks(newMempool);
}
@@ -419,7 +419,7 @@ class WebsocketHandler {
const _memPool = memPool.getMempool();
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
await mempoolBlocks.makeBlockTemplates(_memPool, 2);
await mempoolBlocks.makeBlockTemplates(_memPool);
} else {
mempoolBlocks.updateMempoolBlocks(_memPool);
}
@@ -439,7 +439,7 @@ class WebsocketHandler {
};
}) : [];
BlocksSummariesRepository.$saveSummary({
BlocksSummariesRepository.$saveTemplate({
height: block.height,
template: {
id: block.id,
@@ -462,13 +462,15 @@ class WebsocketHandler {
}
}
const removed: string[] = [];
// Update mempool to remove transactions included in the new block
for (const txId of txIds) {
delete _memPool[txId];
removed.push(txId);
}
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.makeBlockTemplates(_memPool, 2);
await mempoolBlocks.updateBlockTemplates(_memPool, [], removed);
} else {
mempoolBlocks.updateMempoolBlocks(_memPool);
}

View File

@@ -44,6 +44,7 @@ interface IConfig {
GRAPH_REFRESH_INTERVAL: number;
LOGGER_UPDATE_INTERVAL: number;
FORENSICS_INTERVAL: number;
FORENSICS_RATE_LIMIT: number;
};
LND: {
TLS_CERT_PATH: string;
@@ -205,6 +206,7 @@ const defaults: IConfig = {
'GRAPH_REFRESH_INTERVAL': 600,
'LOGGER_UPDATE_INTERVAL': 30,
'FORENSICS_INTERVAL': 43200,
'FORENSICS_RATE_LIMIT': 20,
},
'LND': {
'TLS_CERT_PATH': '',

View File

@@ -81,10 +81,10 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
export interface AuditTransaction {
txid: string;
fee: number;
size: number;
weight: number;
feePerVsize: number;
vin: IEsploraApi.Vin[];
effectiveFeePerVsize: number;
vin: string[];
relativesSet: boolean;
ancestorMap: Map<string, AuditTransaction>;
children: Set<AuditTransaction>;
@@ -96,6 +96,17 @@ export interface AuditTransaction {
modifiedNode: HeapNode<AuditTransaction>;
}
export interface ThreadTransaction {
txid: string;
fee: number;
weight: number;
feePerVsize: number;
effectiveFeePerVsize?: number;
vin: string[];
cpfpRoot?: string;
cpfpChecked?: boolean;
}
export interface Ancestor {
txid: string;
weight: number;

View File

@@ -17,19 +17,16 @@ class BlocksSummariesRepository {
return undefined;
}
public async $saveSummary(params: { height: number, mined?: BlockSummary, template?: BlockSummary}) {
const blockId = params.mined?.id ?? params.template?.id;
public async $saveSummary(params: { height: number, mined?: BlockSummary}) {
const blockId = params.mined?.id;
try {
const [dbSummary]: any[] = await DB.query(`SELECT * FROM blocks_summaries WHERE id = "${blockId}"`);
if (dbSummary.length === 0) { // First insertion
await DB.query(`INSERT INTO blocks_summaries VALUE (?, ?, ?, ?)`, [
params.height, blockId, JSON.stringify(params.mined?.transactions ?? []), JSON.stringify(params.template?.transactions ?? [])
]);
} else if (params.mined !== undefined) { // Update mined block summary
await DB.query(`UPDATE blocks_summaries SET transactions = ? WHERE id = "${params.mined.id}"`, [JSON.stringify(params.mined.transactions)]);
} else if (params.template !== undefined) { // Update template block summary
await DB.query(`UPDATE blocks_summaries SET template = ? WHERE id = "${params.template.id}"`, [JSON.stringify(params.template?.transactions)]);
}
const transactions = JSON.stringify(params.mined?.transactions || []);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
transactions = ?
`, [params.height, blockId, transactions, '[]', transactions]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block summary for ${blockId} because it has already been indexed, ignoring`);
@@ -40,6 +37,26 @@ class BlocksSummariesRepository {
}
}
public async $saveTemplate(params: { height: number, template: BlockSummary}) {
const blockId = params.template?.id;
try {
const transactions = JSON.stringify(params.template?.transactions || []);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
template = ?
`, [params.height, blockId, '[]', transactions, transactions]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block template for ${blockId} because it has already been indexed, ignoring`);
} else {
logger.debug(`Cannot save block template for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
}
public async $getIndexedSummariesId(): Promise<string[]> {
try {
const [rows]: any[] = await DB.query(`SELECT id from blocks_summaries`);
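
With the split above, the mined-summary and template writers no longer need to coordinate: each INSERT ... ON DUPLICATE KEY UPDATE touches only its own column, so whichever call arrives first creates the blocks_summaries row and the other fills in the remaining column. A hedged usage sketch; the import path and the block values are assumptions, and the transaction arrays would be TransactionStripped[] in the real code:

import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';

async function saveBoth(): Promise<void> {
  const height = 769000;
  const id = '0000000000000000000000000000000000000000000000000000000000000000';

  // whichever of these runs first inserts the row; the other only updates its column
  await BlocksSummariesRepository.$saveTemplate({ height, template: { id, transactions: [] } });
  await BlocksSummariesRepository.$saveSummary({ height, mined: { id, transactions: [] } });
}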

View File

@@ -44,7 +44,9 @@ class TransactionRepository {
const [rows]: any = await DB.query(query, [txid]);
if (rows.length) {
rows[0].txs = JSON.parse(rows[0].txs) as Ancestor[];
return this.convertCpfp(rows[0]);
if (rows[0]?.txs?.length) {
return this.convertCpfp(rows[0]);
}
}
} catch (e) {
logger.err('Cannot get transaction cpfp info from db. Reason: ' + (e instanceof Error ? e.message : e));

View File

@@ -7,7 +7,6 @@ import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
import { Common } from '../../api/common';
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
const throttleDelay = 20; //ms
const tempCacheSize = 10000;
class ForensicsService {
@@ -91,7 +90,7 @@ class ForensicsService {
let outspends: IEsploraApi.Outspend[] | undefined;
try {
outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
await Common.sleep$(throttleDelay);
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
continue;
@@ -340,7 +339,7 @@ class ForensicsService {
let outspends: IEsploraApi.Outspend[] | undefined;
try {
outspends = await bitcoinApi.$getOutspends(input.txid);
await Common.sleep$(throttleDelay);
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + input.txid + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
}
@@ -429,7 +428,7 @@ class ForensicsService {
if (temp) {
this.tempCached.push(txid);
}
await Common.sleep$(throttleDelay);
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + txid + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
return null;
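
FORENSICS_RATE_LIMIT replaces the hard-coded 20 ms throttleDelay; despite the name it is a per-request sleep in milliseconds, with a default of 20 (see the config changes above). Assuming Common.sleep$ is the usual setTimeout-backed promise, the pacing pattern reduces to this sketch, where fetchOutspends stands in for the real bitcoinApi.$getOutspends call:

declare function fetchOutspends(txid: string): Promise<unknown>; // hypothetical stand-in

function sleep$(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms));
}

async function processOutspends(txids: string[], delayMs: number): Promise<void> {
  for (const txid of txids) {
    try {
      await fetchOutspends(txid); // placeholder for the Esplora /tx/:txid/outspends lookup
    } catch (e) {
      continue; // the real service logs and moves on; one failed lookup should not stop the batch
    }
    await sleep$(delayMs); // pause between requests so the Esplora backend is not hammered
  }
}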

View File

@@ -31,6 +31,7 @@
"bootstrap": "~4.6.1",
"browserify": "^17.0.0",
"clipboard": "^2.0.11",
"cypress": "^12.1.0",
"domino": "^2.1.6",
"echarts": "~5.4.0",
"echarts-gl": "^2.0.9",
@@ -57,8 +58,8 @@
"typescript": "~4.6.4"
},
"optionalDependencies": {
"@cypress/schematic": "~2.3.0",
"cypress": "^11.2.0",
"@cypress/schematic": "^2.4.0",
"cypress": "^12.1.0",
"cypress-fail-on-console-error": "~4.0.2",
"cypress-wait-until": "^1.7.2",
"mock-socket": "~9.1.5",
@@ -3225,9 +3226,9 @@
}
},
"node_modules/@cypress/schematic": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@cypress/schematic/-/schematic-2.3.0.tgz",
"integrity": "sha512-LBKX20MUUYF2Xu+1+KpVbLCoMvt2Osa80yQfonduVsLJ/p8JxtLHqufuf/ryJp9Gm9R5sDfk/YhHL+rB7a+gsg==",
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/@cypress/schematic/-/schematic-2.4.0.tgz",
"integrity": "sha512-aor8hQ+gMXqx/ASdo7CUGo/sMEWwwfSRsLr99rM2GjvW+pZnCKKTnRG4UPf8Ro9SevLJj7KRZAZWxa5MAkJzZA==",
"optional": true,
"dependencies": {
"@angular-devkit/architect": "^0.1402.1",
@@ -7019,9 +7020,9 @@
"peer": true
},
"node_modules/cypress": {
"version": "11.2.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-11.2.0.tgz",
"integrity": "sha512-u61UGwtu7lpsNWLUma/FKNOsrjcI6wleNmda/TyKHe0dOBcVjbCPlp1N6uwFZ0doXev7f/91YDpU9bqDCFeBLA==",
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz",
"integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==",
"hasInstallScript": true,
"optional": true,
"dependencies": {
@@ -7072,7 +7073,7 @@
"cypress": "bin/cypress"
},
"engines": {
"node": ">=12.0.0"
"node": "^14.0.0 || ^16.0.0 || >=18.0.0"
}
},
"node_modules/cypress-fail-on-console-error": {
@@ -19345,9 +19346,9 @@
}
},
"@cypress/schematic": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@cypress/schematic/-/schematic-2.3.0.tgz",
"integrity": "sha512-LBKX20MUUYF2Xu+1+KpVbLCoMvt2Osa80yQfonduVsLJ/p8JxtLHqufuf/ryJp9Gm9R5sDfk/YhHL+rB7a+gsg==",
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/@cypress/schematic/-/schematic-2.4.0.tgz",
"integrity": "sha512-aor8hQ+gMXqx/ASdo7CUGo/sMEWwwfSRsLr99rM2GjvW+pZnCKKTnRG4UPf8Ro9SevLJj7KRZAZWxa5MAkJzZA==",
"optional": true,
"requires": {
"@angular-devkit/architect": "^0.1402.1",
@@ -22282,9 +22283,9 @@
"peer": true
},
"cypress": {
"version": "11.2.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-11.2.0.tgz",
"integrity": "sha512-u61UGwtu7lpsNWLUma/FKNOsrjcI6wleNmda/TyKHe0dOBcVjbCPlp1N6uwFZ0doXev7f/91YDpU9bqDCFeBLA==",
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz",
"integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==",
"optional": true,
"requires": {
"@cypress/request": "^2.88.10",

View File

@@ -109,8 +109,8 @@
"typescript": "~4.6.4"
},
"optionalDependencies": {
"@cypress/schematic": "~2.3.0",
"cypress": "^11.2.0",
"@cypress/schematic": "^2.4.0",
"cypress": "^12.1.0",
"cypress-fail-on-console-error": "~4.0.2",
"cypress-wait-until": "^1.7.2",
"mock-socket": "~9.1.5",

View File

@@ -5,7 +5,7 @@ let PROXY_CONFIG = require('./proxy.conf');
PROXY_CONFIG.forEach(entry => {
entry.target = entry.target.replace("mempool.space", "mempool-staging.tk7.mempool.space");
entry.target = entry.target.replace("liquid.network", "liquid-staging.tk7.mempool.space");
entry.target = entry.target.replace("bisq.markets", "bisq-staging.tk7.mempool.space");
entry.target = entry.target.replace("bisq.markets", "bisq-staging.fra.mempool.space");
});
module.exports = PROXY_CONFIG;

View File

@@ -118,7 +118,7 @@ export class NodesNetworksChartComponent implements OnInit {
color: 'grey',
fontSize: 15
},
text: $localize`Indexing in progess`,
text: $localize`Indexing in progress`,
text: $localize`Indexing in progress`,
left: 'center',
top: 'center',
};

View File

@@ -109,7 +109,7 @@ export class LightningStatisticsChartComponent implements OnInit {
color: 'grey',
fontSize: 15
},
text: $localize`Indexing in progess`,
text: $localize`Indexing in progress`,
left: 'center',
top: 'center'
};