Mirror of https://github.com/mempool/mempool.git, synced 2025-02-24 14:50:52 +01:00
Merge pull request #2621 from mononaut/projected-block-templates
WIP: new transaction selection algorithm & scoring for block audits
commit 2cbb7231a7
5 changed files with 650 additions and 53 deletions
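In outline, the change projects getblocktemplate-style block templates from the mempool and scores each mined block against them. A condensed, illustrative sketch of the per-block flow, using names that appear in the diff below (not a verbatim excerpt):

// Illustrative sketch of the new audit flow, condensed from the diff below.
const mempoolCopy = cloneMempool(_memPool);                               // audit works on copies of the live mempool entries
const projectedBlocks = mempoolBlocks.makeBlockTemplates(mempoolCopy, 2); // two ancestor-score block templates
const { censored, added, score } = Audit.auditBlock(transactions, projectedBlocks, mempoolCopy);
const matchRate = Math.round(score * 100 * 100) / 100;                    // persisted via BlocksAuditsRepository.$saveAudit()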
backend/src/api/audit.ts (new file, +118)
@@ -0,0 +1,118 @@
import logger from '../logger';
import { BlockExtended, TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';

const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners

class Audit {
  auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
   : { censored: string[], added: string[], score: number } {
    if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
      return { censored: [], added: [], score: 0 };
    }

    const matches: string[] = []; // present in both mined block and template
    const added: string[] = []; // present in mined block, not in template
    const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
    const isCensored = {}; // missing, without excuse
    const isDisplaced = {};
    let displacedWeight = 0;

    const inBlock = {};
    const inTemplate = {};

    const now = Math.round((Date.now() / 1000));
    for (const tx of transactions) {
      inBlock[tx.txid] = tx;
    }
    // coinbase is always expected
    if (transactions[0]) {
      inTemplate[transactions[0].txid] = true;
    }
    // look for transactions that were expected in the template, but missing from the mined block
    for (const txid of projectedBlocks[0].transactionIds) {
      if (!inBlock[txid]) {
        // tx is recent, may have reached the miner too late for inclusion
        if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
          fresh.push(txid);
        } else {
          isCensored[txid] = true;
        }
        displacedWeight += mempool[txid].weight;
      }
      inTemplate[txid] = true;
    }

    displacedWeight += (4000 - transactions[0].weight);

    logger.warn(`${fresh.length} fresh, ${Object.keys(isCensored).length} possibly censored, ${displacedWeight} displaced weight`);

    // we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
    // these displaced transactions should occupy the first N weight units of the next projected block
    let displacedWeightRemaining = displacedWeight;
    let index = 0;
    let lastFeeRate = Infinity;
    let failures = 0;
    while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
      const txid = projectedBlocks[1].transactionIds[index];
      const fits = (mempool[txid].weight - displacedWeightRemaining) < 4000;
      const feeMatches = mempool[txid].effectiveFeePerVsize >= lastFeeRate;
      if (fits || feeMatches) {
        isDisplaced[txid] = true;
        if (fits) {
          lastFeeRate = Math.min(lastFeeRate, mempool[txid].effectiveFeePerVsize);
        }
        if (mempool[txid].firstSeen == null || (now - (mempool[txid]?.firstSeen || 0)) > PROPAGATION_MARGIN) {
          displacedWeightRemaining -= mempool[txid].weight;
        }
        failures = 0;
      } else {
        failures++;
      }
      index++;
    }

    // mark unexpected transactions in the mined block as 'added'
    let overflowWeight = 0;
    for (const tx of transactions) {
      if (inTemplate[tx.txid]) {
        matches.push(tx.txid);
      } else {
        if (!isDisplaced[tx.txid]) {
          added.push(tx.txid);
        }
        overflowWeight += tx.weight;
      }
    }

    // transactions missing from near the end of our template are probably not being censored
    let overflowWeightRemaining = overflowWeight;
    let lastOverflowRate = 1.00;
    index = projectedBlocks[0].transactionIds.length - 1;
    while (index >= 0) {
      const txid = projectedBlocks[0].transactionIds[index];
      if (overflowWeightRemaining > 0) {
        if (isCensored[txid]) {
          delete isCensored[txid];
        }
        lastOverflowRate = mempool[txid].effectiveFeePerVsize;
      } else if (Math.floor(mempool[txid].effectiveFeePerVsize * 100) <= Math.ceil(lastOverflowRate * 100)) { // tolerance of 0.01 sat/vb
        if (isCensored[txid]) {
          delete isCensored[txid];
        }
      }
      overflowWeightRemaining -= (mempool[txid]?.weight || 0);
      index--;
    }

    const numCensored = Object.keys(isCensored).length;
    const score = matches.length > 0 ? (matches.length / (matches.length + numCensored)) : 0;

    return {
      censored: Object.keys(isCensored),
      added,
      score
    };
  }
}

export default new Audit();
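For orientation (not part of the file above): score is the fraction of expected template transactions that actually appeared in the mined block, after excusing fresh and displaced ones. A small worked example with hypothetical counts:

// Hypothetical counts: 1,950 template transactions matched, 50 judged censored.
const score = 1950 / (1950 + 50);                       // 0.975
const matchRate = Math.round(score * 100 * 100) / 100;  // 97.5, the percentage later stored with the audit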
backend/src/api/mempool-blocks.ts (changed)
@@ -1,7 +1,8 @@
import logger from '../logger';
import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces';
import { MempoolBlock, TransactionExtended, AuditTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces';
import { Common } from './common';
import config from '../config';
import { PairingHeap } from '../utils/pairing-heap';

class MempoolBlocks {
  private mempoolBlocks: MempoolBlockWithTransactions[] = [];
@@ -72,6 +73,7 @@ class MempoolBlocks {
    logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');

    const { blocks, deltas } = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);

    this.mempoolBlocks = blocks;
    this.mempoolBlockDeltas = deltas;
  }
@@ -99,6 +101,7 @@ class MempoolBlocks {
    if (transactions.length) {
      mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length));
    }

    // Calculate change from previous block states
    for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
      let added: TransactionStripped[] = [];
@@ -132,12 +135,286 @@ class MempoolBlocks {
        removed
      });
    }

    return {
      blocks: mempoolBlocks,
      deltas: mempoolBlockDeltas
    };
  }

  /*
  * Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
  * (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
  *
  * blockLimit: number of blocks to build in total.
  * weightLimit: maximum weight of transactions to consider using the selection algorithm.
  *              if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
  * condenseRest: whether to ignore excess transactions or append them to the final block.
  */
  public makeBlockTemplates(mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): MempoolBlockWithTransactions[] {
    const start = Date.now();
    const auditPool: { [txid: string]: AuditTransaction } = {};
    const mempoolArray: AuditTransaction[] = [];
    const restOfArray: TransactionExtended[] = [];

    let weight = 0;
    const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
    // grab the top feerate txs up to maxWeight
    Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
      weight += tx.weight;
      if (weight >= maxWeight) {
        restOfArray.push(tx);
        return;
      }
      // initializing everything up front helps V8 optimize property access later
      auditPool[tx.txid] = {
        txid: tx.txid,
        fee: tx.fee,
        size: tx.size,
        weight: tx.weight,
        feePerVsize: tx.feePerVsize,
        vin: tx.vin,
        relativesSet: false,
        ancestorMap: new Map<string, AuditTransaction>(),
        children: new Set<AuditTransaction>(),
        ancestorFee: 0,
        ancestorWeight: 0,
        score: 0,
        used: false,
        modified: false,
        modifiedNode: null,
      }
      mempoolArray.push(auditPool[tx.txid]);
    })

    // Build relatives graph & calculate ancestor scores
    for (const tx of mempoolArray) {
      if (!tx.relativesSet) {
        this.setRelatives(tx, auditPool);
      }
    }

    // Sort by descending ancestor score
    mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));

    // Build blocks by greedily choosing the highest feerate package
    // (i.e. the package rooted in the transaction with the best ancestor score)
    const blocks: MempoolBlockWithTransactions[] = [];
    let blockWeight = 4000;
    let blockSize = 0;
    let transactions: AuditTransaction[] = [];
    const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
    let overflow: AuditTransaction[] = [];
    let failures = 0;
    let top = 0;
    while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
      // skip invalid transactions
      while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
        top++;
      }

      // Select best next package
      let nextTx;
      const nextPoolTx = mempoolArray[top];
      const nextModifiedTx = modified.peek();
      if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
        nextTx = nextPoolTx;
        top++;
      } else {
        modified.pop();
        if (nextModifiedTx) {
          nextTx = nextModifiedTx;
          nextTx.modifiedNode = undefined;
        }
      }

      if (nextTx && !nextTx?.used) {
        // Check if the package fits into this block
        if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
          blockWeight += nextTx.ancestorWeight;
          const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
          // sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
          const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
          const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
          sortedTxSet.forEach((ancestor, i, arr) => {
            const mempoolTx = mempool[ancestor.txid];
            if (ancestor && !ancestor?.used) {
              ancestor.used = true;
              // update original copy of this tx with effective fee rate & relatives data
              mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
              mempoolTx.ancestors = (Array.from(ancestor.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
                return {
                  txid: a.txid,
                  fee: a.fee,
                  weight: a.weight,
                }
              })
              if (i < arr.length - 1) {
                mempoolTx.bestDescendant = {
                  txid: arr[arr.length - 1].txid,
                  fee: arr[arr.length - 1].fee,
                  weight: arr[arr.length - 1].weight,
                };
              }
              transactions.push(ancestor);
              blockSize += ancestor.size;
            }
          });

          // remove these as valid package ancestors for any descendants remaining in the mempool
          if (sortedTxSet.length) {
            sortedTxSet.forEach(tx => {
              this.updateDescendants(tx, auditPool, modified);
            });
          }

          failures = 0;
        } else {
          // hold this package in an overflow list while we check for smaller options
          overflow.push(nextTx);
          failures++;
        }
      }

      // this block is full
      const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
      if (exceededPackageTries && (!condenseRest || blocks.length < blockLimit - 1)) {
        // construct this block
        if (transactions.length) {
          blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
        }
        // reset for the next block
        transactions = [];
        blockSize = 0;
        blockWeight = 4000;

        // 'overflow' packages didn't fit in this block, but are valid candidates for the next
        for (const overflowTx of overflow.reverse()) {
          if (overflowTx.modified) {
            overflowTx.modifiedNode = modified.add(overflowTx);
          } else {
            top--;
            mempoolArray[top] = overflowTx;
          }
        }
        overflow = [];
      }
    }
    if (condenseRest) {
      // pack any leftover transactions into the last block
      for (const tx of overflow) {
        if (!tx || tx?.used) {
          continue;
        }
        blockWeight += tx.weight;
        blockSize += tx.size;
        transactions.push(tx);
        tx.used = true;
      }
      const blockTransactions = transactions.map(t => mempool[t.txid])
      restOfArray.forEach(tx => {
        blockWeight += tx.weight;
        blockSize += tx.size;
        blockTransactions.push(tx);
      });
      if (blockTransactions.length) {
        blocks.push(this.dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
      }
      transactions = [];
    } else if (transactions.length) {
      blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
    }

    const end = Date.now();
    const time = end - start;
    logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');

    return blocks;
  }
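  // Illustrative usage (not part of this change): the websocket handler builds two projected
  // blocks from a cloned mempool and hands them to the auditor, roughly:
  //   const projectedBlocks = mempoolBlocks.makeBlockTemplates(cloneMempool(_memPool), 2);
  // projectedBlocks[0] approximates the next block, while projectedBlocks[1] supplies the
  // candidates that Audit.auditBlock() may treat as 'displaced' replacements.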

  // traverse in-mempool ancestors
  // recursion unavoidable, but should be limited to depth < 25 by mempool policy
  public setRelatives(
    tx: AuditTransaction,
    mempool: { [txid: string]: AuditTransaction },
  ): void {
    for (const parent of tx.vin) {
      const parentTx = mempool[parent.txid];
      if (parentTx && !tx.ancestorMap!.has(parent.txid)) {
        tx.ancestorMap.set(parent.txid, parentTx);
        parentTx.children.add(tx);
        // visit each node only once
        if (!parentTx.relativesSet) {
          this.setRelatives(parentTx, mempool);
        }
        parentTx.ancestorMap.forEach((ancestor) => {
          tx.ancestorMap.set(ancestor.txid, ancestor);
        });
      }
    };
    tx.ancestorFee = tx.fee || 0;
    tx.ancestorWeight = tx.weight || 0;
    tx.ancestorMap.forEach((ancestor) => {
      tx.ancestorFee += ancestor.fee;
      tx.ancestorWeight += ancestor.weight;
    });
    tx.score = tx.ancestorFee / (tx.ancestorWeight || 1);
    tx.relativesSet = true;
  }
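  // Worked example (illustrative): a 600 WU transaction paying 1,000 sats with one unconfirmed
  // 400 WU parent paying 200 sats ends up with ancestorFee = 1,200 and ancestorWeight = 1,000,
  // so score = 1200 / 1000 = 1.2 sats per weight unit; packages are selected in order of this score.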

  // iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
  // avoids recursion to limit call stack depth
  private updateDescendants(
    rootTx: AuditTransaction,
    mempool: { [txid: string]: AuditTransaction },
    modified: PairingHeap<AuditTransaction>,
  ): void {
    const descendantSet: Set<AuditTransaction> = new Set();
    // stack of nodes left to visit
    const descendants: AuditTransaction[] = [];
    let descendantTx;
    let ancestorIndex;
    let tmpScore;
    rootTx.children.forEach(childTx => {
      if (!descendantSet.has(childTx)) {
        descendants.push(childTx);
        descendantSet.add(childTx);
      }
    });
    while (descendants.length) {
      descendantTx = descendants.pop();
      if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
        // remove tx as ancestor
        descendantTx.ancestorMap.delete(rootTx.txid);
        descendantTx.ancestorFee -= rootTx.fee;
        descendantTx.ancestorWeight -= rootTx.weight;
        tmpScore = descendantTx.score;
        descendantTx.score = descendantTx.ancestorFee / descendantTx.ancestorWeight;

        if (!descendantTx.modifiedNode) {
          descendantTx.modified = true;
          descendantTx.modifiedNode = modified.add(descendantTx);
        } else {
          // rebalance modified heap if score has changed
          if (descendantTx.score < tmpScore) {
            modified.decreasePriority(descendantTx.modifiedNode);
          } else if (descendantTx.score > tmpScore) {
            modified.increasePriority(descendantTx.modifiedNode);
          }
        }

        // add this node's children to the stack
        descendantTx.children.forEach(childTx => {
          // visit each node only once
          if (!descendantSet.has(childTx)) {
            descendants.push(childTx);
            descendantSet.add(childTx);
          }
        });
      }
    }
  }

  private dataToMempoolBlocks(transactions: TransactionExtended[],
    blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
    let rangeLength = 4;
backend/src/api/websocket-handler.ts (changed)
@@ -18,6 +18,7 @@ import difficultyAdjustment from './difficulty-adjustment';
import feeApi from './fee-api';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import Audit from './audit';

class WebsocketHandler {
  private wss: WebSocket.Server | undefined;
@@ -405,7 +406,7 @@ class WebsocketHandler {
    });
  }

  handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) {
  handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): void {
    if (!this.wss) {
      throw new Error('WebSocket.Server is not set');
    }
@@ -414,66 +415,54 @@ class WebsocketHandler {
    let mBlockDeltas: undefined | MempoolBlockDelta[];
    let matchRate = 0;
    const _memPool = memPool.getMempool();
    const _mempoolBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();

    if (_mempoolBlocks[0]) {
      const matches: string[] = [];
      const added: string[] = [];
      const missing: string[] = [];
    if (Common.indexingEnabled()) {
      const mempoolCopy = cloneMempool(_memPool);
      const projectedBlocks = mempoolBlocks.makeBlockTemplates(mempoolCopy, 2);

      for (const txId of txIds) {
        if (_mempoolBlocks[0].transactionIds.indexOf(txId) > -1) {
          matches.push(txId);
        } else {
          added.push(txId);
      const { censored, added, score } = Audit.auditBlock(transactions, projectedBlocks, mempoolCopy);
      matchRate = Math.round(score * 100 * 100) / 100;

      const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
        return {
          txid: tx.txid,
          vsize: tx.vsize,
          fee: tx.fee ? Math.round(tx.fee) : 0,
          value: tx.value,
        };
      }) : [];

      BlocksSummariesRepository.$saveSummary({
        height: block.height,
        template: {
          id: block.id,
          transactions: stripped
        }
        delete _memPool[txId];
      }
      });

      for (const txId of _mempoolBlocks[0].transactionIds) {
        if (matches.includes(txId) || added.includes(txId)) {
          continue;
        }
        missing.push(txId);
      }
      BlocksAuditsRepository.$saveAudit({
        time: block.timestamp,
        height: block.height,
        hash: block.id,
        addedTxs: added,
        missingTxs: censored,
        matchRate: matchRate,
      });

      matchRate = Math.round((Math.max(0, matches.length - missing.length - added.length) / txIds.length * 100) * 100) / 100;
      mempoolBlocks.updateMempoolBlocks(_memPool);
      mBlocks = mempoolBlocks.getMempoolBlocks();
      mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();

      if (Common.indexingEnabled()) {
        const stripped = _mempoolBlocks[0].transactions.map((tx) => {
          return {
            txid: tx.txid,
            vsize: tx.vsize,
            fee: tx.fee ? Math.round(tx.fee) : 0,
            value: tx.value,
          };
        });
        BlocksSummariesRepository.$saveSummary({
          height: block.height,
          template: {
            id: block.id,
            transactions: stripped
          }
        });

        BlocksAuditsRepository.$saveAudit({
          time: block.timestamp,
          height: block.height,
          hash: block.id,
          addedTxs: added,
          missingTxs: missing,
          matchRate: matchRate,
        });
      if (block.extras) {
        block.extras.matchRate = matchRate;
      }
    }

    if (block.extras) {
      block.extras.matchRate = matchRate;
    // Update mempool to remove transactions included in the new block
    for (const txId of txIds) {
      delete _memPool[txId];
    }

    mempoolBlocks.updateMempoolBlocks(_memPool);
    mBlocks = mempoolBlocks.getMempoolBlocks();
    mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();

    const da = difficultyAdjustment.getDifficultyAdjustment();
    const fees = feeApi.getRecommendedFee();
@@ -580,4 +569,14 @@ class WebsocketHandler {
  }
}

function cloneMempool(mempool: { [txid: string]: TransactionExtended }): { [txid: string]: TransactionExtended } {
  const cloned = {};
  Object.keys(mempool).forEach(id => {
    cloned[id] = {
      ...mempool[id]
    };
  });
  return cloned;
}

export default new WebsocketHandler();
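A note on cloneMempool() above: makeBlockTemplates() writes effectiveFeePerVsize, ancestors and bestDescendant onto whatever transaction objects it is given, so the handler appears to pass shallow copies to keep the live mempool entries untouched. A minimal sketch of the same shallow-copy pattern, with hypothetical values:

// Illustrative only: the shallow-copy pattern used by cloneMempool(), not the function itself.
const live = { abc1: { txid: 'abc1', fee: 100, weight: 400 } };
const cloned = Object.fromEntries(Object.entries(live).map(([id, tx]) => [id, { ...tx }]));
cloned['abc1'].fee = 999;
console.log(live['abc1'].fee); // 100: the original entry is unchanged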
backend/src/mempool.interfaces.ts (changed)
@@ -1,4 +1,5 @@
import { IEsploraApi } from './api/bitcoin/esplora-api.interface';
import { HeapNode } from "./utils/pairing-heap";

export interface PoolTag {
  id: number; // mysql row id
@@ -70,12 +71,40 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
  deleteAfter?: number;
}

interface Ancestor {
export interface AuditTransaction {
  txid: string;
  fee: number;
  size: number;
  weight: number;
  feePerVsize: number;
  vin: IEsploraApi.Vin[];
  relativesSet: boolean;
  ancestorMap: Map<string, AuditTransaction>;
  children: Set<AuditTransaction>;
  ancestorFee: number;
  ancestorWeight: number;
  score: number;
  used: boolean;
  modified: boolean;
  modifiedNode: HeapNode<AuditTransaction>;
}

export interface Ancestor {
  txid: string;
  weight: number;
  fee: number;
}

export interface TransactionSet {
  fee: number;
  weight: number;
  score: number;
  children?: Set<string>;
  available?: boolean;
  modified?: boolean;
  modifiedNode?: HeapNode<string>;
}

interface BestDescendant {
  txid: string;
  weight: number;
backend/src/utils/pairing-heap.ts (new file, +174)
@@ -0,0 +1,174 @@
export type HeapNode<T> = {
  element: T
  child?: HeapNode<T>
  next?: HeapNode<T>
  prev?: HeapNode<T>
} | null | undefined;

// minimal pairing heap priority queue implementation
export class PairingHeap<T> {
  private root: HeapNode<T> = null;
  private comparator: (a: T, b: T) => boolean;

  // comparator function should return 'true' if a is higher priority than b
  constructor(comparator: (a: T, b: T) => boolean) {
    this.comparator = comparator;
  }

  isEmpty(): boolean {
    return !this.root;
  }

  add(element: T): HeapNode<T> {
    const node: HeapNode<T> = {
      element
    };

    this.root = this.meld(this.root, node);

    return node;
  }

  // returns the top priority element without modifying the queue
  peek(): T | void {
    return this.root?.element;
  }

  // removes and returns the top priority element
  pop(): T | void {
    let element;
    if (this.root) {
      const node = this.root;
      element = node.element;
      this.root = this.mergePairs(node.child);
    }
    return element;
  }

  deleteNode(node: HeapNode<T>): void {
    if (!node) {
      return;
    }

    if (node === this.root) {
      this.root = this.mergePairs(node.child);
    }
    else {
      if (node.prev) {
        if (node.prev.child === node) {
          node.prev.child = node.next;
        }
        else {
          node.prev.next = node.next;
        }
      }
      if (node.next) {
        node.next.prev = node.prev;
      }
      this.root = this.meld(this.root, this.mergePairs(node.child));
    }

    node.child = null;
    node.prev = null;
    node.next = null;
  }

  // fix the heap after increasing the priority of a given node
  increasePriority(node: HeapNode<T>): void {
    // already the top priority element
    if (!node || node === this.root) {
      return;
    }
    // extract from siblings
    if (node.prev) {
      if (node.prev?.child === node) {
        if (this.comparator(node.prev.element, node.element)) {
          // already in a valid position
          return;
        }
        node.prev.child = node.next;
      }
      else {
        node.prev.next = node.next;
      }
    }
    if (node.next) {
      node.next.prev = node.prev;
    }

    this.root = this.meld(this.root, node);
  }

  decreasePriority(node: HeapNode<T>): void {
    this.deleteNode(node);
    this.root = this.meld(this.root, node);
  }

  meld(a: HeapNode<T>, b: HeapNode<T>): HeapNode<T> {
    if (!a) {
      return b;
    }
    if (!b || a === b) {
      return a;
    }

    let parent: HeapNode<T> = b;
    let child: HeapNode<T> = a;
    if (this.comparator(a.element, b.element)) {
      parent = a;
      child = b;
    }

    child.next = parent.child;
    if (parent.child) {
      parent.child.prev = child;
    }
    child.prev = parent;
    parent.child = child;

    parent.next = null;
    parent.prev = null;

    return parent;
  }

  mergePairs(node: HeapNode<T>): HeapNode<T> {
    if (!node) {
      return null;
    }

    let current: HeapNode<T> = node;
    let next: HeapNode<T>;
    let nextCurrent: HeapNode<T>;
    let pairs: HeapNode<T>;
    let melded: HeapNode<T>;
    while (current) {
      next = current.next;
      if (next) {
        nextCurrent = next.next;
        melded = this.meld(current, next);
        if (melded) {
          melded.prev = pairs;
        }
        pairs = melded;
      }
      else {
        nextCurrent = null;
        current.prev = pairs;
        pairs = current;
        break;
      }
      current = nextCurrent;
    }

    melded = null;
    let prev: HeapNode<T>;
    while (pairs) {
      prev = pairs.prev;
      melded = this.meld(melded, pairs);
      pairs = prev;
    }

    return melded;
  }
}
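A minimal usage sketch of the comparator convention noted above (illustrative, not part of the diff; assumes PairingHeap is imported from backend/src/utils/pairing-heap):

// Max-heap of numbers: the comparator returns true when its first argument has higher priority.
const heap = new PairingHeap<number>((a, b) => a > b);
heap.add(3);
heap.add(1);
heap.add(2);
console.log(heap.peek()); // 3
console.log(heap.pop());  // 3
console.log(heap.pop());  // 2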