Mirror of https://github.com/mempool/mempool.git (synced 2025-02-24 22:58:30 +01:00)

Merge branch 'master' into orangesurf/2024-07-19
Commit 82360f5525
15 changed files with 858 additions and 809 deletions
@@ -1,15 +1,14 @@
import logger from '../../logger';
import { MempoolTransactionExtended } from '../../mempool.interfaces';
import { IEsploraApi } from '../bitcoin/esplora-api.interface';
import { GraphTx, getSameBlockRelatives, initializeRelatives, makeBlockTemplate, mempoolComparator, removeAncestors, setAncestorScores } from '../mini-miner';

const BLOCK_WEIGHT_UNITS = 4_000_000;
const BLOCK_SIGOPS = 80_000;
const MAX_RELATIVE_GRAPH_SIZE = 200;
const BID_BOOST_WINDOW = 40_000;
const BID_BOOST_MIN_OFFSET = 10_000;
const BID_BOOST_MAX_OFFSET = 400_000;

type Acceleration = {
export type Acceleration = {
txid: string;
max_bid: number;
};

@@ -28,31 +27,6 @@ export interface AccelerationInfo {
cost: number; // additional cost to accelerate ((cost + txSummary.effectiveFee) / txSummary.effectiveVsize) >= targetFeeRate
}
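// Illustrative sketch, not part of this diff: the comment on `cost` above implies that the
// minimum acceleration cost is whatever lifts the package to the target rate. For example, a
// package paying 1,000 sats over 500 effective vbytes with a 10 sat/vB target needs
// (cost + 1000) / 500 >= 10, i.e. at least 4,000 additional sats. A hypothetical helper:
function minAccelerationCost(effectiveFee: number, effectiveVsize: number, targetFeeRate: number): number {
  return Math.max(0, Math.ceil(targetFeeRate * effectiveVsize - effectiveFee));
}
// minAccelerationCost(1000, 500, 10) === 4000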

interface GraphTx {
txid: string;
vsize: number;
weight: number;
fees: {
base: number; // in sats
};
depends: string[];
spentby: string[];
}

interface MempoolTx extends GraphTx {
ancestorcount: number;
ancestorsize: number;
fees: { // in sats
base: number;
ancestor: number;
};

ancestors: Map<string, MempoolTx>,
ancestorRate: number;
individualRate: number;
score: number;
}

class AccelerationCosts {
/**
* Takes a list of accelerations and verbose block data

@@ -61,7 +35,7 @@ class AccelerationCosts {
* @param accelerationsx
* @param verboseBlock
*/
public calculateBoostRate(accelerations: Acceleration[], blockTxs: IEsploraApi.Transaction[]): number {
public calculateBoostRate(accelerations: Acceleration[], blockTxs: MempoolTransactionExtended[]): number {
// Run GBT ourselves to calculate accurate effective fee rates
// the list of transactions comes from a mined block, so we already know everything fits within consensus limits
const template = makeBlockTemplate(blockTxs, accelerations, 1, Infinity, Infinity);

@@ -176,94 +150,14 @@ class AccelerationCosts {
*/
public getAccelerationInfo(tx: MempoolTransactionExtended, targetFeeRate: number, transactions: MempoolTransactionExtended[]): AccelerationInfo {
// Get same-block transaction ancestors
const allRelatives = this.getSameBlockRelatives(tx, transactions);
const relativesMap = this.initializeRelatives(allRelatives);
const rootTx = relativesMap.get(tx.txid) as MempoolTx;
const allRelatives = getSameBlockRelatives(tx, transactions);
const relativesMap = initializeRelatives(allRelatives);
const rootTx = relativesMap.get(tx.txid) as GraphTx;

// Calculate cost to boost
return this.calculateAccelerationAncestors(rootTx, relativesMap, targetFeeRate);
}

/**
* Takes a raw transaction, and builds a graph of same-block relatives,
* and returns as a MempoolTx
*
* @param tx
*/
private getSameBlockRelatives(tx: MempoolTransactionExtended, transactions: MempoolTransactionExtended[]): Map<string, GraphTx> {
const blockTxs = new Map<string, MempoolTransactionExtended>(); // map of txs in this block
const spendMap = new Map<string, string>(); // map of outpoints to spending txids
for (const tx of transactions) {
blockTxs.set(tx.txid, tx);
for (const vin of tx.vin) {
spendMap.set(`${vin.txid}:${vin.vout}`, tx.txid);
}
}

const relatives: Map<string, GraphTx> = new Map();
const stack: string[] = [tx.txid];

// build set of same-block ancestors
while (stack.length > 0) {
const nextTxid = stack.pop();
const nextTx = nextTxid ? blockTxs.get(nextTxid) : null;
if (!nextTx || relatives.has(nextTx.txid)) {
continue;
}

const mempoolTx = this.convertToGraphTx(nextTx);

mempoolTx.fees.base = nextTx.fee || 0;
mempoolTx.depends = nextTx.vin.map(vin => vin.txid).filter(inTxid => inTxid && blockTxs.has(inTxid)) as string[];
mempoolTx.spentby = nextTx.vout.map((vout, index) => spendMap.get(`${nextTx.txid}:${index}`)).filter(outTxid => outTxid && blockTxs.has(outTxid)) as string[];

for (const txid of [...mempoolTx.depends, ...mempoolTx.spentby]) {
if (txid) {
stack.push(txid);
}
}

relatives.set(mempoolTx.txid, mempoolTx);
}

return relatives;
}

/**
* Takes a raw transaction and converts it to MempoolTx format
* fee and ancestor data is initialized with dummy/null values
*
* @param tx
*/
private convertToGraphTx(tx: MempoolTransactionExtended): GraphTx {
return {
txid: tx.txid,
vsize: Math.ceil(tx.weight / 4),
weight: tx.weight,
fees: {
base: 0, // dummy
},
depends: [], // dummy
spentby: [], // dummy
};
}

private convertGraphToMempoolTx(tx: GraphTx): MempoolTx {
return {
...tx,
fees: {
base: tx.fees.base,
ancestor: tx.fees.base,
},
ancestorcount: 1,
ancestorsize: Math.ceil(tx.weight / 4),
ancestors: new Map<string, MempoolTx>(),
ancestorRate: 0,
individualRate: 0,
score: 0,
};
}

/**
* Given a root transaction, a list of in-mempool ancestors, and a target fee rate,
* Calculate the minimum set of transactions to fee-bump, their total vsize + fees

@@ -271,7 +165,7 @@ class AccelerationCosts {
* @param tx
* @param ancestors
*/
private calculateAccelerationAncestors(tx: MempoolTx, relatives: Map<string, MempoolTx>, targetFeeRate: number): AccelerationInfo {
private calculateAccelerationAncestors(tx: GraphTx, relatives: Map<string, GraphTx>, targetFeeRate: number): AccelerationInfo {
// add root tx to the ancestor map
relatives.set(tx.txid, tx);

@@ -283,12 +177,12 @@ class AccelerationCosts {
});

// Initialize individual & ancestor fee rates
relatives.forEach(entry => this.setAncestorScores(entry));
relatives.forEach(entry => setAncestorScores(entry));

// Sort by descending ancestor score
let sortedRelatives = Array.from(relatives.values()).sort(this.mempoolComparator);
let sortedRelatives = Array.from(relatives.values()).sort(mempoolComparator);

let includedInCluster: Map<string, MempoolTx> | null = null;
let includedInCluster: Map<string, GraphTx> | null = null;

// While highest score >= targetFeeRate
let maxIterations = MAX_RELATIVE_GRAPH_SIZE;

@@ -297,17 +191,17 @@ class AccelerationCosts {
// Grab the highest scoring entry
const best = sortedRelatives.shift();
if (best) {
const cluster = new Map<string, MempoolTx>(best.ancestors?.entries() || []);
const cluster = new Map<string, GraphTx>(best.ancestors?.entries() || []);
if (best.ancestors.has(tx.txid)) {
includedInCluster = cluster;
}
cluster.set(best.txid, best);
// Remove this cluster (it already pays over the target rate, so doesn't need to be boosted)
// and update scores, ancestor totals and dependencies for the survivors
this.removeAncestors(cluster, relatives);
removeAncestors(cluster, relatives);

// re-sort
sortedRelatives = Array.from(relatives.values()).sort(this.mempoolComparator);
sortedRelatives = Array.from(relatives.values()).sort(mempoolComparator);
}
}

@@ -345,394 +239,6 @@ class AccelerationCosts {
nextBlockFee: Math.ceil(tx.ancestorsize * targetFeeRate),
};
}

/**
* Recursively traverses an in-mempool dependency graph, and sets a Map of in-mempool ancestors
* for each transaction.
*
* @param tx
* @param all
*/
private setAncestors(tx: MempoolTx, all: Map<string, MempoolTx>, visited: Map<string, Map<string, MempoolTx>>, depth: number = 0): Map<string, MempoolTx> {
// sanity check for infinite recursion / too many ancestors (should never happen)
if (depth >= 100) {
logger.warn('acceleration dependency calculation failed: setAncestors reached depth of 100, unable to proceed', `Accelerator`);
throw new Error('invalid_tx_dependencies');
}

// initialize the ancestor map for this tx
tx.ancestors = new Map<string, MempoolTx>();
tx.depends.forEach(parentId => {
const parent = all.get(parentId);
if (parent) {
// add the parent
tx.ancestors?.set(parentId, parent);
// check for a cached copy of this parent's ancestors
let ancestors = visited.get(parent.txid);
if (!ancestors) {
// recursively fetch the parent's ancestors
ancestors = this.setAncestors(parent, all, visited, depth + 1);
}
// and add to this tx's map
ancestors.forEach((ancestor, ancestorId) => {
tx.ancestors?.set(ancestorId, ancestor);
});
}
});
visited.set(tx.txid, tx.ancestors);

return tx.ancestors;
}

/**
* Efficiently sets a Map of in-mempool ancestors for each member of an expanded relative graph
* by running setAncestors on each leaf, and caching intermediate results.
* then initializes ancestor data for each transaction
*
* @param all
*/
private initializeRelatives(all: Map<string, GraphTx>): Map<string, MempoolTx> {
const mempoolTxs = new Map<string, MempoolTx>();
all.forEach(entry => {
mempoolTxs.set(entry.txid, this.convertGraphToMempoolTx(entry));
});
const visited: Map<string, Map<string, MempoolTx>> = new Map();
const leaves: MempoolTx[] = Array.from(mempoolTxs.values()).filter(entry => entry.spentby.length === 0);
for (const leaf of leaves) {
this.setAncestors(leaf, mempoolTxs, visited);
}
mempoolTxs.forEach(entry => {
entry.ancestors?.forEach(ancestor => {
entry.ancestorcount++;
entry.ancestorsize += ancestor.vsize;
entry.fees.ancestor += ancestor.fees.base;
});
this.setAncestorScores(entry);
});
return mempoolTxs;
}

/**
* Remove a cluster of transactions from an in-mempool dependency graph
* and update the survivors' scores and ancestors
*
* @param cluster
* @param ancestors
*/
private removeAncestors(cluster: Map<string, MempoolTx>, all: Map<string, MempoolTx>): void {
// remove
cluster.forEach(tx => {
all.delete(tx.txid);
});

// update survivors
all.forEach(tx => {
cluster.forEach(remove => {
if (tx.ancestors?.has(remove.txid)) {
// remove as dependency
tx.ancestors.delete(remove.txid);
tx.depends = tx.depends.filter(parent => parent !== remove.txid);
// update ancestor sizes and fees
tx.ancestorsize -= remove.vsize;
tx.fees.ancestor -= remove.fees.base;
}
});
// recalculate fee rates
this.setAncestorScores(tx);
});
}

/**
* Take a mempool transaction, and set the fee rates and ancestor score
*
* @param tx
*/
private setAncestorScores(tx: MempoolTx): void {
tx.individualRate = tx.fees.base / tx.vsize;
tx.ancestorRate = tx.fees.ancestor / tx.ancestorsize;
tx.score = Math.min(tx.individualRate, tx.ancestorRate);
}

// Sort by descending score
private mempoolComparator(a, b): number {
return b.score - a.score;
}
}

export default new AccelerationCosts;

interface TemplateTransaction {
txid: string;
order: number;
weight: number;
adjustedVsize: number; // sigop-adjusted vsize, rounded up to the nearest integer
sigops: number;
fee: number;
feeDelta: number;
ancestors: string[];
cluster: string[];
effectiveFeePerVsize: number;
}

interface MinerTransaction extends TemplateTransaction {
inputs: string[];
feePerVsize: number;
relativesSet: boolean;
ancestorMap: Map<string, MinerTransaction>;
children: Set<MinerTransaction>;
ancestorFee: number;
ancestorVsize: number;
ancestorSigops: number;
score: number;
used: boolean;
modified: boolean;
dependencyRate: number;
}

/*
* Build a block using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*/
export function makeBlockTemplate(candidates: IEsploraApi.Transaction[], accelerations: Acceleration[], maxBlocks: number = 8, weightLimit: number = BLOCK_WEIGHT_UNITS, sigopLimit: number = BLOCK_SIGOPS): TemplateTransaction[] {
const auditPool: Map<string, MinerTransaction> = new Map();
const mempoolArray: MinerTransaction[] = [];

candidates.forEach(tx => {
// initializing everything up front helps V8 optimize property access later
const adjustedVsize = Math.ceil(Math.max(tx.weight / 4, 5 * (tx.sigops || 0)));
const feePerVsize = (tx.fee / adjustedVsize);
auditPool.set(tx.txid, {
txid: tx.txid,
order: txidToOrdering(tx.txid),
fee: tx.fee,
feeDelta: 0,
weight: tx.weight,
adjustedVsize,
feePerVsize: feePerVsize,
effectiveFeePerVsize: feePerVsize,
dependencyRate: feePerVsize,
sigops: tx.sigops || 0,
inputs: (tx.vin?.map(vin => vin.txid) || []) as string[],
relativesSet: false,
ancestors: [],
cluster: [],
ancestorMap: new Map<string, MinerTransaction>(),
children: new Set<MinerTransaction>(),
ancestorFee: 0,
ancestorVsize: 0,
ancestorSigops: 0,
score: 0,
used: false,
modified: false,
});
mempoolArray.push(auditPool.get(tx.txid) as MinerTransaction);
});

// set accelerated effective fee
for (const acceleration of accelerations) {
const tx = auditPool.get(acceleration.txid);
if (tx) {
tx.feeDelta = acceleration.max_bid;
tx.feePerVsize = ((tx.fee + tx.feeDelta) / tx.adjustedVsize);
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.dependencyRate = tx.feePerVsize;
}
}

// Build relatives graph & calculate ancestor scores
for (const tx of mempoolArray) {
if (!tx.relativesSet) {
setRelatives(tx, auditPool);
}
}

// Sort by descending ancestor score
mempoolArray.sort(priorityComparator);

// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: number[][] = [];
let blockWeight = 0;
let blockSigops = 0;
const transactions: MinerTransaction[] = [];
let modified: MinerTransaction[] = [];
const overflow: MinerTransaction[] = [];
let failures = 0;
while (mempoolArray.length || modified.length) {
// skip invalid transactions
while (mempoolArray[0].used || mempoolArray[0].modified) {
mempoolArray.shift();
}

// Select best next package
let nextTx;
const nextPoolTx = mempoolArray[0];
const nextModifiedTx = modified[0];
if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
nextTx = nextPoolTx;
mempoolArray.shift();
} else {
modified.shift();
if (nextModifiedTx) {
nextTx = nextModifiedTx;
}
}

if (nextTx && !nextTx?.used) {
// Check if the package fits into this block
if (blocks.length >= (maxBlocks - 1) || ((blockWeight + (4 * nextTx.ancestorVsize) < weightLimit) && (blockSigops + nextTx.ancestorSigops <= sigopLimit))) {
const ancestors: MinerTransaction[] = Array.from(nextTx.ancestorMap.values());
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
const clusterTxids = sortedTxSet.map(tx => tx.txid);
const effectiveFeeRate = Math.min(nextTx.dependencyRate || Infinity, nextTx.ancestorFee / nextTx.ancestorVsize);
const used: MinerTransaction[] = [];
while (sortedTxSet.length) {
const ancestor = sortedTxSet.pop();
if (!ancestor) {
continue;
}
ancestor.used = true;
ancestor.usedBy = nextTx.txid;
// update this tx with effective fee rate & relatives data
if (ancestor.effectiveFeePerVsize !== effectiveFeeRate) {
ancestor.effectiveFeePerVsize = effectiveFeeRate;
}
ancestor.cluster = clusterTxids;
transactions.push(ancestor);
blockWeight += ancestor.weight;
blockSigops += ancestor.sigops;
used.push(ancestor);
}

// remove these as valid package ancestors for any descendants remaining in the mempool
if (used.length) {
used.forEach(tx => {
modified = updateDescendants(tx, auditPool, modified, effectiveFeeRate);
});
}

failures = 0;
} else {
// hold this package in an overflow list while we check for smaller options
overflow.push(nextTx);
failures++;
}
}

// this block is full
const exceededPackageTries = failures > 1000 && blockWeight > (weightLimit - 4000);
const queueEmpty = !mempoolArray.length && !modified.length;

if (exceededPackageTries || queueEmpty) {
break;
}
}

for (const tx of transactions) {
tx.ancestors = Object.values(tx.ancestorMap);
}

return transactions;
}

// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
function setRelatives(
tx: MinerTransaction,
mempool: Map<string, MinerTransaction>,
): void {
for (const parent of tx.inputs) {
const parentTx = mempool.get(parent);
if (parentTx && !tx.ancestorMap?.has(parent)) {
tx.ancestorMap.set(parent, parentTx);
parentTx.children.add(tx);
// visit each node only once
if (!parentTx.relativesSet) {
setRelatives(parentTx, mempool);
}
parentTx.ancestorMap.forEach((ancestor) => {
tx.ancestorMap.set(ancestor.txid, ancestor);
});
}
};
tx.ancestorFee = (tx.fee + tx.feeDelta);
tx.ancestorVsize = tx.adjustedVsize || 0;
tx.ancestorSigops = tx.sigops || 0;
tx.ancestorMap.forEach((ancestor) => {
tx.ancestorFee += (ancestor.fee + ancestor.feeDelta);
tx.ancestorVsize += ancestor.adjustedVsize;
tx.ancestorSigops += ancestor.sigops;
});
tx.score = tx.ancestorFee / tx.ancestorVsize;
tx.relativesSet = true;
}

// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
// avoids recursion to limit call stack depth
function updateDescendants(
rootTx: MinerTransaction,
mempool: Map<string, MinerTransaction>,
modified: MinerTransaction[],
clusterRate: number,
): MinerTransaction[] {
const descendantSet: Set<MinerTransaction> = new Set();
// stack of nodes left to visit
const descendants: MinerTransaction[] = [];
let descendantTx: MinerTransaction | undefined;
rootTx.children.forEach(childTx => {
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
while (descendants.length) {
descendantTx = descendants.pop();
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
// remove tx as ancestor
descendantTx.ancestorMap.delete(rootTx.txid);
descendantTx.ancestorFee -= (rootTx.fee + rootTx.feeDelta);
descendantTx.ancestorVsize -= rootTx.adjustedVsize;
descendantTx.ancestorSigops -= rootTx.sigops;
descendantTx.score = descendantTx.ancestorFee / descendantTx.ancestorVsize;
descendantTx.dependencyRate = descendantTx.dependencyRate ? Math.min(descendantTx.dependencyRate, clusterRate) : clusterRate;

if (!descendantTx.modified) {
descendantTx.modified = true;
modified.push(descendantTx);
}

// add this node's children to the stack
descendantTx.children.forEach(childTx => {
// visit each node only once
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
}
}
// return new, resorted modified list
return modified.sort(priorityComparator);
}

// Used to sort an array of MinerTransactions by descending ancestor score
function priorityComparator(a: MinerTransaction, b: MinerTransaction): number {
if (b.score === a.score) {
// tie-break by txid for stability
return a.order - b.order;
} else {
return b.score - a.score;
}
}

// returns the most significant 4 bytes of the txid as an integer
function txidToOrdering(txid: string): number {
return parseInt(
txid.substring(62, 64) +
txid.substring(60, 62) +
txid.substring(58, 60) +
txid.substring(56, 58),
16
);
}
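// Illustrative sketch, not part of this diff: txidToOrdering reads the last four bytes of the
// displayed txid (characters 56..63) and reassembles them in reverse byte order before parsing
// them as hex, so a txid ending in "aabbccdd" maps to parseInt('ddccbbaa', 16) === 3721182122.
// The result is only used by priorityComparator above as a stable tie-breaker between packages
// with identical ancestor scores.
const exampleOrder = txidToOrdering('00'.repeat(28) + 'aabbccdd'); // 3721182122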
@@ -19,7 +19,7 @@ import bitcoinClient from './bitcoin-client';
import difficultyAdjustment from '../difficulty-adjustment';
import transactionRepository from '../../repositories/TransactionRepository';
import rbfCache from '../rbf-cache';
import { calculateCpfp } from '../cpfp';
import { calculateMempoolTxCpfp } from '../cpfp';

class BitcoinRoutes {
public initRoutes(app: Application) {

@@ -160,6 +160,7 @@ class BitcoinRoutes {
descendants: tx.descendants || null,
effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
sigops: tx.sigops,
fee: tx.fee,
adjustedVsize: tx.adjustedVsize,
acceleration: tx.acceleration,
acceleratedBy: tx.acceleratedBy || undefined,

@@ -168,7 +169,7 @@ class BitcoinRoutes {
return;
}

const cpfpInfo = calculateCpfp(tx, mempool.getMempool());
const cpfpInfo = calculateMempoolTxCpfp(tx, mempool.getMempool());

res.json(cpfpInfo);
return;
@@ -30,6 +30,9 @@ import redisCache from './redis-cache';
import rbfCache from './rbf-cache';
import { calcBitsDifference } from './difficulty-adjustment';
import AccelerationRepository from '../repositories/AccelerationRepository';
import { calculateFastBlockCpfp, calculateGoodBlockCpfp } from './cpfp';
import mempool from './mempool';
import CpfpRepository from '../repositories/CpfpRepository';

class Blocks {
private blocks: BlockExtended[] = [];

@@ -567,8 +570,11 @@ class Blocks {
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
const currentBlockHeight = blockchainInfo.blocks;

const unclassifiedBlocksList = await BlocksSummariesRepository.$getSummariesWithVersion(0);
const unclassifiedTemplatesList = await BlocksSummariesRepository.$getTemplatesWithVersion(0);
const targetSummaryVersion: number = 1;
const targetTemplateVersion: number = 1;

const unclassifiedBlocksList = await BlocksSummariesRepository.$getSummariesBelowVersion(targetSummaryVersion);
const unclassifiedTemplatesList = await BlocksSummariesRepository.$getTemplatesBelowVersion(targetTemplateVersion);

// nothing to do
if (!unclassifiedBlocksList?.length && !unclassifiedTemplatesList?.length) {

@@ -601,16 +607,24 @@ class Blocks {

for (let height = currentBlockHeight; height >= 0; height--) {
try {
let txs: TransactionExtended[] | null = null;
let txs: MempoolTransactionExtended[] | null = null;
if (unclassifiedBlocks[height]) {
const blockHash = unclassifiedBlocks[height];
// fetch transactions
txs = (await bitcoinApi.$getTxsForBlock(blockHash)).map(tx => transactionUtils.extendTransaction(tx)) || [];
txs = (await bitcoinApi.$getTxsForBlock(blockHash)).map(tx => transactionUtils.extendMempoolTransaction(tx)) || [];
// add CPFP
const cpfpSummary = Common.calculateCpfp(height, txs, true);
const cpfpSummary = calculateGoodBlockCpfp(height, txs, []);
// classify
const { transactions: classifiedTxs } = this.summarizeBlockTransactions(blockHash, cpfpSummary.transactions);
await BlocksSummariesRepository.$saveTransactions(height, blockHash, classifiedTxs, 1);
await BlocksSummariesRepository.$saveTransactions(height, blockHash, classifiedTxs, 2);
if (unclassifiedBlocks[height].version < 2 && targetSummaryVersion === 2) {
const cpfpClusters = await CpfpRepository.$getClustersAt(height);
if (!cpfpRepository.compareClusters(cpfpClusters, cpfpSummary.clusters)) {
// CPFP clusters changed - update the compact_cpfp tables
await CpfpRepository.$deleteClustersAt(height);
await this.$saveCpfp(blockHash, height, cpfpSummary);
}
}
await Common.sleep$(250);
}
if (unclassifiedTemplates[height]) {

@@ -636,7 +650,7 @@ class Blocks {
}
templateTxs.push(tx || templateTx);
}
const cpfpSummary = Common.calculateCpfp(height, templateTxs?.filter(tx => tx['effectiveFeePerVsize'] != null) as TransactionExtended[], true);
const cpfpSummary = calculateGoodBlockCpfp(height, templateTxs?.filter(tx => tx['effectiveFeePerVsize'] != null) as MempoolTransactionExtended[], []);
// classify
const { transactions: classifiedTxs } = this.summarizeBlockTransactions(blockHash, cpfpSummary.transactions);
const classifiedTxMap: { [txid: string]: TransactionClassified } = {};

@@ -890,7 +904,7 @@ class Blocks {
}
}

const cpfpSummary: CpfpSummary = Common.calculateCpfp(block.height, transactions);
const cpfpSummary: CpfpSummary = calculateGoodBlockCpfp(block.height, transactions, Object.values(mempool.getAccelerations()).map(a => ({ txid: a.txid, max_bid: a.feeDelta })));
const blockExtended: BlockExtended = await this.$getBlockExtended(block, cpfpSummary.transactions);
const blockSummary: BlockSummary = this.summarizeBlockTransactions(block.id, cpfpSummary.transactions);
this.updateTimerProgress(timer, `got block data for ${this.currentBlockHeight}`);

@@ -1149,7 +1163,7 @@ class Blocks {
transactions: cpfpSummary.transactions.map(tx => {
let flags: number = 0;
try {
flags = tx.flags || Common.getTransactionFlags(tx);
flags = Common.getTransactionFlags(tx);
} catch (e) {
logger.warn('Failed to classify transaction: ' + (e instanceof Error ? e.message : e));
}

@@ -1399,7 +1413,7 @@ class Blocks {
}

if (transactions?.length != null) {
const summary = Common.calculateCpfp(height, transactions as TransactionExtended[]);
const summary = calculateFastBlockCpfp(height, transactions as TransactionExtended[]);

await this.$saveCpfp(hash, height, summary);
@@ -419,12 +419,15 @@ export class Common {
let flags = tx.flags ? BigInt(tx.flags) : 0n;

// Update variable flags (CPFP, RBF)
flags &= ~TransactionFlags.cpfp_child;
if (tx.ancestors?.length) {
flags |= TransactionFlags.cpfp_child;
}
flags &= ~TransactionFlags.cpfp_parent;
if (tx.descendants?.length) {
flags |= TransactionFlags.cpfp_parent;
}
flags &= ~TransactionFlags.replacement;
if (tx.replacement) {
flags |= TransactionFlags.replacement;
}
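// Illustrative sketch, not part of this diff: cpfp_child, cpfp_parent and replacement are the
// only bits that can change while a transaction waits in the mempool, so each one is cleared and
// then re-derived on every call, leaving the other cached flag bits untouched. With a
// hypothetical cpfp_child value of 1n:
//   flags &= ~1n;                              // clear the stale bit
//   if (tx.ancestors?.length) { flags |= 1n; } // set it again only if the condition still holds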

@@ -806,96 +809,6 @@ export class Common {
}
}

static calculateCpfp(height: number, transactions: TransactionExtended[], saveRelatives: boolean = false): CpfpSummary {
const clusters: CpfpCluster[] = []; // list of all cpfp clusters in this block
const clusterMap: { [txid: string]: CpfpCluster } = {}; // map transactions to their cpfp cluster
let clusterTxs: TransactionExtended[] = []; // working list of elements of the current cluster
let ancestors: { [txid: string]: boolean } = {}; // working set of ancestors of the current cluster root
const txMap: { [txid: string]: TransactionExtended } = {};
// initialize the txMap
for (const tx of transactions) {
txMap[tx.txid] = tx;
}
// reverse pass to identify CPFP clusters
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
clusterTxs.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += (tx.weight / 4);
});
const effectiveFeePerVsize = totalFee / totalVSize;
let cluster: CpfpCluster;
if (clusterTxs.length > 1) {
cluster = {
root: clusterTxs[0].txid,
height,
txs: clusterTxs.map(tx => { return { txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
};
clusters.push(cluster);
}
clusterTxs.forEach(tx => {
txMap[tx.txid].effectiveFeePerVsize = effectiveFeePerVsize;
if (cluster) {
clusterMap[tx.txid] = cluster;
}
});
// reset working vars
clusterTxs = [];
ancestors = {};
}
clusterTxs.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
// forward pass to enforce ancestor rate caps
for (const tx of transactions) {
let minAncestorRate = tx.effectiveFeePerVsize;
for (const vin of tx.vin) {
if (txMap[vin.txid]?.effectiveFeePerVsize) {
minAncestorRate = Math.min(minAncestorRate, txMap[vin.txid].effectiveFeePerVsize);
}
}
// check rounded values to skip cases with almost identical fees
const roundedMinAncestorRate = Math.ceil(minAncestorRate);
const roundedEffectiveFeeRate = Math.floor(tx.effectiveFeePerVsize);
if (roundedMinAncestorRate < roundedEffectiveFeeRate) {
tx.effectiveFeePerVsize = minAncestorRate;
if (!clusterMap[tx.txid]) {
// add a single-tx cluster to record the dependent rate
const cluster = {
root: tx.txid,
height,
txs: [{ txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }],
effectiveFeePerVsize: minAncestorRate,
};
clusterMap[tx.txid] = cluster;
clusters.push(cluster);
} else {
// update the existing cluster with the dependent rate
clusterMap[tx.txid].effectiveFeePerVsize = minAncestorRate;
}
}
}
if (saveRelatives) {
for (const cluster of clusters) {
cluster.txs.forEach((member, index) => {
txMap[member.txid].descendants = cluster.txs.slice(0, index).reverse();
txMap[member.txid].ancestors = cluster.txs.slice(index + 1).reverse();
txMap[member.txid].effectiveFeePerVsize = cluster.effectiveFeePerVsize;
});
}
}
return {
transactions,
clusters,
};
}

static calcEffectiveFeeStatistics(transactions: { weight: number, fee: number, effectiveFeePerVsize?: number, txid: string, acceleration?: boolean }[]): EffectiveFeeStats {
const sortedTxs = transactions.map(tx => { return { txid: tx.txid, weight: tx.weight, rate: tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4)) }; }).sort((a, b) => a.rate - b.rate);
@@ -1,29 +1,172 @@
import { CpfpInfo, MempoolTransactionExtended } from '../mempool.interfaces';
import { Ancestor, CpfpCluster, CpfpInfo, CpfpSummary, MempoolTransactionExtended, TransactionExtended } from '../mempool.interfaces';
import { GraphTx, convertToGraphTx, expandRelativesGraph, initializeRelatives, makeBlockTemplate, mempoolComparator, removeAncestors, setAncestorScores } from './mini-miner';
import memPool from './mempool';
import { Acceleration } from './acceleration/acceleration';

const CPFP_UPDATE_INTERVAL = 60_000; // update CPFP info at most once per 60s per transaction
const MAX_GRAPH_SIZE = 50; // the maximum number of in-mempool relatives to consider
const MAX_CLUSTER_ITERATIONS = 100;

interface GraphTx extends MempoolTransactionExtended {
depends: string[];
spentby: string[];
ancestorMap: Map<string, GraphTx>;
fees: {
base: number;
ancestor: number;
export function calculateFastBlockCpfp(height: number, transactions: TransactionExtended[], saveRelatives: boolean = false): CpfpSummary {
const clusters: CpfpCluster[] = []; // list of all cpfp clusters in this block
const clusterMap: { [txid: string]: CpfpCluster } = {}; // map transactions to their cpfp cluster
let clusterTxs: TransactionExtended[] = []; // working list of elements of the current cluster
let ancestors: { [txid: string]: boolean } = {}; // working set of ancestors of the current cluster root
const txMap: { [txid: string]: TransactionExtended } = {};
// initialize the txMap
for (const tx of transactions) {
txMap[tx.txid] = tx;
}
// reverse pass to identify CPFP clusters
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
clusterTxs.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += (tx.weight / 4);
});
const effectiveFeePerVsize = totalFee / totalVSize;
let cluster: CpfpCluster;
if (clusterTxs.length > 1) {
cluster = {
root: clusterTxs[0].txid,
height,
txs: clusterTxs.map(tx => { return { txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
};
clusters.push(cluster);
}
clusterTxs.forEach(tx => {
txMap[tx.txid].effectiveFeePerVsize = effectiveFeePerVsize;
if (cluster) {
clusterMap[tx.txid] = cluster;
}
});
// reset working vars
clusterTxs = [];
ancestors = {};
}
clusterTxs.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
// forward pass to enforce ancestor rate caps
for (const tx of transactions) {
let minAncestorRate = tx.effectiveFeePerVsize;
for (const vin of tx.vin) {
if (txMap[vin.txid]?.effectiveFeePerVsize) {
minAncestorRate = Math.min(minAncestorRate, txMap[vin.txid].effectiveFeePerVsize);
}
}
// check rounded values to skip cases with almost identical fees
const roundedMinAncestorRate = Math.ceil(minAncestorRate);
const roundedEffectiveFeeRate = Math.floor(tx.effectiveFeePerVsize);
if (roundedMinAncestorRate < roundedEffectiveFeeRate) {
tx.effectiveFeePerVsize = minAncestorRate;
if (!clusterMap[tx.txid]) {
// add a single-tx cluster to record the dependent rate
const cluster = {
root: tx.txid,
height,
txs: [{ txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }],
effectiveFeePerVsize: minAncestorRate,
};
clusterMap[tx.txid] = cluster;
clusters.push(cluster);
} else {
// update the existing cluster with the dependent rate
clusterMap[tx.txid].effectiveFeePerVsize = minAncestorRate;
}
}
}
if (saveRelatives) {
for (const cluster of clusters) {
cluster.txs.forEach((member, index) => {
txMap[member.txid].descendants = cluster.txs.slice(0, index).reverse();
txMap[member.txid].ancestors = cluster.txs.slice(index + 1).reverse();
txMap[member.txid].effectiveFeePerVsize = cluster.effectiveFeePerVsize;
});
}
}
return {
transactions,
clusters,
};
}
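// Illustrative sketch, not part of this diff: the reverse pass above prices a whole cluster at
// its combined fee rate. A hypothetical 200 vB parent paying 200 sats (1 sat/vB) with a 100 vB
// child paying 1,000 sats (10 sat/vB) forms one cluster with
// effectiveFeePerVsize = (200 + 1000) / (200 + 100) = 4 sat/vB for both transactions, and the
// forward pass then caps any transaction whose own rate would exceed that of its ancestors.
const exampleClusterRate = (200 + 1000) / (200 + 100); // 4 sat/vB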

export function calculateGoodBlockCpfp(height: number, transactions: MempoolTransactionExtended[], accelerations: Acceleration[]): CpfpSummary {
const txMap: { [txid: string]: MempoolTransactionExtended } = {};
for (const tx of transactions) {
txMap[tx.txid] = tx;
}
const template = makeBlockTemplate(transactions, accelerations, 1, Infinity, Infinity);
const clusters = new Map<string, string[]>();
for (const tx of template) {
const cluster = tx.cluster || [];
const root = cluster.length ? cluster[cluster.length - 1] : null;
if (cluster.length > 1 && root && !clusters.has(root)) {
clusters.set(root, cluster);
}
txMap[tx.txid].effectiveFeePerVsize = tx.effectiveFeePerVsize;
}

const clusterArray: CpfpCluster[] = [];

for (const cluster of clusters.values()) {
for (const txid of cluster) {
const mempoolTx = txMap[txid];
if (mempoolTx) {
const ancestors: Ancestor[] = [];
const descendants: Ancestor[] = [];
let matched = false;
cluster.forEach(relativeTxid => {
if (relativeTxid === txid) {
matched = true;
} else {
const relative = {
txid: relativeTxid,
fee: txMap[relativeTxid].fee,
weight: (txMap[relativeTxid].adjustedVsize * 4) || txMap[relativeTxid].weight,
};
if (matched) {
descendants.push(relative);
} else {
ancestors.push(relative);
}
}
});
if (mempoolTx.ancestors?.length !== ancestors.length || mempoolTx.descendants?.length !== descendants.length) {
mempoolTx.cpfpDirty = true;
}
Object.assign(mempoolTx, { ancestors, descendants, bestDescendant: null, cpfpChecked: true });
}
}
const root = cluster[cluster.length - 1];
clusterArray.push({
root: root,
height,
txs: cluster.reverse().map(txid => ({
txid,
fee: txMap[txid].fee,
weight: (txMap[txid].adjustedVsize * 4) || txMap[txid].weight,
})),
effectiveFeePerVsize: txMap[root].effectiveFeePerVsize,
});
}

return {
transactions: transactions.map(tx => txMap[tx.txid]),
clusters: clusterArray,
};
ancestorcount: number;
ancestorsize: number;
ancestorRate: number;
individualRate: number;
score: number;
}

/**
* Takes a mempool transaction and a copy of the current mempool, and calculates the CPFP data for
* that transaction (and all others in the same cluster)
*/
export function calculateCpfp(tx: MempoolTransactionExtended, mempool: { [txid: string]: MempoolTransactionExtended }): CpfpInfo {
export function calculateMempoolTxCpfp(tx: MempoolTransactionExtended, mempool: { [txid: string]: MempoolTransactionExtended }): CpfpInfo {
if (tx.cpfpUpdated && Date.now() < (tx.cpfpUpdated + CPFP_UPDATE_INTERVAL)) {
tx.cpfpDirty = false;
return {

@@ -32,30 +175,31 @@ export function calculateCpfp(tx: MempoolTransactionExtended, mempool: { [txid:
descendants: tx.descendants || [],
effectiveFeePerVsize: tx.effectiveFeePerVsize || tx.adjustedFeePerVsize || tx.feePerVsize,
sigops: tx.sigops,
fee: tx.fee,
adjustedVsize: tx.adjustedVsize,
acceleration: tx.acceleration
};
}

const ancestorMap = new Map<string, GraphTx>();
const graphTx = mempoolToGraphTx(tx);
const graphTx = convertToGraphTx(tx, memPool.getSpendMap());
ancestorMap.set(tx.txid, graphTx);

const allRelatives = expandRelativesGraph(mempool, ancestorMap);
const allRelatives = expandRelativesGraph(mempool, ancestorMap, memPool.getSpendMap());
const relativesMap = initializeRelatives(allRelatives);
const cluster = calculateCpfpCluster(tx.txid, relativesMap);

let totalVsize = 0;
let totalFee = 0;
for (const tx of cluster.values()) {
totalVsize += tx.adjustedVsize;
totalFee += tx.fee;
totalVsize += tx.vsize;
totalFee += tx.fees.base;
}
const effectiveFeePerVsize = totalFee / totalVsize;
for (const tx of cluster.values()) {
mempool[tx.txid].effectiveFeePerVsize = effectiveFeePerVsize;
mempool[tx.txid].ancestors = Array.from(tx.ancestorMap.values()).map(tx => ({ txid: tx.txid, weight: tx.weight, fee: tx.fee }));
mempool[tx.txid].descendants = Array.from(cluster.values()).filter(entry => entry.txid !== tx.txid && !tx.ancestorMap.has(entry.txid)).map(tx => ({ txid: tx.txid, weight: tx.weight, fee: tx.fee }));
mempool[tx.txid].ancestors = Array.from(tx.ancestors.values()).map(tx => ({ txid: tx.txid, weight: tx.weight, fee: tx.fees.base }));
mempool[tx.txid].descendants = Array.from(cluster.values()).filter(entry => entry.txid !== tx.txid && !tx.ancestors.has(entry.txid)).map(tx => ({ txid: tx.txid, weight: tx.weight, fee: tx.fees.base }));
mempool[tx.txid].bestDescendant = null;
mempool[tx.txid].cpfpChecked = true;
mempool[tx.txid].cpfpDirty = true;

@@ -70,88 +214,12 @@ export function calculateCpfp(tx: MempoolTransactionExtended, mempool: { [txid:
descendants: tx.descendants || [],
effectiveFeePerVsize: tx.effectiveFeePerVsize || tx.adjustedFeePerVsize || tx.feePerVsize,
sigops: tx.sigops,
fee: tx.fee,
adjustedVsize: tx.adjustedVsize,
acceleration: tx.acceleration
};
}

function mempoolToGraphTx(tx: MempoolTransactionExtended): GraphTx {
return {
...tx,
depends: tx.vin.map(v => v.txid),
spentby: tx.vout.map((v, i) => memPool.getFromSpendMap(tx.txid, i)).map(tx => tx?.txid).filter(txid => txid != null) as string[],
ancestorMap: new Map(),
fees: {
base: tx.fee,
ancestor: tx.fee,
},
ancestorcount: 1,
ancestorsize: tx.adjustedVsize,
ancestorRate: 0,
individualRate: 0,
score: 0,
};
}

/**
* Takes a map of transaction ancestors, and expands it into a full graph of up to MAX_GRAPH_SIZE in-mempool relatives
*/
function expandRelativesGraph(mempool: { [txid: string]: MempoolTransactionExtended }, ancestors: Map<string, GraphTx>): Map<string, GraphTx> {
const relatives: Map<string, GraphTx> = new Map();
const stack: GraphTx[] = Array.from(ancestors.values());
while (stack.length > 0) {
if (relatives.size > MAX_GRAPH_SIZE) {
return relatives;
}

const nextTx = stack.pop();
if (!nextTx) {
continue;
}
relatives.set(nextTx.txid, nextTx);

for (const relativeTxid of [...nextTx.depends, ...nextTx.spentby]) {
if (relatives.has(relativeTxid)) {
// already processed this tx
continue;
}
let mempoolTx = ancestors.get(relativeTxid);
if (!mempoolTx && mempool[relativeTxid]) {
mempoolTx = mempoolToGraphTx(mempool[relativeTxid]);
}
if (mempoolTx) {
stack.push(mempoolTx);
}
}
}

return relatives;
}

/**
* Efficiently sets a Map of in-mempool ancestors for each member of an expanded relative graph
* by running setAncestors on each leaf, and caching intermediate results.
* then initializes ancestor data for each transaction
*
* @param all
*/
function initializeRelatives(mempoolTxs: Map<string, GraphTx>): Map<string, GraphTx> {
const visited: Map<string, Map<string, GraphTx>> = new Map();
const leaves: GraphTx[] = Array.from(mempoolTxs.values()).filter(entry => entry.spentby.length === 0);
for (const leaf of leaves) {
setAncestors(leaf, mempoolTxs, visited);
}
mempoolTxs.forEach(entry => {
entry.ancestorMap?.forEach(ancestor => {
entry.ancestorcount++;
entry.ancestorsize += ancestor.adjustedVsize;
entry.fees.ancestor += ancestor.fees.base;
});
setAncestorScores(entry);
});
return mempoolTxs;
}

/**
* Given a root transaction and a list of in-mempool ancestors,
* Calculate the CPFP cluster

@@ -172,10 +240,10 @@ function calculateCpfpCluster(txid: string, graph: Map<string, GraphTx>): Map<st
let sortedRelatives = Array.from(graph.values()).sort(mempoolComparator);

// Iterate until we reach a cluster that includes our target tx
let maxIterations = MAX_GRAPH_SIZE;
let maxIterations = MAX_CLUSTER_ITERATIONS;
let best = sortedRelatives.shift();
let bestCluster = new Map<string, GraphTx>(best?.ancestorMap?.entries() || []);
while (sortedRelatives.length && best && (best.txid !== tx.txid && !best.ancestorMap.has(tx.txid)) && maxIterations > 0) {
let bestCluster = new Map<string, GraphTx>(best?.ancestors?.entries() || []);
while (sortedRelatives.length && best && (best.txid !== tx.txid && !best.ancestors.has(tx.txid)) && maxIterations > 0) {
maxIterations--;
if ((best && best.txid === tx.txid) || (bestCluster && bestCluster.has(tx.txid))) {
break;

@@ -190,7 +258,7 @@ function calculateCpfpCluster(txid: string, graph: Map<string, GraphTx>): Map<st
// Grab the next highest scoring entry
best = sortedRelatives.shift();
if (best) {
bestCluster = new Map<string, GraphTx>(best?.ancestorMap?.entries() || []);
bestCluster = new Map<string, GraphTx>(best?.ancestors?.entries() || []);
bestCluster.set(best?.txid, best);
}
}

@@ -200,87 +268,3 @@ function calculateCpfpCluster(txid: string, graph: Map<string, GraphTx>): Map<st

return bestCluster;
}

/**
* Remove a cluster of transactions from an in-mempool dependency graph
* and update the survivors' scores and ancestors
*
* @param cluster
* @param ancestors
*/
function removeAncestors(cluster: Map<string, GraphTx>, all: Map<string, GraphTx>): void {
// remove
cluster.forEach(tx => {
all.delete(tx.txid);
});

// update survivors
all.forEach(tx => {
cluster.forEach(remove => {
if (tx.ancestorMap?.has(remove.txid)) {
// remove as dependency
tx.ancestorMap.delete(remove.txid);
tx.depends = tx.depends.filter(parent => parent !== remove.txid);
// update ancestor sizes and fees
tx.ancestorsize -= remove.adjustedVsize;
tx.fees.ancestor -= remove.fees.base;
}
});
// recalculate fee rates
setAncestorScores(tx);
});
}

/**
* Recursively traverses an in-mempool dependency graph, and sets a Map of in-mempool ancestors
* for each transaction.
*
* @param tx
* @param all
*/
function setAncestors(tx: GraphTx, all: Map<string, GraphTx>, visited: Map<string, Map<string, GraphTx>>, depth: number = 0): Map<string, GraphTx> {
// sanity check for infinite recursion / too many ancestors (should never happen)
if (depth > MAX_GRAPH_SIZE) {
return tx.ancestorMap;
}

// initialize the ancestor map for this tx
tx.ancestorMap = new Map<string, GraphTx>();
tx.depends.forEach(parentId => {
const parent = all.get(parentId);
if (parent) {
// add the parent
tx.ancestorMap?.set(parentId, parent);
// check for a cached copy of this parent's ancestors
let ancestors = visited.get(parent.txid);
if (!ancestors) {
// recursively fetch the parent's ancestors
ancestors = setAncestors(parent, all, visited, depth + 1);
}
// and add to this tx's map
ancestors.forEach((ancestor, ancestorId) => {
tx.ancestorMap?.set(ancestorId, ancestor);
});
}
});
visited.set(tx.txid, tx.ancestorMap);

return tx.ancestorMap;
}

/**
* Take a mempool transaction, and set the fee rates and ancestor score
*
* @param tx
*/
function setAncestorScores(tx: GraphTx): GraphTx {
tx.individualRate = (tx.fees.base * 100_000_000) / tx.adjustedVsize;
tx.ancestorRate = (tx.fees.ancestor * 100_000_000) / tx.ancestorsize;
tx.score = Math.min(tx.individualRate, tx.ancestorRate);
return tx;
}

// Sort by descending score
function mempoolComparator(a: GraphTx, b: GraphTx): number {
return b.score - a.score;
}
backend/src/api/mini-miner.ts (new file, 515 lines)
@@ -0,0 +1,515 @@
import { Acceleration } from './acceleration/acceleration';
import { MempoolTransactionExtended } from '../mempool.interfaces';
import logger from '../logger';

const BLOCK_WEIGHT_UNITS = 4_000_000;
const BLOCK_SIGOPS = 80_000;
const MAX_RELATIVE_GRAPH_SIZE = 100;

export interface GraphTx {
txid: string;
vsize: number;
weight: number;
depends: string[];
spentby: string[];

ancestorcount: number;
ancestorsize: number;
fees: { // in sats
base: number;
ancestor: number;
};

ancestors: Map<string, GraphTx>,
ancestorRate: number;
individualRate: number;
score: number;
}

interface TemplateTransaction {
txid: string;
order: number;
weight: number;
adjustedVsize: number; // sigop-adjusted vsize, rounded up to the nearest integer
sigops: number;
fee: number;
feeDelta: number;
ancestors: string[];
cluster: string[];
effectiveFeePerVsize: number;
}

interface MinerTransaction extends TemplateTransaction {
inputs: string[];
feePerVsize: number;
relativesSet: boolean;
ancestorMap: Map<string, MinerTransaction>;
children: Set<MinerTransaction>;
ancestorFee: number;
ancestorVsize: number;
ancestorSigops: number;
score: number;
used: boolean;
modified: boolean;
dependencyRate: number;
}

/**
* Takes a raw transaction, and builds a graph of same-block relatives,
* and returns as a GraphTx
*
* @param tx
*/
export function getSameBlockRelatives(tx: MempoolTransactionExtended, transactions: MempoolTransactionExtended[]): Map<string, GraphTx> {
const blockTxs = new Map<string, MempoolTransactionExtended>(); // map of txs in this block
const spendMap = new Map<string, string>(); // map of outpoints to spending txids
for (const tx of transactions) {
blockTxs.set(tx.txid, tx);
for (const vin of tx.vin) {
spendMap.set(`${vin.txid}:${vin.vout}`, tx.txid);
}
}

const relatives: Map<string, GraphTx> = new Map();
const stack: string[] = [tx.txid];

// build set of same-block ancestors
while (stack.length > 0) {
const nextTxid = stack.pop();
const nextTx = nextTxid ? blockTxs.get(nextTxid) : null;
if (!nextTx || relatives.has(nextTx.txid)) {
continue;
}

const mempoolTx = convertToGraphTx(nextTx, spendMap);

for (const txid of [...mempoolTx.depends, ...mempoolTx.spentby]) {
if (txid) {
stack.push(txid);
}
}

relatives.set(mempoolTx.txid, mempoolTx);
}

return relatives;
}

/**
* Takes a raw transaction and converts it to GraphTx format
* fee and ancestor data is initialized with dummy/null values
*
* @param tx
*/
export function convertToGraphTx(tx: MempoolTransactionExtended, spendMap?: Map<string, MempoolTransactionExtended | string>): GraphTx {
return {
txid: tx.txid,
vsize: Math.max(tx.sigops * 5, Math.ceil(tx.weight / 4)),
weight: tx.weight,
fees: {
base: tx.fee || 0,
ancestor: tx.fee || 0,
},
depends: (tx.vin.map(vin => vin.txid).filter(depend => depend) as string[]),
spentby: spendMap ? (tx.vout.map((vout, index) => { const spend = spendMap.get(`${tx.txid}:${index}`); return (spend?.['txid'] || spend); }).filter(spent => spent) as string[]) : [],

ancestorcount: 1,
ancestorsize: Math.max(tx.sigops * 5, Math.ceil(tx.weight / 4)),
ancestors: new Map<string, GraphTx>(),
ancestorRate: 0,
individualRate: 0,
score: 0,
};
}
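// Illustrative sketch, not part of this diff: vsize here is sigop-adjusted, mirroring Bitcoin
// Core's bytes-per-sigop accounting (each sigop costs the equivalent of 5 vB). A hypothetical
// transaction of weight 800 (200 vB) carrying 50 sigops is therefore treated as
// Math.max(50 * 5, Math.ceil(800 / 4)) = 250 vB, which lowers its fee rate relative to raw size.
const exampleAdjustedVsize = Math.max(50 * 5, Math.ceil(800 / 4)); // 250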
|
||||
|
||||
/**
|
||||
* Takes a map of transaction ancestors, and expands it into a full graph of up to MAX_GRAPH_SIZE in-mempool relatives
|
||||
*/
|
||||
export function expandRelativesGraph(mempool: { [txid: string]: MempoolTransactionExtended }, ancestors: Map<string, GraphTx>, spendMap: Map<string, MempoolTransactionExtended>): Map<string, GraphTx> {
|
||||
const relatives: Map<string, GraphTx> = new Map();
|
||||
const stack: GraphTx[] = Array.from(ancestors.values());
|
||||
while (stack.length > 0) {
|
||||
if (relatives.size > MAX_RELATIVE_GRAPH_SIZE) {
|
||||
return relatives;
|
||||
}
|
||||
|
||||
const nextTx = stack.pop();
|
||||
if (!nextTx) {
|
||||
continue;
|
||||
}
|
||||
relatives.set(nextTx.txid, nextTx);
|
||||
|
||||
for (const relativeTxid of [...nextTx.depends, ...nextTx.spentby]) {
|
||||
if (relatives.has(relativeTxid)) {
|
||||
// already processed this tx
|
||||
continue;
|
||||
}
|
||||
let ancestorTx = ancestors.get(relativeTxid);
|
||||
if (!ancestorTx && relativeTxid in mempool) {
|
||||
const mempoolTx = mempool[relativeTxid];
|
||||
ancestorTx = convertToGraphTx(mempoolTx, spendMap);
|
||||
}
|
||||
if (ancestorTx) {
|
||||
stack.push(ancestorTx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return relatives;
|
||||
}

/**
 * Recursively traverses an in-mempool dependency graph, and sets a Map of in-mempool ancestors
 * for each transaction.
 *
 * @param tx
 * @param all
 */
function setAncestors(tx: GraphTx, all: Map<string, GraphTx>, visited: Map<string, Map<string, GraphTx>>, depth: number = 0): Map<string, GraphTx> {
  // sanity check for infinite recursion / too many ancestors (should never happen)
  if (depth > MAX_RELATIVE_GRAPH_SIZE) {
    logger.warn(`cpfp dependency calculation failed: setAncestors reached depth of ${MAX_RELATIVE_GRAPH_SIZE}, unable to proceed`);
    return tx.ancestors;
  }

  // initialize the ancestor map for this tx
  tx.ancestors = new Map<string, GraphTx>();
  tx.depends.forEach(parentId => {
    const parent = all.get(parentId);
    if (parent) {
      // add the parent
      tx.ancestors?.set(parentId, parent);
      // check for a cached copy of this parent's ancestors
      let ancestors = visited.get(parent.txid);
      if (!ancestors) {
        // recursively fetch the parent's ancestors
        ancestors = setAncestors(parent, all, visited, depth + 1);
      }
      // and add to this tx's map
      ancestors.forEach((ancestor, ancestorId) => {
        tx.ancestors?.set(ancestorId, ancestor);
      });
    }
  });
  visited.set(tx.txid, tx.ancestors);

  return tx.ancestors;
}

/**
 * Efficiently sets a Map of in-mempool ancestors for each member of an expanded relative graph
 * by running setAncestors on each leaf and caching intermediate results,
 * then initializes ancestor data for each transaction
 *
 * @param all
 */
export function initializeRelatives(mempoolTxs: Map<string, GraphTx>): Map<string, GraphTx> {
  const visited: Map<string, Map<string, GraphTx>> = new Map();
  const leaves: GraphTx[] = Array.from(mempoolTxs.values()).filter(entry => entry.spentby.length === 0);
  for (const leaf of leaves) {
    setAncestors(leaf, mempoolTxs, visited);
  }
  mempoolTxs.forEach(entry => {
    entry.ancestors?.forEach(ancestor => {
      entry.ancestorcount++;
      entry.ancestorsize += ancestor.vsize;
      entry.fees.ancestor += ancestor.fees.base;
    });
    setAncestorScores(entry);
  });
  return mempoolTxs;
}

/**
 * Remove a cluster of transactions from an in-mempool dependency graph
 * and update the survivors' scores and ancestors
 *
 * @param cluster
 * @param ancestors
 */
export function removeAncestors(cluster: Map<string, GraphTx>, all: Map<string, GraphTx>): void {
  // remove
  cluster.forEach(tx => {
    all.delete(tx.txid);
  });

  // update survivors
  all.forEach(tx => {
    cluster.forEach(remove => {
      if (tx.ancestors?.has(remove.txid)) {
        // remove as dependency
        tx.ancestors.delete(remove.txid);
        tx.depends = tx.depends.filter(parent => parent !== remove.txid);
        // update ancestor sizes and fees
        tx.ancestorsize -= remove.vsize;
        tx.fees.ancestor -= remove.fees.base;
      }
    });
    // recalculate fee rates
    setAncestorScores(tx);
  });
}

/**
 * Take a mempool transaction, and set the fee rates and ancestor score
 *
 * @param tx
 */
export function setAncestorScores(tx: GraphTx): void {
  tx.individualRate = tx.fees.base / tx.vsize;
  tx.ancestorRate = tx.fees.ancestor / tx.ancestorsize;
  tx.score = Math.min(tx.individualRate, tx.ancestorRate);
}
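
// Worked example (hypothetical values): a 10,000-sat, 200-vsize child spending from a
// 1,000-sat, 400-vsize unconfirmed parent pays 10000 / 200 = 50 sat/vB on its own, but
// its ancestor rate is (10000 + 1000) / (200 + 400) ≈ 18.3 sat/vB, so its score (the
// value mempoolComparator sorts on below) is min(50, 18.3) ≈ 18.3 sat/vB.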

// Sort by descending score
export function mempoolComparator(a: GraphTx, b: GraphTx): number {
  return b.score - a.score;
}

/*
 * Build a block using an approximation of the transaction selection algorithm from Bitcoin Core
 * (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
 */
export function makeBlockTemplate(candidates: MempoolTransactionExtended[], accelerations: Acceleration[], maxBlocks: number = 8, weightLimit: number = BLOCK_WEIGHT_UNITS, sigopLimit: number = BLOCK_SIGOPS): TemplateTransaction[] {
  const auditPool: Map<string, MinerTransaction> = new Map();
  const mempoolArray: MinerTransaction[] = [];

  candidates.forEach(tx => {
    // initializing everything up front helps V8 optimize property access later
    const adjustedVsize = Math.ceil(Math.max(tx.weight / 4, 5 * (tx.sigops || 0)));
    const feePerVsize = (tx.fee / adjustedVsize);
    auditPool.set(tx.txid, {
      txid: tx.txid,
      order: txidToOrdering(tx.txid),
      fee: tx.fee,
      feeDelta: 0,
      weight: tx.weight,
      adjustedVsize,
      feePerVsize: feePerVsize,
      effectiveFeePerVsize: feePerVsize,
      dependencyRate: feePerVsize,
      sigops: tx.sigops || 0,
      inputs: (tx.vin?.map(vin => vin.txid) || []) as string[],
      relativesSet: false,
      ancestors: [],
      cluster: [],
      ancestorMap: new Map<string, MinerTransaction>(),
      children: new Set<MinerTransaction>(),
      ancestorFee: 0,
      ancestorVsize: 0,
      ancestorSigops: 0,
      score: 0,
      used: false,
      modified: false,
    });
    mempoolArray.push(auditPool.get(tx.txid) as MinerTransaction);
  });

  // set accelerated effective fee
  for (const acceleration of accelerations) {
    const tx = auditPool.get(acceleration.txid);
    if (tx) {
      tx.feeDelta = acceleration.max_bid;
      tx.feePerVsize = ((tx.fee + tx.feeDelta) / tx.adjustedVsize);
      tx.effectiveFeePerVsize = tx.feePerVsize;
      tx.dependencyRate = tx.feePerVsize;
    }
  }

  // Build relatives graph & calculate ancestor scores
  for (const tx of mempoolArray) {
    if (!tx.relativesSet) {
      setRelatives(tx, auditPool);
    }
  }

  // Sort by descending ancestor score
  mempoolArray.sort(priorityComparator);

  // Build blocks by greedily choosing the highest feerate package
  // (i.e. the package rooted in the transaction with the best ancestor score)
  const blocks: number[][] = [];
  let blockWeight = 0;
  let blockSigops = 0;
  const transactions: MinerTransaction[] = [];
  let modified: MinerTransaction[] = [];
  const overflow: MinerTransaction[] = [];
  let failures = 0;
  while (mempoolArray.length || modified.length) {
    // skip invalid transactions
    while (mempoolArray.length && (mempoolArray[0].used || mempoolArray[0].modified)) {
      mempoolArray.shift();
    }

    // Select best next package
    let nextTx;
    const nextPoolTx = mempoolArray[0];
    const nextModifiedTx = modified[0];
    if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
      nextTx = nextPoolTx;
      mempoolArray.shift();
    } else {
      modified.shift();
      if (nextModifiedTx) {
        nextTx = nextModifiedTx;
      }
    }

    if (nextTx && !nextTx?.used) {
      // Check if the package fits into this block
      if (blocks.length >= (maxBlocks - 1) || ((blockWeight + (4 * nextTx.ancestorVsize) < weightLimit) && (blockSigops + nextTx.ancestorSigops <= sigopLimit))) {
        const ancestors: MinerTransaction[] = Array.from(nextTx.ancestorMap.values());
        // sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
        const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
        const clusterTxids = sortedTxSet.map(tx => tx.txid);
        const effectiveFeeRate = Math.min(nextTx.dependencyRate || Infinity, nextTx.ancestorFee / nextTx.ancestorVsize);
        const used: MinerTransaction[] = [];
        while (sortedTxSet.length) {
          const ancestor = sortedTxSet.pop();
          if (!ancestor) {
            continue;
          }
          ancestor.used = true;
          ancestor.usedBy = nextTx.txid;
          // update this tx with effective fee rate & relatives data
          if (ancestor.effectiveFeePerVsize !== effectiveFeeRate) {
            ancestor.effectiveFeePerVsize = effectiveFeeRate;
          }
          ancestor.cluster = clusterTxids;
          transactions.push(ancestor);
          blockWeight += ancestor.weight;
          blockSigops += ancestor.sigops;
          used.push(ancestor);
        }

        // remove these as valid package ancestors for any descendants remaining in the mempool
        if (used.length) {
          used.forEach(tx => {
            modified = updateDescendants(tx, auditPool, modified, effectiveFeeRate);
          });
        }

        failures = 0;
      } else {
        // hold this package in an overflow list while we check for smaller options
        overflow.push(nextTx);
        failures++;
      }
    }

    // this block is full
    const exceededPackageTries = failures > 1000 && blockWeight > (weightLimit - 4000);
    const queueEmpty = !mempoolArray.length && !modified.length;

    if (exceededPackageTries || queueEmpty) {
      break;
    }
  }

  for (const tx of transactions) {
    tx.ancestors = Array.from(tx.ancestorMap.values());
  }

  return transactions;
}
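
// Sketch of how an acceleration feeds the template above (hypothetical numbers): a
// 500-sat, 250-vsize transaction scores 2 sat/vB on its own; applying a 10,000-sat
// max_bid as feeDelta lifts its effective rate to (500 + 10000) / 250 = 42 sat/vB, so
// the greedy package loop selects it (and any ancestors it pays for) much earlier.
const exampleAcceleratedRate = (500 + 10000) / 250; // 42 sat/vB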

// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
function setRelatives(
  tx: MinerTransaction,
  mempool: Map<string, MinerTransaction>,
): void {
  for (const parent of tx.inputs) {
    const parentTx = mempool.get(parent);
    if (parentTx && !tx.ancestorMap?.has(parent)) {
      tx.ancestorMap.set(parent, parentTx);
      parentTx.children.add(tx);
      // visit each node only once
      if (!parentTx.relativesSet) {
        setRelatives(parentTx, mempool);
      }
      parentTx.ancestorMap.forEach((ancestor) => {
        tx.ancestorMap.set(ancestor.txid, ancestor);
      });
    }
  }
  tx.ancestorFee = (tx.fee + tx.feeDelta);
  tx.ancestorVsize = tx.adjustedVsize || 0;
  tx.ancestorSigops = tx.sigops || 0;
  tx.ancestorMap.forEach((ancestor) => {
    tx.ancestorFee += (ancestor.fee + ancestor.feeDelta);
    tx.ancestorVsize += ancestor.adjustedVsize;
    tx.ancestorSigops += ancestor.sigops;
  });
  tx.score = tx.ancestorFee / tx.ancestorVsize;
  tx.relativesSet = true;
}

// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
// avoids recursion to limit call stack depth
function updateDescendants(
  rootTx: MinerTransaction,
  mempool: Map<string, MinerTransaction>,
  modified: MinerTransaction[],
  clusterRate: number,
): MinerTransaction[] {
  const descendantSet: Set<MinerTransaction> = new Set();
  // stack of nodes left to visit
  const descendants: MinerTransaction[] = [];
  let descendantTx: MinerTransaction | undefined;
  rootTx.children.forEach(childTx => {
    if (!descendantSet.has(childTx)) {
      descendants.push(childTx);
      descendantSet.add(childTx);
    }
  });
  while (descendants.length) {
    descendantTx = descendants.pop();
    if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
      // remove tx as ancestor
      descendantTx.ancestorMap.delete(rootTx.txid);
      descendantTx.ancestorFee -= (rootTx.fee + rootTx.feeDelta);
      descendantTx.ancestorVsize -= rootTx.adjustedVsize;
      descendantTx.ancestorSigops -= rootTx.sigops;
      descendantTx.score = descendantTx.ancestorFee / descendantTx.ancestorVsize;
      descendantTx.dependencyRate = descendantTx.dependencyRate ? Math.min(descendantTx.dependencyRate, clusterRate) : clusterRate;

      if (!descendantTx.modified) {
        descendantTx.modified = true;
        modified.push(descendantTx);
      }

      // add this node's children to the stack
      descendantTx.children.forEach(childTx => {
        // visit each node only once
        if (!descendantSet.has(childTx)) {
          descendants.push(childTx);
          descendantSet.add(childTx);
        }
      });
    }
  }
  // return new, resorted modified list
  return modified.sort(priorityComparator);
}

// Used to sort an array of MinerTransactions by descending ancestor score
function priorityComparator(a: MinerTransaction, b: MinerTransaction): number {
  if (b.score === a.score) {
    // tie-break by txid for stability
    return a.order - b.order;
  } else {
    return b.score - a.score;
  }
}

// returns the most significant 4 bytes of the txid as an integer
function txidToOrdering(txid: string): number {
  return parseInt(
    txid.substring(62, 64) +
      txid.substring(60, 62) +
      txid.substring(58, 60) +
      txid.substring(56, 58),
    16
  );
}
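
// Example (hypothetical txid): the last four bytes of the 64-character txid string are
// reassembled in reverse, so for a txid ending in "...aabbccdd" the ordering key is
// parseInt('ddccbbaa', 16) = 3721182122. priorityComparator uses this purely as a
// deterministic tie-break between packages with equal ancestor scores.
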
@ -103,7 +103,7 @@ class TransactionUtils {
    }
    const feePerVbytes = (transaction.fee || 0) / (transaction.weight / 4);
    const transactionExtended: TransactionExtended = Object.assign({
      vsize: Math.round(transaction.weight / 4),
      vsize: transaction.weight / 4,
      feePerVsize: feePerVbytes,
      effectiveFeePerVsize: feePerVbytes,
    }, transaction);

@ -123,7 +123,7 @@ class TransactionUtils {
    const adjustedFeePerVsize = (transaction.fee || 0) / adjustedVsize;
    const transactionExtended: MempoolTransactionExtended = Object.assign(transaction, {
      order: this.txidToOrdering(transaction.txid),
      vsize: Math.round(transaction.weight / 4),
      vsize,
      adjustedVsize,
      sigops,
      feePerVsize: feePerVbytes,

@ -33,7 +33,7 @@ interface AddressTransactions {
  removed: MempoolTransactionExtended[],
}
import bitcoinSecondClient from './bitcoin/bitcoin-second-client';
import { calculateCpfp } from './cpfp';
import { calculateMempoolTxCpfp } from './cpfp';

// valid 'want' subscriptions
const wantable = [

@ -827,7 +827,7 @@ class WebsocketHandler {
        accelerationPositions: memPool.getAccelerationPositions(mempoolTx.txid),
      };
      if (!mempoolTx.cpfpChecked && !mempoolTx.acceleration) {
        calculateCpfp(mempoolTx, newMempool);
        calculateMempoolTxCpfp(mempoolTx, newMempool);
      }
      if (mempoolTx.cpfpDirty) {
        positionData['cpfp'] = {

@ -866,7 +866,7 @@ class WebsocketHandler {
        acceleratedAt: mempoolTx.acceleratedAt || undefined,
      };
      if (!mempoolTx.cpfpChecked) {
        calculateCpfp(mempoolTx, newMempool);
        calculateMempoolTxCpfp(mempoolTx, newMempool);
      }
      if (mempoolTx.cpfpDirty) {
        txInfo.cpfp = {

@ -223,6 +223,7 @@ export interface CpfpInfo {
  sigops?: number;
  adjustedVsize?: number,
  acceleration?: boolean,
  fee?: number;
}

export interface TransactionStripped {

@ -1,4 +1,4 @@
import { AccelerationInfo, makeBlockTemplate } from '../api/acceleration/acceleration';
import { AccelerationInfo } from '../api/acceleration/acceleration';
import { RowDataPacket } from 'mysql2';
import DB from '../database';
import logger from '../logger';

@ -11,6 +11,7 @@ import accelerationCosts from '../api/acceleration/acceleration';
import bitcoinApi from '../api/bitcoin/bitcoin-api-factory';
import transactionUtils from '../api/transaction-utils';
import { BlockExtended, MempoolTransactionExtended } from '../mempool.interfaces';
import { makeBlockTemplate } from '../api/mini-miner';

export interface PublicAcceleration {
  txid: string,

@ -114,6 +114,43 @@ class BlocksSummariesRepository {
    return [];
  }

  public async $getSummariesBelowVersion(version: number): Promise<{ height: number, id: string, version: number }[]> {
    try {
      const [rows]: any[] = await DB.query(`
        SELECT
          height,
          id,
          version
        FROM blocks_summaries
        WHERE version < ?
        ORDER BY height DESC;`, [version]);
      return rows;
    } catch (e) {
      logger.err(`Cannot get block summaries below version. Reason: ` + (e instanceof Error ? e.message : e));
    }

    return [];
  }

  public async $getTemplatesBelowVersion(version: number): Promise<{ height: number, id: string, version: number }[]> {
    try {
      const [rows]: any[] = await DB.query(`
        SELECT
          blocks_summaries.height as height,
          blocks_templates.id as id,
          blocks_templates.version as version
        FROM blocks_templates
        JOIN blocks_summaries ON blocks_templates.id = blocks_summaries.id
        WHERE blocks_templates.version < ?
        ORDER BY height DESC;`, [version]);
      return rows;
    } catch (e) {
      logger.err(`Cannot get block templates below version. Reason: ` + (e instanceof Error ? e.message : e));
    }

    return [];
  }

  /**
   * Get the fee percentiles if the block has already been indexed, [] otherwise
   *

@ -91,6 +91,26 @@ class CpfpRepository {
    return;
  }

  public async $getClustersAt(height: number): Promise<CpfpCluster[]> {
    const [clusterRows]: any = await DB.query(
      `
        SELECT *
        FROM compact_cpfp_clusters
        WHERE height = ?
      `,
      [height]
    );
    return clusterRows.map(cluster => {
      if (cluster?.txs) {
        cluster.effectiveFeePerVsize = cluster.fee_rate;
        cluster.txs = this.unpack(cluster.txs);
        return cluster;
      } else {
        return null;
      }
    }).filter(cluster => cluster !== null);
  }

  public async $deleteClustersFrom(height: number): Promise<void> {
    logger.info(`Delete newer cpfp clusters from height ${height} from the database`);
    try {

@ -122,6 +142,37 @@ class CpfpRepository {
    }
  }

  public async $deleteClustersAt(height: number): Promise<void> {
    logger.info(`Delete cpfp clusters at height ${height} from the database`);
    try {
      const [rows] = await DB.query(
        `
          SELECT txs, height, root from compact_cpfp_clusters
          WHERE height = ?
        `,
        [height]
      ) as RowDataPacket[][];
      if (rows?.length) {
        for (const clusterToDelete of rows) {
          const txs = this.unpack(clusterToDelete?.txs);
          for (const tx of txs) {
            await transactionRepository.$removeTransaction(tx.txid);
          }
        }
      }
      await DB.query(
        `
          DELETE from compact_cpfp_clusters
          WHERE height = ?
        `,
        [height]
      );
    } catch (e: any) {
      logger.err(`Cannot delete cpfp clusters from db. Reason: ` + (e instanceof Error ? e.message : e));
      throw e;
    }
  }

  // insert a dummy row to mark that we've indexed as far as this block
  public async $insertProgressMarker(height: number): Promise<void> {
    try {

@ -190,6 +241,32 @@ class CpfpRepository {
      return [];
    }
  }

  // returns `true` if two sets of CPFP clusters are deeply identical
  public compareClusters(clustersA: CpfpCluster[], clustersB: CpfpCluster[]): boolean {
    if (clustersA.length !== clustersB.length) {
      return false;
    }

    clustersA = clustersA.sort((a, b) => a.root.localeCompare(b.root));
    clustersB = clustersB.sort((a, b) => a.root.localeCompare(b.root));

    for (let i = 0; i < clustersA.length; i++) {
      if (clustersA[i].root !== clustersB[i].root) {
        return false;
      }
      if (clustersA[i].txs.length !== clustersB[i].txs.length) {
        return false;
      }
      for (let j = 0; j < clustersA[i].txs.length; j++) {
        if (clustersA[i].txs[j].txid !== clustersB[i].txs[j].txid) {
          return false;
        }
      }
    }

    return true;
  }
}

export default new CpfpRepository();

@ -201,7 +201,7 @@
  <span i18n="address.error.loading-address-data">Error loading address data.</span>
  <br>
  <ng-container i18n="Electrum server limit exceeded error">
    <i>There many transactions on this address, more than your backend can handle. See more on <a href="/docs/faq#address-lookup-issues">setting up a stronger backend</a>.</i>
    <i>There are too many transactions on this address, more than your backend can handle. See more on <a href="/docs/faq#address-lookup-issues">setting up a stronger backend</a>.</i>
    <br><br>
    Consider viewing this address on the official Mempool website instead:
  </ng-container>

@ -68,7 +68,7 @@ export class BlockOverviewTooltipComponent implements OnChanges {
      this.effectiveRate = this.tx.rate;
      const txFlags = BigInt(this.tx.flags) || 0n;
      this.acceleration = this.tx.acc || (txFlags & TransactionFlags.acceleration);
      this.hasEffectiveRate = this.tx.acc || Math.abs((this.fee / this.vsize) - this.effectiveRate) > 0.05
      this.hasEffectiveRate = this.tx.acc || !(Math.abs((this.fee / this.vsize) - this.effectiveRate) <= 0.1 && Math.abs((this.fee / Math.ceil(this.vsize)) - this.effectiveRate) <= 0.1)
        || (txFlags && (txFlags & (TransactionFlags.cpfp_child | TransactionFlags.cpfp_parent)) > 0n);
      this.filters = this.tx.flags ? toFilters(txFlags).filter(f => f.tooltip) : [];
      this.activeFilters = {}

@ -599,7 +599,7 @@ export class TransactionComponent implements OnInit, AfterViewInit, OnDestroy {
        bestDescendant: tx.bestDescendant,
      });
      const hasRelatives = !!(tx.ancestors?.length || tx.bestDescendant);
      this.hasEffectiveFeeRate = hasRelatives || (tx.effectiveFeePerVsize && (Math.abs(tx.effectiveFeePerVsize - tx.feePerVsize) > 0.01));
      this.hasEffectiveFeeRate = hasRelatives || (tx.effectiveFeePerVsize && (Math.abs(tx.effectiveFeePerVsize - tx.feePerVsize) >= 0.1));
    } else {
      this.fetchCpfp$.next(this.tx.txid);
    }