Merge branch 'master' into hunicus/manual-deployment-enterprise

commit 2fffd8b43c
Author: softsimon
Date: 2023-07-19 14:19:03 +09:00
Committed via GitHub (GPG key ID: 4AEE18F83AFDEB23)

413 changed files with 46412 additions and 22243 deletions


@@ -9,7 +9,7 @@ jobs:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16.16.0", "18.14.1"]
node: ["16", "17", "18"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"
@@ -27,6 +27,11 @@ jobs:
node-version: ${{ matrix.node }}
registry-url: "https://registry.npmjs.org"
- name: Install 1.70.x Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: 1.70
- name: Install
if: ${{ matrix.flavor == 'dev'}}
run: npm ci
@@ -55,7 +60,7 @@ jobs:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16.16.0", "18.14.1"]
node: ["16", "17", "18"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"

.github/workflows/get_image_digest.yml (vendored, new file, 26 lines)

@@ -0,0 +1,26 @@
name: 'Print images digest'
on:
workflow_dispatch:
inputs:
version:
description: 'Image Version'
required: false
default: 'latest'
type: string
jobs:
print-images-sha:
runs-on: 'ubuntu-latest'
name: Print digest for images
steps:
- name: Checkout
uses: actions/checkout@v3
with:
path: digest
- name: Run script
working-directory: digest
run: |
sh ./docker/scripts/get_image_digest.sh $VERSION
env:
VERSION: ${{ github.event.inputs.version }}

.gitignore (vendored, 1 line changed)

@@ -5,3 +5,4 @@ backend/mempool-config.json
*.swp
frontend/src/resources/config.template.js
frontend/src/resources/config.js
target


@@ -1,5 +1,6 @@
{
"editor.tabSize": 2,
"typescript.preferences.importModuleSpecifier": "relative",
"typescript.tsdk": "./backend/node_modules/typescript/lib"
"typescript.tsdk": "./backend/node_modules/typescript/lib",
"rust-analyzer.procMacro.ignored": { "napi-derive": ["napi"] }
}

Cargo.lock (generated, new file, 533 lines)

@@ -0,0 +1,533 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04"
dependencies = [
"memchr",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "bitflags"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
[[package]]
name = "bytemuck"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea"
[[package]]
name = "bytes"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "ctor"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1586fa608b1dab41f667475b4a41faec5ba680aee428bfa5de4ea520fdc6e901"
dependencies = [
"quote",
"syn 2.0.20",
]
[[package]]
name = "gbt"
version = "0.1.0"
dependencies = [
"bytemuck",
"bytes",
"napi",
"napi-build",
"napi-derive",
"priority-queue",
"tracing",
"tracing-log",
"tracing-subscriber",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hermit-abi"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
dependencies = [
"libc",
]
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
[[package]]
name = "libloading"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
]
[[package]]
name = "log"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "matchers"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
dependencies = [
"regex-automata",
]
[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "napi"
version = "2.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ede2d12cd6fce44da537a4be1f5510c73be2506c2e32dfaaafd1f36968f3a0e"
dependencies = [
"bitflags",
"ctor",
"napi-derive",
"napi-sys",
"once_cell",
"tokio",
]
[[package]]
name = "napi-build"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "882a73d9ef23e8dc2ebbffb6a6ae2ef467c0f18ac10711e4cc59c5485d41df0e"
[[package]]
name = "napi-derive"
version = "2.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da1c6a8fa84d549aa8708fcd062372bf8ec6e849de39016ab921067d21bde367"
dependencies = [
"cfg-if",
"convert_case",
"napi-derive-backend",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "napi-derive-backend"
version = "1.0.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20bbc7c69168d06a848f925ec5f0e0997f98e8c8d4f2cc30157f0da51c009e17"
dependencies = [
"convert_case",
"once_cell",
"proc-macro2",
"quote",
"regex",
"semver",
"syn 1.0.109",
]
[[package]]
name = "napi-sys"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "166b5ef52a3ab5575047a9fe8d4a030cdd0f63c96f071cd6907674453b07bae3"
dependencies = [
"libloading",
]
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
"overload",
"winapi",
]
[[package]]
name = "num_cpus"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "once_cell"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "pin-project-lite"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
[[package]]
name = "priority-queue"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fff39edfcaec0d64e8d0da38564fad195d2d51b680940295fcc307366e101e61"
dependencies = [
"autocfg",
"indexmap",
]
[[package]]
name = "proc-macro2"
version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.7.2",
]
[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
"regex-syntax 0.6.29",
]
[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
[[package]]
name = "semver"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "sharded-slab"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
dependencies = [
"lazy_static",
]
[[package]]
name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb8d4cebc40aa517dfb69618fa647a346562e67228e2236ae0042ee6ac14775"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thread_local"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
dependencies = [
"cfg-if",
"once_cell",
]
[[package]]
name = "tokio"
version = "1.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2"
dependencies = [
"autocfg",
"num_cpus",
"pin-project-lite",
"windows-sys",
]
[[package]]
name = "tracing"
version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
dependencies = [
"cfg-if",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.20",
]
[[package]]
name = "tracing-core"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
dependencies = [
"once_cell",
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
dependencies = [
"lazy_static",
"log",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
]
[[package]]
name = "unicode-ident"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "unicode-segmentation"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "valuable"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
[[package]]
name = "windows_i686_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
[[package]]
name = "windows_i686_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"

Cargo.toml (new file, 8 lines)

@@ -0,0 +1,8 @@
[workspace]
members = [
"./backend/rust-gbt",
]
[profile.release]
lto = true
codegen-units = 1


@@ -1,13 +1,13 @@
# The Mempool Open Source Project™ [![mempool](https://img.shields.io/endpoint?url=https://dashboard.cypress.io/badge/simple/ry4br7/master&style=flat-square)](https://dashboard.cypress.io/projects/ry4br7/runs)
https://user-images.githubusercontent.com/232186/222445818-234aa6c9-c233-4c52-b3f0-e32b8232893b.mp4
https://user-images.githubusercontent.com/93150691/226236121-375ea64f-b4a1-4cc0-8fad-a6fb33226840.mp4
<br>
Mempool is the fully-featured mempool visualizer, explorer, and API service running at [mempool.space](https://mempool.space/).
It is an open-source project developed and operated for the benefit of the Bitcoin community, with a focus on the emerging transaction fee market that is evolving Bitcoin into a multi-layer ecosystem.
![mempool](https://mempool.space/resources/screenshots/v2.4.0-dashboard.png)
# Installation Methods
Mempool can be self-hosted on a wide variety of your own hardware, ranging from a simple one-click installation on a Raspberry Pi full-node distro all the way to a robust production instance on a powerful FreeBSD server.


@@ -79,6 +79,8 @@ Query OK, 0 rows affected (0.00 sec)
_Make sure to use Node.js 16.10 and npm 7._
_The build process requires [Rust](https://www.rust-lang.org/tools/install) to be installed._
Install dependencies with `npm` and build the backend:
```


@@ -27,13 +27,18 @@
"AUDIT": false,
"ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false,
"CPFP_INDEXING": false
"RUST_GBT": false,
"CPFP_INDEXING": false,
"DISK_CACHE_BLOCK_INTERVAL": 6,
"MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
"ALLOW_UNREACHABLE": true
},
"CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool"
"PASSWORD": "mempool",
"TIMEOUT": 60000
},
"ELECTRUM": {
"HOST": "127.0.0.1",
@@ -41,13 +46,16 @@
"TLS_ENABLED": true
},
"ESPLORA": {
"REST_API_URL": "http://127.0.0.1:3000"
"REST_API_URL": "http://127.0.0.1:3000",
"UNIX_SOCKET_PATH": "/tmp/esplora-bitcoin-mainnet",
"RETRY_UNIX_SOCKET_AFTER": 30000
},
"SECOND_CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool"
"PASSWORD": "mempool",
"TIMEOUT": 60000
},
"DATABASE": {
"ENABLED": true,
@@ -56,7 +64,8 @@
"SOCKET": "/var/run/mysql/mysql.sock",
"DATABASE": "mempool",
"USERNAME": "mempool",
"PASSWORD": "mempool"
"PASSWORD": "mempool",
"TIMEOUT": 180000
},
"SYSLOG": {
"ENABLED": true,
@@ -91,7 +100,8 @@
"LND": {
"TLS_CERT_PATH": "tls.cert",
"MACAROON_PATH": "readonly.macaroon",
"REST_API_URL": "https://localhost:8080"
"REST_API_URL": "https://localhost:8080",
"TIMEOUT": 10000
},
"CLIGHTNING": {
"SOCKET": "lightning-rpc"
@@ -115,5 +125,16 @@
"LIQUID_ONION": "http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1",
"BISQ_URL": "https://bisq.markets/api",
"BISQ_ONION": "http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api"
},
"REPLICATION": {
"ENABLED": false,
"AUDIT": false,
"AUDIT_START_HEIGHT": 774000,
"SERVERS": [
"list",
"of",
"trusted",
"servers"
]
}
}
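
The new `REPLICATION` block above is shaped as follows at runtime. A purely illustrative TypeScript typing, with field names taken from the sample config (the interface itself is not part of this commit):

```ts
// Illustrative typing for the new REPLICATION block in mempool-config.json.
interface ReplicationConfig {
  ENABLED: boolean;
  AUDIT: boolean;
  AUDIT_START_HEIGHT: number; // block height at which audit replication begins
  SERVERS: string[];          // list of trusted replication servers
}
```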

backend/package-lock.json (generated, 7121 lines changed)

File diff suppressed because it is too large.


@@ -1,6 +1,6 @@
{
"name": "mempool-backend",
"version": "2.5.0-dev",
"version": "3.0.0-dev",
"description": "Bitcoin mempool visualizer and blockchain explorer backend",
"license": "GNU Affero General Public License v3.0",
"homepage": "https://mempool.space",
@@ -22,47 +22,50 @@
"main": "index.ts",
"scripts": {
"tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json",
"build": "npm run tsc && npm run create-resources",
"build": "npm run build-rust && npm run tsc && npm run create-resources",
"create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js",
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint)",
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && mv rust-gbt package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint @napi-rs ../rust-gbt/target ../rust-gbt/node_modules ../rust-gbt/src)",
"start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=16384 dist/index.js",
"reindex-updated-pools": "npm run start-production --update-pools",
"reindex-all-blocks": "npm run start-production --update-pools --reindex-blocks",
"test": "./node_modules/.bin/jest --coverage",
"lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\""
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\"",
"build-rust": "cd rust-gbt && npm install"
},
"dependencies": {
"@babel/core": "^7.20.12",
"@mempool/electrum-client": "^1.1.7",
"@types/node": "^16.18.11",
"axios": "~0.27.2",
"bitcoinjs-lib": "~6.1.0",
"@babel/core": "^7.21.3",
"@mempool/electrum-client": "1.1.9",
"@types/node": "^18.15.3",
"axios": "~1.4.0",
"bitcoinjs-lib": "~6.1.3",
"crypto-js": "~4.1.1",
"express": "~4.18.2",
"maxmind": "~4.3.8",
"mysql2": "~2.3.3",
"node-worker-threads-pool": "~1.5.1",
"maxmind": "~4.3.11",
"mysql2": "~3.5.2",
"rust-gbt": "file:./rust-gbt",
"socks-proxy-agent": "~7.0.0",
"typescript": "~4.7.4",
"ws": "~8.11.0"
"typescript": "~4.9.3",
"ws": "~8.13.0"
},
"devDependencies": {
"@babel/core": "^7.20.7",
"@babel/core": "^7.21.3",
"@babel/code-frame": "^7.18.6",
"@types/compression": "^1.7.2",
"@types/crypto-js": "^4.1.1",
"@types/express": "^4.17.15",
"@types/jest": "^29.2.5",
"@types/ws": "~8.5.4",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"eslint": "^8.31.0",
"eslint-config-prettier": "^8.5.0",
"jest": "^29.3.1",
"prettier": "^2.8.2",
"ts-jest": "^29.0.3",
"@types/express": "^4.17.17",
"@types/jest": "^29.5.0",
"@types/ws": "~8.5.5",
"@typescript-eslint/eslint-plugin": "^5.55.0",
"@typescript-eslint/parser": "^5.55.0",
"eslint": "^8.36.0",
"eslint-config-prettier": "^8.8.0",
"jest": "^29.5.0",
"prettier": "^3.0.0",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.1"
}
}

backend/rust-gbt/.gitignore (vendored, new file, 4 lines)

@@ -0,0 +1,4 @@
*.node
**/node_modules
**/.DS_Store
npm-debug.log*


@@ -0,0 +1,25 @@
[package]
name = "gbt"
version = "0.1.0"
description = "An inefficient re-implementation of the getBlockTemplate algorithm in Rust"
authors = ["mononaut"]
edition = "2021"
publish = false
[lib]
crate-type = ["cdylib"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
priority-queue = "1.3.2"
bytes = "1.4.0"
napi = { version = "2.13.2", features = ["napi8", "tokio_rt"] }
napi-derive = "2.13.0"
bytemuck = "1.13.1"
tracing = "0.1.36"
tracing-log = "0.1.3"
tracing-subscriber = { version = "0.3.15", features = ["env-filter"]}
[build-dependencies]
napi-build = "2.0.1"

backend/rust-gbt/README.md (new file, 123 lines)

@@ -0,0 +1,123 @@
# gbt
**gbt:** rust implementation of the getBlockTemplate algorithm
This project was bootstrapped by [napi](https://www.npmjs.com/package/@napi-rs/cli).
## Installing gbt
Installing gbt requires a [supported version of Node and Rust](https://github.com/napi-rs/napi-rs#platform-support).
The build process also requires [Rust](https://www.rust-lang.org/tools/install) to be installed.
You can install the project with npm. In the project directory, run:
```sh
$ npm install
```
This fully installs the project, including installing any dependencies and running the build.
## Building gbt
If you have already installed the project and only want to run the build, run:
```sh
$ npm run build
```
This command uses the [napi build](https://www.npmjs.com/package/@napi-rs/cli) utility to run the Rust build and copy the built library into `./gbt.[TARGET_TRIPLE].node`.
## Exploring gbt
After building gbt, you can explore its exports at the Node REPL:
```sh
$ npm install
$ node
> const { GbtGenerator } = require('.')
> new GbtGenerator()
GbtGenerator {}
```
## Available Scripts
In the project directory, you can run:
### `npm install`
Installs the project, including running `npm run build-release`.
### `npm build`
Builds the Node addon (`gbt.[TARGET_TRIPLE].node`) from source.
Additional [`cargo build`](https://doc.rust-lang.org/cargo/commands/cargo-build.html) arguments may be passed to `npm build` and `npm build-*` commands. For example, to enable a [cargo feature](https://doc.rust-lang.org/cargo/reference/features.html):
```
npm run build -- --feature=beetle
```
#### `npm build-debug`
Alias for `npm build`.
#### `npm build-release`
Same as [`npm build`](#npm-build), but builds the module with the [`release`](https://doc.rust-lang.org/cargo/reference/profiles.html#release) profile. Release builds compile more slowly, but run faster.
### `npm test`
Runs the unit tests by calling `cargo test`. You can learn more about [adding tests to your Rust code](https://doc.rust-lang.org/book/ch11-01-writing-tests.html) from the [Rust book](https://doc.rust-lang.org/book/).
## Project Layout
The directory structure of this project is:
```
gbt/
├── Cargo.toml
├── README.md
├── gbt.[TARGET_TRIPLE].node
├── package.json
├── src/
| └── lib.rs
└── target/
```
### Cargo.toml
The Cargo [manifest file](https://doc.rust-lang.org/cargo/reference/manifest.html), which informs the `cargo` command.
### README.md
This file.
### gbt.\[TARGET_TRIPLE\].node
The Node addon—i.e., a binary Node module—generated by building the project. This is the main module for this package, as dictated by the `"main"` key in `package.json`.
Under the hood, a [Node addon](https://nodejs.org/api/addons.html) is a [dynamically-linked shared object](https://en.wikipedia.org/wiki/Library_(computing)#Shared_libraries). The `"build"` script produces this file by copying it from within the `target/` directory, which is where the Rust build produces the shared object.
### package.json
The npm [manifest file](https://docs.npmjs.com/cli/v7/configuring-npm/package-json), which informs the `npm` command.
### src/
The directory tree containing the Rust source code for the project.
### src/lib.rs
The Rust library's main module.
### target/
Binary artifacts generated by the Rust build.
## Learn More
To learn more about napi-rs, see the [Napi-RS documentation](https://napi.rs/docs/introduction/getting-started).
To learn more about Rust, see the [Rust documentation](https://www.rust-lang.org).
To learn more about Node, see the [Node documentation](https://nodejs.org).


@@ -0,0 +1,3 @@
fn main() {
napi_build::setup();
}

backend/rust-gbt/index.d.ts (vendored, new file, 45 lines)

@@ -0,0 +1,45 @@
/* tslint:disable */
/* eslint-disable */
/* auto-generated by NAPI-RS */
export interface ThreadTransaction {
uid: number
order: number
fee: number
weight: number
sigops: number
effectiveFeePerVsize: number
inputs: Array<number>
}
export class GbtGenerator {
constructor()
/**
* # Errors
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
make(mempool: Array<ThreadTransaction>, maxUid: number): Promise<GbtResult>
/**
* # Errors
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
update(newTxs: Array<ThreadTransaction>, removeTxs: Array<number>, maxUid: number): Promise<GbtResult>
}
/**
* The result from calling the gbt function.
*
* This tuple contains the following:
* blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
* block_weights: A Vector of total weights per block.
* clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
* rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
*/
export class GbtResult {
blocks: Array<Array<number>>
blockWeights: Array<number>
clusters: Array<Array<number>>
rates: Array<Array<number>>
constructor(blocks: Array<Array<number>>, blockWeights: Array<number>, clusters: Array<Array<number>>, rates: Array<Array<number>>)
}
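
Taken together, these declarations are the entire JS-facing surface of the addon. A rough usage sketch in TypeScript — the transaction values are invented for illustration, and it assumes the addon has been built so that `rust-gbt` resolves (the backend's package.json maps it to `file:./rust-gbt`):

```ts
import { GbtGenerator, ThreadTransaction } from 'rust-gbt';

async function demo(): Promise<void> {
  // Hypothetical two-transaction mempool: tx 1 spends an output of tx 0,
  // expressed via inputs (parent uids). Vsize here is weight / 4 = 140.
  const mempool: ThreadTransaction[] = [
    { uid: 0, order: 0, fee: 5000, weight: 560, sigops: 4, effectiveFeePerVsize: 35.7, inputs: [] },
    { uid: 1, order: 1, fee: 800, weight: 560, sigops: 4, effectiveFeePerVsize: 5.7, inputs: [0] },
  ];

  const generator = new GbtGenerator();
  // maxUid must be at least the highest uid present in the mempool.
  const result = await generator.make(mempool, 1);
  console.log(result.blocks); // 2D array of uids, one inner array per projected block
  console.log(result.rates);  // [uid, newEffectiveFeePerVsize] pairs for "dirty" txs
}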

backend/rust-gbt/index.js (new file, 258 lines)

@@ -0,0 +1,258 @@
/* tslint:disable */
/* eslint-disable */
/* prettier-ignore */
/* auto-generated by NAPI-RS */
const { existsSync, readFileSync } = require('fs')
const { join } = require('path')
const { platform, arch } = process
let nativeBinding = null
let localFileExisted = false
let loadError = null
function isMusl() {
// For Node 10
if (!process.report || typeof process.report.getReport !== 'function') {
try {
const lddPath = require('child_process').execSync('which ldd').toString().trim()
return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
return true
}
} else {
const { glibcVersionRuntime } = process.report.getReport().header
return !glibcVersionRuntime
}
}
switch (platform) {
case 'android':
switch (arch) {
case 'arm64':
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.android-arm64.node')
} else {
nativeBinding = require('gbt-android-arm64')
}
} catch (e) {
loadError = e
}
break
case 'arm':
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm-eabi.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.android-arm-eabi.node')
} else {
nativeBinding = require('gbt-android-arm-eabi')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Android ${arch}`)
}
break
case 'win32':
switch (arch) {
case 'x64':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-x64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-x64-msvc.node')
} else {
nativeBinding = require('gbt-win32-x64-msvc')
}
} catch (e) {
loadError = e
}
break
case 'ia32':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-ia32-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-ia32-msvc.node')
} else {
nativeBinding = require('gbt-win32-ia32-msvc')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-arm64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-arm64-msvc.node')
} else {
nativeBinding = require('gbt-win32-arm64-msvc')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
break
case 'darwin':
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-universal.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-universal.node')
} else {
nativeBinding = require('gbt-darwin-universal')
}
break
} catch {}
switch (arch) {
case 'x64':
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-x64.node')
} else {
nativeBinding = require('gbt-darwin-x64')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'gbt.darwin-arm64.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-arm64.node')
} else {
nativeBinding = require('gbt-darwin-arm64')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on macOS: ${arch}`)
}
break
case 'freebsd':
if (arch !== 'x64') {
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
}
localFileExisted = existsSync(join(__dirname, 'gbt.freebsd-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.freebsd-x64.node')
} else {
nativeBinding = require('gbt-freebsd-x64')
}
} catch (e) {
loadError = e
}
break
case 'linux':
switch (arch) {
case 'x64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-x64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-x64-musl.node')
} else {
nativeBinding = require('gbt-linux-x64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-x64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-x64-gnu.node')
} else {
nativeBinding = require('gbt-linux-x64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm64-musl.node')
} else {
nativeBinding = require('gbt-linux-arm64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm64-gnu.node')
} else {
nativeBinding = require('gbt-linux-arm64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm':
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm-gnueabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm-gnueabihf.node')
} else {
nativeBinding = require('gbt-linux-arm-gnueabihf')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Linux: ${arch}`)
}
break
default:
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}
if (!nativeBinding) {
if (loadError) {
throw loadError
}
throw new Error(`Failed to load native binding`)
}
const { GbtGenerator, GbtResult } = nativeBinding
module.exports.GbtGenerator = GbtGenerator
module.exports.GbtResult = GbtResult

backend/rust-gbt/package-lock.json (generated, new file, 34 lines)

@@ -0,0 +1,34 @@
{
"name": "gbt",
"version": "3.0.0-dev",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "gbt",
"version": "3.0.0-dev",
"hasInstallScript": true,
"dependencies": {
"@napi-rs/cli": "^2.16.1"
},
"engines": {
"node": ">= 12"
}
},
"node_modules/@napi-rs/cli": {
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.16.1.tgz",
"integrity": "sha512-L0Gr5iEQIDEbvWdDr1HUaBOxBSHL1VZhWSk1oryawoT8qJIY+KGfLFelU+Qma64ivCPbxYpkfPoKYVG3rcoGIA==",
"bin": {
"napi": "scripts/index.js"
},
"engines": {
"node": ">= 10"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Brooooooklyn"
}
}
}
}


@@ -0,0 +1,33 @@
{
"name": "gbt",
"version": "3.0.0-dev",
"description": "An inefficient re-implementation of the getBlockTemplate algorithm in Rust",
"main": "index.js",
"types": "index.d.ts",
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform",
"build-debug": "npm run build",
"build-release": "npm run build -- --release --strip",
"install": "npm run build-release",
"prepublishOnly": "napi prepublish -t npm",
"test": "cargo test"
},
"author": "mononaut",
"napi": {
"name": "gbt",
"triples": {
"defaults": false,
"additional": [
"x86_64-unknown-linux-gnu",
"x86_64-unknown-freebsd"
]
}
},
"dependencies": {
"@napi-rs/cli": "^2.16.1"
},
"engines": {
"node": ">= 12"
}
}


@@ -0,0 +1,220 @@
use crate::{
u32_hasher_types::{u32hashset_new, U32HasherState},
ThreadTransaction,
};
use std::{
cmp::Ordering,
collections::HashSet,
hash::{Hash, Hasher},
};
#[allow(clippy::struct_excessive_bools)]
#[derive(Clone, Debug)]
pub struct AuditTransaction {
pub uid: u32,
order: u32,
pub fee: u64,
pub weight: u32,
// exact sigop-adjusted weight
pub sigop_adjusted_weight: u32,
// sigop-adjusted vsize rounded up to the next integer
pub sigop_adjusted_vsize: u32,
pub sigops: u32,
adjusted_fee_per_vsize: f64,
pub effective_fee_per_vsize: f64,
pub dependency_rate: f64,
pub inputs: Vec<u32>,
pub relatives_set_flag: bool,
pub ancestors: HashSet<u32, U32HasherState>,
pub children: HashSet<u32, U32HasherState>,
ancestor_fee: u64,
ancestor_sigop_adjusted_weight: u32,
ancestor_sigop_adjusted_vsize: u32,
ancestor_sigops: u32,
// Safety: Must be private to prevent NaN breaking Ord impl.
score: f64,
pub used: bool,
/// whether this transaction has been moved to the "modified" priority queue
pub modified: bool,
pub dirty: bool,
}
impl Hash for AuditTransaction {
fn hash<H: Hasher>(&self, state: &mut H) {
self.uid.hash(state);
}
}
impl PartialEq for AuditTransaction {
fn eq(&self, other: &Self) -> bool {
self.uid == other.uid
}
}
impl Eq for AuditTransaction {}
#[inline]
pub fn partial_cmp_uid_score(a: (u32, u32, f64), b: (u32, u32, f64)) -> Option<Ordering> {
// If either score is NaN, this is false,
// and partial_cmp will return None
if a.2 != b.2 {
// compare by score (sorts by ascending score)
a.2.partial_cmp(&b.2)
} else if a.1 != b.1 {
// tie-break by comparing partial txids (sorts by descending txid)
Some(b.1.cmp(&a.1))
} else {
// tie-break partial txid collisions by comparing uids (sorts by descending uid)
Some(b.0.cmp(&a.0))
}
}
impl PartialOrd for AuditTransaction {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
partial_cmp_uid_score(
(self.uid, self.order, self.score),
(other.uid, other.order, other.score),
)
}
}
impl Ord for AuditTransaction {
fn cmp(&self, other: &Self) -> Ordering {
// Safety: The only possible values for score are f64
// that are not NaN. This is because outside code can not
// freely assign score. Also, calc_new_score guarantees no NaN.
self.partial_cmp(other).expect("score will never be NaN")
}
}
#[inline]
fn calc_fee_rate(fee: f64, vsize: f64) -> f64 {
fee / (if vsize == 0.0 { 1.0 } else { vsize })
}
impl AuditTransaction {
pub fn from_thread_transaction(tx: &ThreadTransaction) -> Self {
// rounded up to the nearest integer
let is_adjusted = tx.weight < (tx.sigops * 20);
let sigop_adjusted_vsize = ((tx.weight + 3) / 4).max(tx.sigops * 5);
let sigop_adjusted_weight = tx.weight.max(tx.sigops * 20);
let effective_fee_per_vsize = if is_adjusted {
calc_fee_rate(tx.fee, f64::from(sigop_adjusted_weight) / 4.0)
} else {
tx.effective_fee_per_vsize
};
Self {
uid: tx.uid,
order: tx.order,
fee: tx.fee as u64,
weight: tx.weight,
sigop_adjusted_weight,
sigop_adjusted_vsize,
sigops: tx.sigops,
adjusted_fee_per_vsize: calc_fee_rate(tx.fee, f64::from(sigop_adjusted_vsize)),
effective_fee_per_vsize,
dependency_rate: f64::INFINITY,
inputs: tx.inputs.clone(),
relatives_set_flag: false,
ancestors: u32hashset_new(),
children: u32hashset_new(),
ancestor_fee: tx.fee as u64,
ancestor_sigop_adjusted_weight: sigop_adjusted_weight,
ancestor_sigop_adjusted_vsize: sigop_adjusted_vsize,
ancestor_sigops: tx.sigops,
score: 0.0,
used: false,
modified: false,
dirty: effective_fee_per_vsize != tx.effective_fee_per_vsize,
}
}
#[inline]
pub const fn score(&self) -> f64 {
self.score
}
#[inline]
pub const fn order(&self) -> u32 {
self.order
}
#[inline]
pub const fn ancestor_sigop_adjusted_vsize(&self) -> u32 {
self.ancestor_sigop_adjusted_vsize
}
#[inline]
pub const fn ancestor_sigops(&self) -> u32 {
self.ancestor_sigops
}
#[inline]
pub fn cluster_rate(&self) -> f64 {
// Safety: self.ancestor_sigop_adjusted_weight can never be 0.
// Even if it could, as it approaches 0, the value inside the min() call
// grows, so if we think of 0 as "grew infinitely" then dependency_rate would be
// the smaller of the two. If either side is NaN, the other side is returned.
self.dependency_rate.min(calc_fee_rate(
self.ancestor_fee as f64,
f64::from(self.ancestor_sigop_adjusted_weight) / 4.0,
))
}
pub fn set_dirty_if_different(&mut self, cluster_rate: f64) {
if self.effective_fee_per_vsize != cluster_rate {
self.effective_fee_per_vsize = cluster_rate;
self.dirty = true;
}
}
/// Safety: This function must NEVER set score to NaN.
#[inline]
fn calc_new_score(&mut self) {
self.score = self.adjusted_fee_per_vsize.min(calc_fee_rate(
self.ancestor_fee as f64,
f64::from(self.ancestor_sigop_adjusted_vsize),
));
}
#[inline]
pub fn set_ancestors(
&mut self,
ancestors: HashSet<u32, U32HasherState>,
total_fee: u64,
total_sigop_adjusted_weight: u32,
total_sigop_adjusted_vsize: u32,
total_sigops: u32,
) {
self.ancestors = ancestors;
self.ancestor_fee = self.fee + total_fee;
self.ancestor_sigop_adjusted_weight =
self.sigop_adjusted_weight + total_sigop_adjusted_weight;
self.ancestor_sigop_adjusted_vsize = self.sigop_adjusted_vsize + total_sigop_adjusted_vsize;
self.ancestor_sigops = self.sigops + total_sigops;
self.calc_new_score();
self.relatives_set_flag = true;
}
#[inline]
pub fn remove_root(
&mut self,
root_txid: u32,
root_fee: u64,
root_sigop_adjusted_weight: u32,
root_sigop_adjusted_vsize: u32,
root_sigops: u32,
cluster_rate: f64,
) -> f64 {
let old_score = self.score();
self.dependency_rate = self.dependency_rate.min(cluster_rate);
if self.ancestors.remove(&root_txid) {
self.ancestor_fee -= root_fee;
self.ancestor_sigop_adjusted_weight -= root_sigop_adjusted_weight;
self.ancestor_sigop_adjusted_vsize -= root_sigop_adjusted_vsize;
self.ancestor_sigops -= root_sigops;
self.calc_new_score();
}
old_score
}
}
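
The sigop adjustment in `from_thread_transaction` above mirrors Bitcoin Core's default sigop-to-vsize conversion (20 weight units per sigop): a transaction's vsize is the larger of its weight divided by 4 (rounded up) and 5 vbytes per sigop. A minimal TypeScript sketch of that arithmetic, with invented numbers:

```ts
// Mirrors the sigop_adjusted_vsize computation in
// AuditTransaction::from_thread_transaction above.
function sigopAdjustedVsize(weight: number, sigops: number): number {
  return Math.max(Math.ceil(weight / 4), sigops * 5);
}

// A 400-weight transaction with 30 sigops: ceil(400 / 4) = 100 vbytes,
// but 30 * 5 = 150, so fee rates are computed over 150 vbytes instead.
console.log(sigopAdjustedVsize(400, 30)); // 150
```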

backend/rust-gbt/src/gbt.rs (new file, 421 lines)

@@ -0,0 +1,421 @@
use priority_queue::PriorityQueue;
use std::{cmp::Ordering, collections::HashSet, mem::ManuallyDrop};
use tracing::{info, trace};
use crate::{
audit_transaction::{partial_cmp_uid_score, AuditTransaction},
u32_hasher_types::{u32hashset_new, u32priority_queue_with_capacity, U32HasherState},
GbtResult, ThreadTransactionsMap,
};
const MAX_BLOCK_WEIGHT_UNITS: u32 = 4_000_000 - 4_000;
const BLOCK_SIGOPS: u32 = 80_000;
const BLOCK_RESERVED_WEIGHT: u32 = 4_000;
const BLOCK_RESERVED_SIGOPS: u32 = 400;
const MAX_BLOCKS: usize = 8;
type AuditPool = Vec<Option<ManuallyDrop<AuditTransaction>>>;
type ModifiedQueue = PriorityQueue<u32, TxPriority, U32HasherState>;
#[derive(Debug)]
struct TxPriority {
uid: u32,
order: u32,
score: f64,
}
impl PartialEq for TxPriority {
fn eq(&self, other: &Self) -> bool {
self.uid == other.uid
}
}
impl Eq for TxPriority {}
impl PartialOrd for TxPriority {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
partial_cmp_uid_score(
(self.uid, self.order, self.score),
(other.uid, other.order, other.score),
)
}
}
impl Ord for TxPriority {
fn cmp(&self, other: &Self) -> Ordering {
self.partial_cmp(other).expect("score will never be NaN")
}
}
/// Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core.
///
/// See `BlockAssembler` in Bitcoin Core's
/// [miner.cpp](https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp).
/// Ported from mempool backend's
/// [tx-selection-worker.ts](https://github.com/mempool/mempool/blob/master/backend/src/api/tx-selection-worker.ts).
//
// TODO: Make gbt smaller to fix these lints.
#[allow(clippy::too_many_lines)]
#[allow(clippy::cognitive_complexity)]
pub fn gbt(mempool: &mut ThreadTransactionsMap, max_uid: usize) -> GbtResult {
let mempool_len = mempool.len();
let mut audit_pool: AuditPool = Vec::with_capacity(max_uid + 1);
audit_pool.resize(max_uid + 1, None);
let mut mempool_stack: Vec<u32> = Vec::with_capacity(mempool_len);
let mut clusters: Vec<Vec<u32>> = Vec::new();
let mut block_weights: Vec<u32> = Vec::new();
info!("Initializing working structs");
for (uid, tx) in &mut *mempool {
let audit_tx = AuditTransaction::from_thread_transaction(tx);
// Safety: audit_pool and mempool_stack must always contain the same transactions
audit_pool[*uid as usize] = Some(ManuallyDrop::new(audit_tx));
mempool_stack.push(*uid);
}
info!("Building relatives graph & calculate ancestor scores");
for txid in &mempool_stack {
set_relatives(*txid, &mut audit_pool);
}
trace!("Post relative graph Audit Pool: {:#?}", audit_pool);
info!("Sorting by descending ancestor score");
let mut mempool_stack: Vec<(u32, u32, f64)> = mempool_stack
.into_iter()
.map(|txid| {
let atx = audit_pool
.get(txid as usize)
.and_then(Option::as_ref)
.expect("All txids are from audit_pool");
(txid, atx.order(), atx.score())
})
.collect();
mempool_stack.sort_unstable_by(|a, b| partial_cmp_uid_score(*a, *b).expect("Not NaN"));
let mut mempool_stack: Vec<u32> = mempool_stack.into_iter().map(|(txid, _, _)| txid).collect();
info!("Building blocks by greedily choosing the highest feerate package");
info!("(i.e. the package rooted in the transaction with the best ancestor score)");
let mut blocks: Vec<Vec<u32>> = Vec::new();
let mut block_weight: u32 = BLOCK_RESERVED_WEIGHT;
let mut block_sigops: u32 = BLOCK_RESERVED_SIGOPS;
// No need to be bigger than 4096 transactions for the per-block transaction Vec.
let initial_txes_per_block: usize = 4096.min(mempool_len);
let mut transactions: Vec<u32> = Vec::with_capacity(initial_txes_per_block);
let mut modified: ModifiedQueue = u32priority_queue_with_capacity(mempool_len);
let mut overflow: Vec<u32> = Vec::new();
let mut failures = 0;
while !mempool_stack.is_empty() || !modified.is_empty() {
// This trace log storm is big, so to make scrolling through each
// iteration easier, leave a bunch of empty rows and a header of ======
trace!("\n\n\n\n\n\n\n\n\n\n==================================");
trace!("mempool_array: {:#?}", mempool_stack);
trace!("clusters: {:#?}", clusters);
trace!("modified: {:#?}", modified);
trace!("audit_pool: {:#?}", audit_pool);
trace!("blocks: {:#?}", blocks);
trace!("block_weight: {:#?}", block_weight);
trace!("block_sigops: {:#?}", block_sigops);
trace!("transactions: {:#?}", transactions);
trace!("overflow: {:#?}", overflow);
trace!("failures: {:#?}", failures);
trace!("\n==================================");
let next_from_stack = next_valid_from_stack(&mut mempool_stack, &audit_pool);
let next_from_queue = next_valid_from_queue(&mut modified, &audit_pool);
if next_from_stack.is_none() && next_from_queue.is_none() {
continue;
}
let (next_tx, from_stack) = match (next_from_stack, next_from_queue) {
(Some(stack_tx), Some(queue_tx)) => match queue_tx.cmp(stack_tx) {
std::cmp::Ordering::Less => (stack_tx, true),
_ => (queue_tx, false),
},
(Some(stack_tx), None) => (stack_tx, true),
(None, Some(queue_tx)) => (queue_tx, false),
(None, None) => unreachable!(),
};
if from_stack {
mempool_stack.pop();
} else {
modified.pop();
}
if blocks.len() < (MAX_BLOCKS - 1)
&& ((block_weight + (4 * next_tx.ancestor_sigop_adjusted_vsize())
>= MAX_BLOCK_WEIGHT_UNITS)
|| (block_sigops + next_tx.ancestor_sigops() > BLOCK_SIGOPS))
{
// hold this package in an overflow list while we check for smaller options
overflow.push(next_tx.uid);
failures += 1;
} else {
let mut package: Vec<(u32, u32, usize)> = Vec::new();
let mut cluster: Vec<u32> = Vec::new();
let is_cluster: bool = !next_tx.ancestors.is_empty();
for ancestor_id in &next_tx.ancestors {
if let Some(Some(ancestor)) = audit_pool.get(*ancestor_id as usize) {
package.push((*ancestor_id, ancestor.order(), ancestor.ancestors.len()));
}
}
package.sort_unstable_by(|a, b| -> Ordering {
if a.2 != b.2 {
// order by ascending ancestor count
a.2.cmp(&b.2)
} else if a.1 != b.1 {
// tie-break by ascending partial txid
a.1.cmp(&b.1)
} else {
// tie-break partial txid collisions by ascending uid
a.0.cmp(&b.0)
}
});
package.push((next_tx.uid, next_tx.order(), next_tx.ancestors.len()));
let cluster_rate = next_tx.cluster_rate();
for (txid, _, _) in &package {
cluster.push(*txid);
if let Some(Some(tx)) = audit_pool.get_mut(*txid as usize) {
tx.used = true;
tx.set_dirty_if_different(cluster_rate);
transactions.push(tx.uid);
block_weight += tx.weight;
block_sigops += tx.sigops;
}
update_descendants(*txid, &mut audit_pool, &mut modified, cluster_rate);
}
if is_cluster {
clusters.push(cluster);
}
failures = 0;
}
// this block is full
let exceeded_package_tries =
failures > 1000 && block_weight > (MAX_BLOCK_WEIGHT_UNITS - BLOCK_RESERVED_WEIGHT);
let queue_is_empty = mempool_stack.is_empty() && modified.is_empty();
if (exceeded_package_tries || queue_is_empty) && blocks.len() < (MAX_BLOCKS - 1) {
// finalize this block
if !transactions.is_empty() {
blocks.push(transactions);
block_weights.push(block_weight);
}
// reset for the next block
transactions = Vec::with_capacity(initial_txes_per_block);
block_weight = BLOCK_RESERVED_WEIGHT;
block_sigops = BLOCK_RESERVED_SIGOPS;
failures = 0;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
overflow.reverse();
for overflowed in &overflow {
if let Some(Some(overflowed_tx)) = audit_pool.get(*overflowed as usize) {
if overflowed_tx.modified {
modified.push(
*overflowed,
TxPriority {
uid: *overflowed,
order: overflowed_tx.order(),
score: overflowed_tx.score(),
},
);
} else {
mempool_stack.push(*overflowed);
}
}
}
overflow = Vec::new();
}
}
info!("add the final unbounded block if it contains any transactions");
if !transactions.is_empty() {
blocks.push(transactions);
block_weights.push(block_weight);
}
info!("make a list of dirty transactions and their new rates");
let mut rates: Vec<Vec<f64>> = Vec::new();
for (uid, thread_tx) in mempool {
// Takes ownership of the audit_tx and replaces with None
if let Some(Some(audit_tx)) = audit_pool.get_mut(*uid as usize).map(Option::take) {
trace!("txid: {}, is_dirty: {}", uid, audit_tx.dirty);
if audit_tx.dirty {
rates.push(vec![f64::from(*uid), audit_tx.effective_fee_per_vsize]);
thread_tx.effective_fee_per_vsize = audit_tx.effective_fee_per_vsize;
}
// Drops the AuditTransaction manually
// There are no audit_txs that are not in the mempool HashMap
// So there is guaranteed to be no memory leaks.
ManuallyDrop::into_inner(audit_tx);
}
}
trace!("\n\n\n\n\n====================");
trace!("blocks: {:#?}", blocks);
trace!("clusters: {:#?}", clusters);
trace!("rates: {:#?}\n====================\n\n\n\n\n", rates);
GbtResult {
blocks,
block_weights,
clusters,
rates,
}
}
fn next_valid_from_stack<'a>(
mempool_stack: &mut Vec<u32>,
audit_pool: &'a AuditPool,
) -> Option<&'a AuditTransaction> {
while let Some(next_txid) = mempool_stack.last() {
match audit_pool.get(*next_txid as usize) {
Some(Some(tx)) if !tx.used && !tx.modified => {
return Some(tx);
}
_ => {
mempool_stack.pop();
}
}
}
None
}
fn next_valid_from_queue<'a>(
queue: &mut ModifiedQueue,
audit_pool: &'a AuditPool,
) -> Option<&'a AuditTransaction> {
while let Some((next_txid, _)) = queue.peek() {
match audit_pool.get(*next_txid as usize) {
Some(Some(tx)) if !tx.used => {
return Some(tx);
}
_ => {
queue.pop();
}
}
}
None
}
fn set_relatives(txid: u32, audit_pool: &mut AuditPool) {
let mut parents: HashSet<u32, U32HasherState> = u32hashset_new();
if let Some(Some(tx)) = audit_pool.get(txid as usize) {
if tx.relatives_set_flag {
return;
}
for input in &tx.inputs {
parents.insert(*input);
}
} else {
return;
}
let mut ancestors: HashSet<u32, U32HasherState> = u32hashset_new();
for parent_id in &parents {
set_relatives(*parent_id, audit_pool);
if let Some(Some(parent)) = audit_pool.get_mut(*parent_id as usize) {
// Safety: ancestors must always contain only txes in audit_pool
ancestors.insert(*parent_id);
parent.children.insert(txid);
for ancestor in &parent.ancestors {
ancestors.insert(*ancestor);
}
}
}
let mut total_fee: u64 = 0;
let mut total_sigop_adjusted_weight: u32 = 0;
let mut total_sigop_adjusted_vsize: u32 = 0;
let mut total_sigops: u32 = 0;
for ancestor_id in &ancestors {
let Some(ancestor) = audit_pool
.get(*ancestor_id as usize)
.expect("audit_pool contains all ancestors") else { todo!() };
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
}
if let Some(Some(tx)) = audit_pool.get_mut(txid as usize) {
tx.set_ancestors(
ancestors,
total_fee,
total_sigop_adjusted_weight,
total_sigop_adjusted_vsize,
total_sigops,
);
}
}
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
fn update_descendants(
root_txid: u32,
audit_pool: &mut AuditPool,
modified: &mut ModifiedQueue,
cluster_rate: f64,
) {
let mut visited: HashSet<u32, U32HasherState> = u32hashset_new();
let mut descendant_stack: Vec<u32> = Vec::new();
let root_fee: u64;
let root_sigop_adjusted_weight: u32;
let root_sigop_adjusted_vsize: u32;
let root_sigops: u32;
if let Some(Some(root_tx)) = audit_pool.get(root_txid as usize) {
for descendant_id in &root_tx.children {
if !visited.contains(descendant_id) {
descendant_stack.push(*descendant_id);
visited.insert(*descendant_id);
}
}
root_fee = root_tx.fee;
root_sigop_adjusted_weight = root_tx.sigop_adjusted_weight;
root_sigop_adjusted_vsize = root_tx.sigop_adjusted_vsize;
root_sigops = root_tx.sigops;
} else {
return;
}
while let Some(next_txid) = descendant_stack.pop() {
if let Some(Some(descendant)) = audit_pool.get_mut(next_txid as usize) {
// remove root tx as ancestor
let old_score = descendant.remove_root(
root_txid,
root_fee,
root_sigop_adjusted_weight,
root_sigop_adjusted_vsize,
root_sigops,
cluster_rate,
);
// add to priority queue or update priority if score has changed
if descendant.score() < old_score {
descendant.modified = true;
modified.push_decrease(
descendant.uid,
TxPriority {
uid: descendant.uid,
order: descendant.order(),
score: descendant.score(),
},
);
} else if descendant.score() > old_score {
descendant.modified = true;
modified.push_increase(
descendant.uid,
TxPriority {
uid: descendant.uid,
order: descendant.order(),
score: descendant.score(),
},
);
}
// add this node's children to the stack
for child_id in &descendant.children {
if !visited.contains(child_id) {
descendant_stack.push(*child_id);
visited.insert(*child_id);
}
}
}
}
}
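
Both the sorted mempool stack and the modified priority queue rely on `partial_cmp_uid_score` (defined in audit_transaction.rs): ascending score, with ties broken by descending `order` (partial txid) and then descending uid. The same comparator, sketched in TypeScript for illustration only:

```ts
type UidOrderScore = [uid: number, order: number, score: number];

// Ascending score; ties broken by descending order, then descending uid —
// matching partial_cmp_uid_score in audit_transaction.rs.
function compareUidScore(a: UidOrderScore, b: UidOrderScore): number {
  if (a[2] !== b[2]) return a[2] - b[2];
  if (a[1] !== b[1]) return b[1] - a[1];
  return b[0] - a[0];
}

const txs: UidOrderScore[] = [[7, 2, 1.5], [3, 9, 1.5], [5, 4, 2.0]];
txs.sort(compareUidScore); // [[3, 9, 1.5], [7, 2, 1.5], [5, 4, 2.0]]
```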

backend/rust-gbt/src/lib.rs (new file, 177 lines)

@@ -0,0 +1,177 @@
#![warn(clippy::all)]
#![warn(clippy::pedantic)]
#![warn(clippy::nursery)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::float_cmp)]
use napi::bindgen_prelude::Result;
use napi_derive::napi;
use thread_transaction::ThreadTransaction;
use tracing::{debug, info, trace};
use tracing_log::LogTracer;
use tracing_subscriber::{EnvFilter, FmtSubscriber};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
mod audit_transaction;
mod gbt;
mod thread_transaction;
mod u32_hasher_types;
use u32_hasher_types::{u32hashmap_with_capacity, U32HasherState};
/// This is the initial capacity of the `GbtGenerator` struct's inner `HashMap`.
///
/// Note: This doesn't *have* to be a power of 2. (uwu)
const STARTING_CAPACITY: usize = 1_048_576;
type ThreadTransactionsMap = HashMap<u32, ThreadTransaction, U32HasherState>;
#[napi]
pub struct GbtGenerator {
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
}
#[napi::module_init]
fn init() {
// Set all `tracing` logs to print to STDOUT
// Note: Passing RUST_LOG env variable to the node process
// will change the log level for the rust module.
tracing::subscriber::set_global_default(
FmtSubscriber::builder()
.with_env_filter(EnvFilter::from_default_env())
.with_ansi(
// Default to no-color logs.
// Setting RUST_LOG_COLOR to 1 or true|TRUE|True etc.
// will enable color
std::env::var("RUST_LOG_COLOR")
.map(|s| ["1", "true"].contains(&&*s.to_lowercase()))
.unwrap_or(false),
)
.finish(),
)
.expect("Logging subscriber failed");
// Convert all `log` logs into `tracing` events
LogTracer::init().expect("Legacy log subscriber failed");
}
#[napi]
impl GbtGenerator {
#[napi(constructor)]
#[allow(clippy::new_without_default)]
#[must_use]
pub fn new() -> Self {
debug!("Created new GbtGenerator");
Self {
thread_transactions: Arc::new(Mutex::new(u32hashmap_with_capacity(STARTING_CAPACITY))),
}
}
/// # Errors
///
/// Rejects if the thread panics or if the Mutex is poisoned.
#[napi]
pub async fn make(&self, mempool: Vec<ThreadTransaction>, max_uid: u32) -> Result<GbtResult> {
trace!("make: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
max_uid as usize,
move |map| {
for tx in mempool {
map.insert(tx.uid, tx);
}
},
)
.await
}
/// # Errors
///
/// Rejects if the thread panics or if the Mutex is poisoned.
#[napi]
pub async fn update(
&self,
new_txs: Vec<ThreadTransaction>,
remove_txs: Vec<u32>,
max_uid: u32,
) -> Result<GbtResult> {
trace!("update: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
max_uid as usize,
move |map| {
for tx in new_txs {
map.insert(tx.uid, tx);
}
for txid in &remove_txs {
map.remove(txid);
}
},
)
.await
}
}
/// The result from calling the gbt function.
///
/// This tuple contains the following:
/// blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
/// block_weights: A Vector of total weights per block.
/// clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
/// rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
#[napi(constructor)]
pub struct GbtResult {
pub blocks: Vec<Vec<u32>>,
pub block_weights: Vec<u32>,
pub clusters: Vec<Vec<u32>>,
pub rates: Vec<Vec<f64>>, // Tuples not supported. u32 fits inside f64
}
/// All on another thread, this runs an arbitrary task in between
/// taking the lock and running gbt.
///
/// Rather than filling / updating the `HashMap` on the main thread,
/// this allows for `HashMap` modifying tasks to be run before running and returning gbt results.
///
/// `thread_transactions` is a cloned `Arc` of the `Mutex` for the `HashMap` state.
/// `callback` is a `'static + Send` `FnOnce` closure/function that takes a mutable reference
/// to the `HashMap` as the only argument. (A move closure is recommended to meet the bounds)
async fn run_task<F>(
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
max_uid: usize,
callback: F,
) -> Result<GbtResult>
where
F: FnOnce(&mut ThreadTransactionsMap) + Send + 'static,
{
debug!("Spawning thread...");
let handle = napi::tokio::task::spawn_blocking(move || {
debug!(
"Getting lock for thread_transactions from thread {:?}...",
std::thread::current().id()
);
let mut map = thread_transactions
.lock()
.map_err(|_| napi::Error::from_reason("THREAD_TRANSACTIONS Mutex poisoned"))?;
callback(&mut map);
info!("Starting gbt algorithm for {} elements...", map.len());
let result = gbt::gbt(&mut map, max_uid);
info!("Finished gbt algorithm for {} elements...", map.len());
debug!(
"Releasing lock for thread_transactions from thread {:?}...",
std::thread::current().id()
);
drop(map);
Ok(result)
});
handle
.await
.map_err(|_| napi::Error::from_reason("thread panicked"))?
}
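
A minimal sketch of how these bindings are driven from the Node side (the import path and numbers are illustrative; the shape mirrors the Jest test further down):

import { GbtGenerator, ThreadTransaction } from '../rust-gbt';

async function buildTemplates(): Promise<void> {
  const gbt = new GbtGenerator();
  const tx: ThreadTransaction = {
    uid: 1,
    order: 0,
    fee: 1000,                               // sats (assumed unit)
    weight: 560,
    sigops: 4,
    effectiveFeePerVsize: 1000 / (560 / 4), // fee / vsize
    inputs: [],                              // uids of unconfirmed parents
  };
  // Initial build: fills the retained HashMap, then runs gbt on a worker thread
  const result = await gbt.make([tx], /* maxUid */ 1);
  console.log(result.blocks);              // e.g. [[1]]
  // Incremental update: adds/removes are applied to the same retained state
  await gbt.update([], [tx.uid], /* maxUid */ 1);
}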

13
backend/rust-gbt/src/thread_transaction.rs Normal file
View file

@@ -0,0 +1,13 @@
use napi_derive::napi;
#[derive(Debug)]
#[napi(object)]
pub struct ThreadTransaction {
pub uid: u32,
pub order: u32,
pub fee: f64,
pub weight: u32,
pub sigops: u32,
pub effective_fee_per_vsize: f64,
pub inputs: Vec<u32>,
}
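
Since napi-rs projects snake_case struct fields into camelCase on the JavaScript side, the TypeScript view of this object (the shape the Jest test below constructs) is roughly:

interface ThreadTransaction {
  uid: number;
  order: number;
  fee: number;
  weight: number;
  sigops: number;
  effectiveFeePerVsize: number; // effective_fee_per_vsize
  inputs: number[];
}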

132
backend/rust-gbt/src/u32_hasher_types.rs Normal file
View file

@@ -0,0 +1,132 @@
use priority_queue::PriorityQueue;
use std::{
collections::{HashMap, HashSet},
fmt::Debug,
hash::{BuildHasher, Hasher},
};
/// This is the only way to create a `HashMap` with the `U32HasherState` and capacity
pub fn u32hashmap_with_capacity<V>(capacity: usize) -> HashMap<u32, V, U32HasherState> {
HashMap::with_capacity_and_hasher(capacity, U32HasherState(()))
}
/// This is the only way to create a `PriorityQueue` with the `U32HasherState` and capacity
pub fn u32priority_queue_with_capacity<V: Ord>(
capacity: usize,
) -> PriorityQueue<u32, V, U32HasherState> {
PriorityQueue::with_capacity_and_hasher(capacity, U32HasherState(()))
}
/// This is the only way to create a `HashSet` with the `U32HasherState`
pub fn u32hashset_new() -> HashSet<u32, U32HasherState> {
HashSet::with_hasher(U32HasherState(()))
}
/// Wraps a private unit type so no code outside this module can construct an instance.
#[derive(Clone)]
pub struct U32HasherState(());
impl Debug for U32HasherState {
fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Ok(())
}
}
impl BuildHasher for U32HasherState {
type Hasher = U32Hasher;
fn build_hasher(&self) -> Self::Hasher {
U32Hasher(0)
}
}
/// This also can't be created outside this module due to private field.
pub struct U32Hasher(u32);
impl Hasher for U32Hasher {
fn finish(&self) -> u64 {
// Safety: Two u32s next to each other will make a u64
bytemuck::cast([self.0, 0])
}
fn write(&mut self, bytes: &[u8]) {
// Assert in debug builds (and in tests) that only 4-byte keys (u32, i32, f32, etc.) are hashed
debug_assert!(bytes.len() == 4);
// Safety: We know that the size of the key is 4 bytes
// We also know that the only way to get an instance of HashMap using this "hasher"
// is through the public functions in this module which set the key type to u32.
self.0 = *bytemuck::from_bytes(bytes);
}
}
#[cfg(test)]
mod tests {
use super::U32HasherState;
use priority_queue::PriorityQueue;
use std::collections::HashMap;
#[test]
fn test_hashmap() {
let mut hm: HashMap<u32, String, U32HasherState> = HashMap::with_hasher(U32HasherState(()));
// Testing basic operations with the custom hasher
hm.insert(0, String::from("0"));
hm.insert(42, String::from("42"));
hm.insert(256, String::from("256"));
hm.insert(u32::MAX, String::from("MAX"));
hm.insert(u32::MAX >> 2, String::from("MAX >> 2"));
assert_eq!(hm.get(&0), Some(&String::from("0")));
assert_eq!(hm.get(&42), Some(&String::from("42")));
assert_eq!(hm.get(&256), Some(&String::from("256")));
assert_eq!(hm.get(&u32::MAX), Some(&String::from("MAX")));
assert_eq!(hm.get(&(u32::MAX >> 2)), Some(&String::from("MAX >> 2")));
assert_eq!(hm.get(&(u32::MAX >> 4)), None);
assert_eq!(hm.get(&3), None);
assert_eq!(hm.get(&43), None);
}
#[test]
fn test_priority_queue() {
let mut pq: PriorityQueue<u32, i32, U32HasherState> =
PriorityQueue::with_hasher(U32HasherState(()));
// Testing basic operations with the custom hasher
assert_eq!(pq.push(1, 5), None);
assert_eq!(pq.push(2, -10), None);
assert_eq!(pq.push(3, 7), None);
assert_eq!(pq.push(4, 20), None);
assert_eq!(pq.push(u32::MAX, -42), None);
assert_eq!(pq.push_increase(1, 4), Some(4));
assert_eq!(pq.push_increase(2, -8), Some(-10));
assert_eq!(pq.push_increase(3, 5), Some(5));
assert_eq!(pq.push_increase(4, 21), Some(20));
assert_eq!(pq.push_increase(u32::MAX, -99), Some(-99));
assert_eq!(pq.push_increase(42, 1337), None);
assert_eq!(pq.push_decrease(1, 4), Some(5));
assert_eq!(pq.push_decrease(2, -10), Some(-8));
assert_eq!(pq.push_decrease(3, 5), Some(7));
assert_eq!(pq.push_decrease(4, 20), Some(21));
assert_eq!(pq.push_decrease(u32::MAX, 100), Some(100));
assert_eq!(pq.push_decrease(69, 420), None);
assert_eq!(pq.peek(), Some((&42, &1337)));
assert_eq!(pq.pop(), Some((42, 1337)));
assert_eq!(pq.peek(), Some((&69, &420)));
assert_eq!(pq.pop(), Some((69, 420)));
assert_eq!(pq.peek(), Some((&4, &20)));
assert_eq!(pq.pop(), Some((4, 20)));
assert_eq!(pq.peek(), Some((&3, &5)));
assert_eq!(pq.pop(), Some((3, 5)));
assert_eq!(pq.peek(), Some((&1, &4)));
assert_eq!(pq.pop(), Some((1, 4)));
assert_eq!(pq.peek(), Some((&2, &-10)));
assert_eq!(pq.pop(), Some((2, -10)));
assert_eq!(pq.peek(), Some((&u32::MAX, &-42)));
assert_eq!(pq.pop(), Some((u32::MAX, -42)));
assert_eq!(pq.peek(), None);
assert_eq!(pq.pop(), None);
}
}

View file

@@ -16,7 +16,7 @@
"INITIAL_BLOCKS_AMOUNT": 7,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"USE_SECOND_NODE_FOR_MINFEE": 10,
"EXTERNAL_ASSETS": 11,
"EXTERNAL_ASSETS": [],
"EXTERNAL_MAX_RETRY": 12,
"EXTERNAL_RETRY_INTERVAL": 13,
"USER_AGENT": "__MEMPOOL_USER_AGENT__",
@@ -24,17 +24,22 @@
"INDEXING_BLOCKS_AMOUNT": 14,
"POOLS_JSON_TREE_URL": "__POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__POOLS_JSON_URL__",
"AUDIT": "__MEMPOOL_AUDIT__",
"ADVANCED_GBT_AUDIT": "__MEMPOOL_ADVANCED_GBT_AUDIT__",
"ADVANCED_GBT_MEMPOOL": "__MEMPOOL_ADVANCED_GBT_MEMPOOL__",
"CPFP_INDEXING": "__MEMPOOL_CPFP_INDEXING__",
"MAX_BLOCKS_BULK_QUERY": "__MEMPOOL_MAX_BLOCKS_BULK_QUERY__"
"AUDIT": true,
"ADVANCED_GBT_AUDIT": true,
"ADVANCED_GBT_MEMPOOL": true,
"RUST_GBT": false,
"CPFP_INDEXING": true,
"MAX_BLOCKS_BULK_QUERY": 999,
"DISK_CACHE_BLOCK_INTERVAL": 999,
"MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
"ALLOW_UNREACHABLE": true
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
"PORT": 15,
"USERNAME": "__CORE_RPC_USERNAME__",
"PASSWORD": "__CORE_RPC_PASSWORD__"
"PASSWORD": "__CORE_RPC_PASSWORD__",
"TIMEOUT": 1000
},
"ELECTRUM": {
"HOST": "__ELECTRUM_HOST__",
@@ -42,13 +47,16 @@
"TLS_ENABLED": true
},
"ESPLORA": {
"REST_API_URL": "__ESPLORA_REST_API_URL__"
"REST_API_URL": "__ESPLORA_REST_API_URL__",
"UNIX_SOCKET_PATH": "__ESPLORA_UNIX_SOCKET_PATH__",
"RETRY_UNIX_SOCKET_AFTER": 888
},
"SECOND_CORE_RPC": {
"HOST": "__SECOND_CORE_RPC_HOST__",
"PORT": 17,
"USERNAME": "__SECOND_CORE_RPC_USERNAME__",
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__"
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__",
"TIMEOUT": 2000
},
"DATABASE": {
"ENABLED": false,
@@ -57,7 +65,8 @@
"PORT": 18,
"DATABASE": "__DATABASE_DATABASE__",
"USERNAME": "__DATABASE_USERNAME__",
"PASSWORD": "__DATABASE_PASSWORD__"
"PASSWORD": "__DATABASE_PASSWORD__",
"TIMEOUT": 3000
},
"SYSLOG": {
"ENABLED": false,
@@ -95,21 +104,28 @@
"BISQ_ONION": "__EXTERNAL_DATA_SERVER_BISQ_ONION__"
},
"LIGHTNING": {
"ENABLED": "__LIGHTNING_ENABLED__",
"ENABLED": true,
"BACKEND": "__LIGHTNING_BACKEND__",
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": "__FORENSICS_RATE_LIMIT__"
"FORENSICS_RATE_LIMIT": 1234
},
"LND": {
"TLS_CERT_PATH": "",
"MACAROON_PATH": "",
"REST_API_URL": "https://localhost:8080"
"REST_API_URL": "https://localhost:8080",
"TIMEOUT": 10000
},
"CLIGHTNING": {
"SOCKET": "__CLIGHTNING_SOCKET__"
},
"REPLICATION": {
"ENABLED": false,
"AUDIT": false,
"AUDIT_START_HEIGHT": 774000,
"SERVERS": []
}
}

View file

@@ -14,11 +14,11 @@ describe('Mempool Difficulty Adjustment', () => {
750134, // Current block height
0.6280047707459726, // Previous retarget % (Passed through)
'mainnet', // Network (if testnet, next value is non-zero)
0, // If not testnet, not used
0, // Latest block timestamp in seconds (only used if difficulty already locked in)
],
{ // Expected Result
progressPercent: 9.027777777777777,
difficultyChange: 12.562233927411782,
difficultyChange: 13.180707740199772,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
@@ -41,7 +41,7 @@
],
{ // Expected Result is same other than timeOffset
progressPercent: 9.027777777777777,
difficultyChange: 12.562233927411782,
difficultyChange: 13.180707740199772,
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
@@ -54,6 +54,29 @@
expectedBlocks: 161.68833333333333,
},
],
[ // Vector 3 (mainnet lock-in (epoch ending 788255))
[ // Inputs
dt('2023-04-20T09:57:33.000Z'), // Last DA time (in seconds)
dt('2023-05-04T14:54:09.000Z'), // Current time (now) (in seconds)
788255, // Current block height
1.7220298879531821, // Previous retarget % (Passed through)
'mainnet', // Network (if testnet, next value is non-zero)
dt('2023-05-04T14:54:26.000Z'), // Latest block timestamp in seconds
],
{ // Expected Result
progressPercent: 99.95039682539682,
difficultyChange: -1.4512637555574193,
estimatedRetargetDate: 1683212658129,
remainingBlocks: 1,
remainingTime: 609129,
previousRetarget: 1.7220298879531821,
previousTime: 1681984653,
nextRetargetHeight: 788256,
timeAvg: 609129,
timeOffset: 0,
expectedBlocks: 2045.66,
},
],
] as [[number, number, number, number, string, number], DifficultyAdjustment][];
for (const vector of vectors) {

View file

@@ -40,26 +40,32 @@ describe('Mempool Backend Config', () => {
AUDIT: false,
ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false,
RUST_GBT: false,
CPFP_INDEXING: false,
MAX_BLOCKS_BULK_QUERY: 0,
DISK_CACHE_BLOCK_INTERVAL: 6,
MAX_PUSH_TX_SIZE_WEIGHT: 400000,
ALLOW_UNREACHABLE: true,
});
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
expect(config.ESPLORA).toStrictEqual({ REST_API_URL: 'http://127.0.0.1:3000' });
expect(config.ESPLORA).toStrictEqual({ REST_API_URL: 'http://127.0.0.1:3000', UNIX_SOCKET_PATH: null, RETRY_UNIX_SOCKET_AFTER: 30000 });
expect(config.CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool'
PASSWORD: 'mempool',
TIMEOUT: 60000
});
expect(config.SECOND_CORE_RPC).toStrictEqual({
HOST: '127.0.0.1',
PORT: 8332,
USERNAME: 'mempool',
PASSWORD: 'mempool'
PASSWORD: 'mempool',
TIMEOUT: 60000
});
expect(config.DATABASE).toStrictEqual({
@@ -69,7 +75,8 @@
PORT: 3306,
DATABASE: 'mempool',
USERNAME: 'mempool',
PASSWORD: 'mempool'
PASSWORD: 'mempool',
TIMEOUT: 180000,
});
expect(config.SYSLOG).toStrictEqual({
@@ -106,6 +113,20 @@
BISQ_URL: 'https://bisq.markets/api',
BISQ_ONION: 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
});
expect(config.MAXMIND).toStrictEqual({
ENABLED: false,
GEOLITE2_CITY: '/usr/local/share/GeoIP/GeoLite2-City.mmdb',
GEOLITE2_ASN: '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
GEOIP2_ISP: '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
});
expect(config.REPLICATION).toStrictEqual({
ENABLED: false,
AUDIT: false,
AUDIT_START_HEIGHT: 774000,
SERVERS: []
});
});
});
@@ -141,4 +162,94 @@
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual(fixture.EXTERNAL_DATA_SERVER);
});
});
test('should ensure the docker start.sh script has default values', () => {
jest.isolateModules(() => {
const startSh = fs.readFileSync(`${__dirname}/../../../docker/backend/start.sh`, 'utf-8');
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
function parseJson(jsonObj, root?) {
for (const [key, value] of Object.entries(jsonObj)) {
// We have a few cases where we can't follow the pattern
if (root === 'MEMPOOL' && key === 'HTTP_PORT') {
console.log('skipping check for MEMPOOL_HTTP_PORT');
continue;
}
switch (typeof value) {
case 'object': {
if (Array.isArray(value)) {
break;
} else {
parseJson(value, key);
}
break;
}
default: {
// The flattened placeholder, e.g. __MEMPOOL_ENABLED__
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
// The corresponding environment variable name, e.g. MEMPOOL_ENABLED
const envVarStr = `${root ? root : ''}_${key}`;
// The default-value assignment, checked as a regex, e.g. __MEMPOOL_ENABLED__=${MEMPOOL_ENABLED:=(.*)}
const defaultEntry = replaceStr + '=' + '\\${' + envVarStr + ':=(.*)' + '}';
console.log(`looking for ${defaultEntry} in the start.sh script`);
const re = new RegExp(defaultEntry);
expect(startSh).toMatch(re);
// The sed invocation that actually replaces the placeholder in the config file
const sedStr = 'sed -i "s!' + replaceStr + '!${' + replaceStr + '}!g" mempool-config.json';
console.log(`looking for ${sedStr} in the start.sh script`);
expect(startSh).toContain(sedStr);
break;
}
}
}
}
parseJson(fixture);
});
});
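
Concretely, for the MEMPOOL.ENABLED key the two assertions above expect start.sh to carry a pair of lines shaped like this (the default value true is illustrative):

__MEMPOOL_ENABLED__=${MEMPOOL_ENABLED:=true}
sed -i "s!__MEMPOOL_ENABLED__!${__MEMPOOL_ENABLED__}!g" mempool-config.json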
test('should ensure that the mempool-config.json Docker template has all the keys', () => {
jest.isolateModules(() => {
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
const dockerJson = fs.readFileSync(`${__dirname}/../../../docker/backend/mempool-config.json`, 'utf-8');
function parseJson(jsonObj, root?) {
for (const [key, value] of Object.entries(jsonObj)) {
switch (typeof value) {
case 'object': {
if (Array.isArray(value)) {
// numbers, arrays and booleans won't be enclosed by quotes
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
expect(dockerJson).toContain(`"${key}": ${replaceStr}`);
break;
} else {
// Check for top-level config keys
expect(dockerJson).toContain(`"${key}"`);
parseJson(value, key);
break;
}
}
case 'string': {
// strings should be enclosed by quotes
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
expect(dockerJson).toContain(`"${key}": "${replaceStr}"`);
break;
}
default: {
// numbers, arrays and booleans won't be enclosed by quotes
const replaceStr = `${root ? '__' + root + '_' : '__'}${key}__`;
expect(dockerJson).toContain(`"${key}": ${replaceStr}`);
break;
}
}
}
}
parseJson(fixture);
});
});
});

View file

@@ -0,0 +1,68 @@
import fs from 'fs';
import { GbtGenerator, ThreadTransaction } from '../../../rust-gbt';
import path from 'path';
const baseline = require('./test-data/target-template.json');
const testVector = require('./test-data/test-data-ids.json');
const vectorUidMap: Map<number, string> = new Map(testVector.map(x => [x[0], x[1]]));
const vectorTxidMap: Map<string, number> = new Map(testVector.map(x => [x[1], x[0]]));
// Note that this test buffer is specially constructed
// such that uids are assigned in numerical txid order
// so that ties break the same way as in Core's implementation
const vectorBuffer: Buffer = fs.readFileSync(path.join(__dirname, 'test-data', 'test-buffer.bin'));
describe('Rust GBT', () => {
test('should produce the same template as getBlockTemplate from Bitcoin Core', async () => {
const rustGbt = new GbtGenerator();
const { mempool, maxUid } = mempoolFromArrayBuffer(vectorBuffer.buffer);
const result = await rustGbt.make(mempool, maxUid);
const blocks: [string, number][][] = result.blocks.map(block => {
return block.map(uid => [vectorUidMap.get(uid) || 'missing', uid]);
});
const template = baseline.map(tx => [tx.txid, vectorTxidMap.get(tx.txid)]);
expect(blocks[0].length).toEqual(baseline.length);
expect(blocks[0]).toEqual(template);
});
});
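// Buffer layout, all fields big-endian, matching the reads below:
//   u32 transaction count, then per transaction:
//   u32 uid | f64 fee | u32 weight | u32 sigops |
//   f64 feePerVsize (skipped here) | f64 effectiveFeePerVsize |
//   u32 input count | that many u32 parent uids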
function mempoolFromArrayBuffer(buf: ArrayBuffer): { mempool: ThreadTransaction[], maxUid: number } {
let maxUid = 0;
const view = new DataView(buf);
const count = view.getUint32(0, false);
const txs: ThreadTransaction[] = [];
let offset = 4;
for (let i = 0; i < count; i++) {
const uid = view.getUint32(offset, false);
maxUid = Math.max(maxUid, uid);
const tx: ThreadTransaction = {
uid,
order: txidToOrdering(vectorUidMap.get(uid) as string),
fee: view.getFloat64(offset + 4, false),
weight: view.getUint32(offset + 12, false),
sigops: view.getUint32(offset + 16, false),
// feePerVsize: view.getFloat64(offset + 20, false),
effectiveFeePerVsize: view.getFloat64(offset + 28, false),
inputs: [],
};
const numInputs = view.getUint32(offset + 36, false);
offset += 40;
for (let j = 0; j < numInputs; j++) {
tx.inputs.push(view.getUint32(offset, false));
offset += 4;
}
txs.push(tx);
}
return { mempool: txs, maxUid };
}
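// Derives a numeric tiebreaker from a txid: hex chars 56..63 (the last four
// bytes) are reassembled in reverse byte order, i.e. read as a little-endian
// u32, which corresponds to Core's internal (byte-reversed) txid ordering.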
function txidToOrdering(txid: string): number {
return parseInt(
txid.substr(62, 2) +
txid.substr(60, 2) +
txid.substr(58, 2) +
txid.substr(56, 2),
16
);
}

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because one or more lines are too long

View file

@@ -1,21 +1,26 @@
import config from '../config';
import { TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
import logger from '../logger';
import { MempoolTransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
import rbfCache from './rbf-cache';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
class Audit {
auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
: { censored: string[], added: string[], fresh: string[], score: number } {
auditBlock(transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended })
: { censored: string[], added: string[], fresh: string[], sigop: string[], fullrbf: string[], score: number, similarity: number } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { censored: [], added: [], fresh: [], score: 0 };
return { censored: [], added: [], fresh: [], sigop: [], fullrbf: [], score: 0, similarity: 1 };
}
const matches: string[] = []; // present in both mined block and template
const added: string[] = []; // present in mined block, not in template
const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
const fresh: string[] = []; // missing, but firstSeen or lastBoosted within PROPAGATION_MARGIN
const fullrbf: string[] = []; // either missing or present, and part of a fullrbf replacement
const isCensored = {}; // missing, without excuse
const isDisplaced = {};
let displacedWeight = 0;
let matchedWeight = 0;
let projectedWeight = 0;
const inBlock = {};
const inTemplate = {};
@@ -31,18 +36,30 @@ class Audit {
// look for transactions that were expected in the template, but missing from the mined block
for (const txid of projectedBlocks[0].transactionIds) {
if (!inBlock[txid]) {
// tx is recent, may have reached the miner too late for inclusion
if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
if (rbfCache.isFullRbf(txid)) {
fullrbf.push(txid);
} else if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
// tx is recent, may have reached the miner too late for inclusion
fresh.push(txid);
} else if (mempool[txid]?.lastBoosted != null && (now - (mempool[txid]?.lastBoosted || 0)) <= PROPAGATION_MARGIN) {
// tx was recently cpfp'd, miner may not have the latest effective rate
fresh.push(txid);
} else {
isCensored[txid] = true;
}
displacedWeight += mempool[txid].weight;
displacedWeight += mempool[txid]?.weight || 0;
} else {
matchedWeight += mempool[txid]?.weight || 0;
}
projectedWeight += mempool[txid]?.weight || 0;
inTemplate[txid] = true;
}
displacedWeight += (4000 - transactions[0].weight);
if (transactions[0]) {
displacedWeight += (4000 - transactions[0].weight);
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
}
// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
// these displaced transactions should occupy the first N weight units of the next projected block
@@ -52,19 +69,24 @@
let failures = 0;
while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
const txid = projectedBlocks[1].transactionIds[index];
const fits = (mempool[txid].weight - displacedWeightRemaining) < 4000;
const feeMatches = mempool[txid].effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, mempool[txid].effectiveFeePerVsize);
const tx = mempool[txid];
if (tx) {
const fits = (tx.weight - displacedWeightRemaining) < 4000;
const feeMatches = tx.effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, tx.effectiveFeePerVsize);
}
if (tx.firstSeen == null || (now - (tx?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= tx.weight;
}
failures = 0;
} else {
failures++;
}
if (mempool[txid].firstSeen == null || (now - (mempool[txid]?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= mempool[txid].weight;
}
failures = 0;
} else {
failures++;
logger.warn('projected transaction missing from mempool cache');
}
index++;
}
@@ -76,19 +98,11 @@
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
} else {
if (!isDisplaced[tx.txid]) {
if (rbfCache.isFullRbf(tx.txid)) {
fullrbf.push(tx.txid);
} else if (!isDisplaced[tx.txid]) {
added.push(tx.txid);
} else {
}
let blockIndex = -1;
let index = -1;
projectedBlocks.forEach((block, bi) => {
const i = block.transactionIds.indexOf(tx.txid);
if (i >= 0) {
blockIndex = bi;
index = i;
}
});
overflowWeight += tx.weight;
}
totalWeight += tx.weight;
@@ -101,32 +115,41 @@
index = projectedBlocks[0].transactionIds.length - 1;
while (index >= 0) {
const txid = projectedBlocks[0].transactionIds[index];
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (mempool[txid].effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = mempool[txid].effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (mempool[txid].effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
const tx = mempool[txid];
if (tx) {
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (tx.effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = tx.effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (tx.effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
}
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
} else {
logger.warn('projected transaction missing from mempool cache');
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
index--;
}
const numCensored = Object.keys(isCensored).length;
const numMatches = matches.length - 1; // adjust for coinbase tx
const score = numMatches > 0 ? (numMatches / (numMatches + numCensored)) : 0;
const similarity = projectedWeight ? matchedWeight / projectedWeight : 1;
return {
censored: Object.keys(isCensored),
added,
fresh,
score
sigop: [],
fullrbf,
score,
similarity,
};
}
}
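
A quick numeric illustration of the two final ratios (hypothetical values):

// Suppose 10 template txs matched (after the coinbase adjustment) and 1 was censored:
const numMatches = 10;
const numCensored = 1;
const score = numMatches / (numMatches + numCensored);  // ≈ 0.909
// Suppose 3,990,000 of 4,000,000 projected weight units made it into the block:
const similarity = 3_990_000 / 4_000_000;               // 0.9975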

View file

@@ -7,7 +7,6 @@ import { SocksProxyAgent } from 'socks-proxy-agent';
import { BisqBlocks, BisqBlock, BisqTransaction, BisqStats, BisqTrade } from './interfaces';
import { Common } from '../common';
import { BlockExtended } from '../../mempool.interfaces';
import { StaticPool } from 'node-worker-threads-pool';
import backendInfo from '../backend-info';
import logger from '../../logger';
@@ -31,10 +30,6 @@ class Bisq {
private priceUpdateCallbackFunction: ((price: number) => void) | undefined;
private topDirectoryWatcher: fs.FSWatcher | undefined;
private subdirectoryWatcher: fs.FSWatcher | undefined;
private jsonParsePool = new StaticPool({
size: 4,
task: (blob: string) => JSON.parse(blob),
});
constructor() {}

View file

@@ -29,6 +29,7 @@ class BitcoinApi implements AbstractBitcoinApi {
weight: block.weight,
previousblockhash: block.previousblockhash,
mediantime: block.mediantime,
stale: block.confirmations === -1,
};
}
@@ -64,17 +65,11 @@
}
$getBlockHeightTip(): Promise<number> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
return result.find(tip => tip.status === 'active')!.height;
});
return this.bitcoindClient.getBlockCount();
}
$getBlockHashTip(): Promise<string> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
return result.find(tip => tip.status === 'active')!.hash;
});
return this.bitcoindClient.getBestBlockHash();
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
@@ -415,12 +410,38 @@
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness && vin.witness.length > 1) {
const witnessScript = vin.witness[vin.witness.length - 2];
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
if (vin.prevout.scriptpubkey_type === 'v1_p2tr' && vin.witness) {
const witnessScript = this.witnessToP2TRScript(vin.witness);
if (witnessScript !== null) {
vin.inner_witnessscript_asm = this.convertScriptSigAsm(witnessScript);
}
}
}
/**
* This function must only be called when we know the witness we are parsing
* is a taproot witness.
* @param witness An array of hex strings that represents the witness stack of
* the input.
* @returns null if the witness is not a script spend, and the hex string of
* the script item if it is a script spend.
*/
private witnessToP2TRScript(witness: string[]): string | null {
if (witness.length < 2) return null;
// Note: see BIP341 for parsing details of witness stack
// If there are at least two witness elements, and the first byte of the
// last element is 0x50, this last element is called the annex and
// is removed from the witness stack.
const hasAnnex = witness[witness.length - 1].substring(0, 2) === '50';
// If there are at least two witness elements left, script path spending is used.
// Call the second-to-last stack element s, the script.
// (Note: this phrasing from BIP341 assumes we've *removed* the annex from the stack)
if (hasAnnex && witness.length < 3) return null;
const positionOfScript = hasAnnex ? witness.length - 3 : witness.length - 2;
return witness[positionOfScript];
}
}
export default BitcoinApi;
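
The three witness shapes the annex logic above distinguishes, sketched with placeholder hex (not real signatures or scripts):

// Key-path spend: a single Schnorr signature, no script to extract
//   ['<64-byte sig>']                                       -> null
// Script-path spend, no annex: script is the second-to-last element
//   ['<stack item>', '<script>', '<control block>']         -> '<script>'
// Script-path spend with annex (last element starts with 0x50): third-to-last
//   ['<stack item>', '<script>', '<control block>', '50ab'] -> '<script>'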

View file

@@ -7,7 +7,7 @@ const nodeRpcCredentials: BitcoinRpcCredentials = {
port: config.CORE_RPC.PORT,
user: config.CORE_RPC.USERNAME,
pass: config.CORE_RPC.PASSWORD,
timeout: 60000,
timeout: config.CORE_RPC.TIMEOUT,
};
export default new bitcoin.Client(nodeRpcCredentials);

View file

@@ -7,7 +7,7 @@ const nodeRpcCredentials: BitcoinRpcCredentials = {
port: config.SECOND_CORE_RPC.PORT,
user: config.SECOND_CORE_RPC.USERNAME,
pass: config.SECOND_CORE_RPC.PASSWORD,
timeout: 60000,
timeout: config.SECOND_CORE_RPC.TIMEOUT,
};
export default new bitcoin.Client(nodeRpcCredentials);

View file

@@ -32,8 +32,10 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'backend-info', this.getBackendInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'init-data', this.getInitData)
.get(config.MEMPOOL.API_URL_PREFIX + 'validate-address/:address', this.validateAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/replaces', this.getRbfHistory)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/rbf', this.getRbfHistory)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/cached', this.getCachedTx)
.get(config.MEMPOOL.API_URL_PREFIX + 'replacements', this.getRbfReplacements)
.get(config.MEMPOOL.API_URL_PREFIX + 'fullrbf/replacements', this.getFullRbfReplacements)
.post(config.MEMPOOL.API_URL_PREFIX + 'tx/push', this.$postTransactionForm)
.get(config.MEMPOOL.API_URL_PREFIX + 'donations', async (req, res) => {
try {
@@ -94,6 +96,7 @@
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', this.getBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/audit-summary', this.getBlockAuditSummary)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', this.getBlockTipHeight)
.post(config.MEMPOOL.API_URL_PREFIX + 'psbt/addparents', this.postPsbtCompletion)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from', this.getBlocksByBulk.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from/:to', this.getBlocksByBulk.bind(this))
@@ -110,7 +113,6 @@
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/status', this.getTransactionStatus)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/outspends', this.getTransactionOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/header', this.getBlockHeader)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', this.getBlockTipHeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/hash', this.getBlockTipHash)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/raw', this.getRawBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txids', this.getTxIdsForBlock)
@@ -119,7 +121,6 @@
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', this.getBlockHeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', this.getAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs/chain/:txId', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', this.getAddressPrefix)
;
}
@@ -128,8 +129,9 @@
private getInitData(req: Request, res: Response) {
try {
const result = websocketHandler.getInitData();
res.json(result);
const result = websocketHandler.getSerializedInitData();
res.set('Content-Type', 'application/json');
res.send(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
@@ -208,6 +210,8 @@
bestDescendant: tx.bestDescendant || null,
descendants: tx.descendants || null,
effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
sigops: tx.sigops,
adjustedVsize: tx.adjustedVsize,
});
return;
}
@@ -219,7 +223,12 @@
} else {
let cpfpInfo;
if (config.DATABASE.ENABLED) {
cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
try {
cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
} catch (e) {
res.status(500).send('failed to get CPFP info');
return;
}
}
if (cpfpInfo) {
res.json(cpfpInfo);
@@ -389,9 +398,13 @@
private async getBlockAuditSummary(req: Request, res: Response) {
try {
const transactions = await blocks.$getBlockAuditSummary(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
const auditSummary = await blocks.$getBlockAuditSummary(req.params.hash);
if (auditSummary) {
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(auditSummary);
} else {
return res.status(404).send(`audit not available`);
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
@@ -532,27 +545,28 @@
}
}
private async getAddressTransactions(req: Request, res: Response) {
private async getAddressTransactions(req: Request, res: Response): Promise<void> {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}
try {
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, req.params.txId);
let lastTxId: string = '';
if (req.query.after_txid && typeof req.query.after_txid === 'string') {
lastTxId = req.query.after_txid;
}
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, lastTxId);
res.json(transactions);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
return res.status(413).send(e instanceof Error ? e.message : e);
res.status(413).send(e instanceof Error ? e.message : e);
return;
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getAdressTxChain(req: Request, res: Response) {
res.status(501).send('Not implemented');
}
private async getAddressPrefix(req: Request, res: Response) {
try {
const blockHash = await bitcoinApi.$getAddressPrefix(req.params.prefix);
@@ -589,10 +603,14 @@
}
}
private async getBlockTipHeight(req: Request, res: Response) {
private getBlockTipHeight(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getBlockHeightTip();
res.json(result);
const result = blocks.getCurrentBlockHeight();
if (!result) {
return res.status(503).send(`Service Temporarily Unavailable`);
}
res.setHeader('content-type', 'text/plain');
res.send(result.toString());
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
@@ -638,8 +656,30 @@
private async getRbfHistory(req: Request, res: Response) {
try {
const result = rbfCache.getReplaces(req.params.txId);
res.json(result || []);
const replacements = rbfCache.getRbfTree(req.params.txId) || null;
const replaces = rbfCache.getReplaces(req.params.txId) || null;
res.json({
replacements,
replaces
});
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getRbfReplacements(req: Request, res: Response) {
try {
const result = rbfCache.getRbfTrees(false);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getFullRbfReplacements(req: Request, res: Response) {
try {
const result = rbfCache.getRbfTrees(true);
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
@@ -683,12 +723,7 @@
private async $postTransaction(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
try {
let rawTx;
if (typeof req.body === 'object') {
rawTx = Object.keys(req.body)[0];
} else {
rawTx = req.body;
}
const rawTx = Common.getTransactionFromRequest(req, false);
const txIdResult = await bitcoinApi.$sendRawTransaction(rawTx);
res.send(txIdResult);
} catch (e: any) {
@@ -699,12 +734,8 @@
private async $postTransactionForm(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
const matches = /tx=([a-z0-9]+)/.exec(req.body);
let txHex = '';
if (matches && matches[1]) {
txHex = matches[1];
}
try {
const txHex = Common.getTransactionFromRequest(req, true);
const txIdResult = await bitcoinClient.sendRawTransaction(txHex);
res.send(txIdResult);
} catch (e: any) {

View file

@@ -16,7 +16,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
super(bitcoinClient);
const electrumConfig = { client: 'mempool-v2', version: '1.4' };
const electrumPersistencePolicy = { retryPeriod: 10000, maxRetry: 1000, callback: null };
const electrumPersistencePolicy = { retryPeriod: 1000, maxRetry: Number.MAX_SAFE_INTEGER, callback: null };
const electrumCallbacks = {
onConnect: (client, versionInfo) => { logger.info(`Connected to Electrum Server at ${config.ELECTRUM.HOST}:${config.ELECTRUM.PORT} (${JSON.stringify(versionInfo)})`); },

View file

@@ -89,6 +89,7 @@ export namespace IEsploraApi {
weight: number;
previousblockhash: string;
mediantime: number;
stale: boolean;
}
export interface Address {

View file

@@ -3,65 +3,102 @@ import axios, { AxiosRequestConfig } from 'axios';
import http from 'http';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import { IEsploraApi } from './esplora-api.interface';
import logger from '../../logger';
const axiosConnection = axios.create({
httpAgent: new http.Agent({ keepAlive: true })
httpAgent: new http.Agent({ keepAlive: true, })
});
class ElectrsApi implements AbstractBitcoinApi {
axiosConfig: AxiosRequestConfig = {
private axiosConfigWithUnixSocket: AxiosRequestConfig = config.ESPLORA.UNIX_SOCKET_PATH ? {
socketPath: config.ESPLORA.UNIX_SOCKET_PATH,
timeout: 10000,
} : {
timeout: 10000,
};
private axiosConfigTcpSocketOnly: AxiosRequestConfig = {
timeout: 10000,
};
constructor() { }
private unixSocketRetryTimeout: NodeJS.Timeout | undefined;
private activeAxiosConfig: AxiosRequestConfig;
constructor() {
this.activeAxiosConfig = this.axiosConfigWithUnixSocket;
}
fallbackToTcpSocket() {
if (!this.unixSocketRetryTimeout) {
logger.err(`Unable to connect to esplora unix socket. Falling back to tcp socket. Retrying unix socket in ${config.ESPLORA.RETRY_UNIX_SOCKET_AFTER / 1000} seconds`);
// Retry the unix socket after a few seconds
this.unixSocketRetryTimeout = setTimeout(() => {
logger.info(`Retrying to use unix socket for esplora now (applied for the next query)`);
this.activeAxiosConfig = this.axiosConfigWithUnixSocket;
this.unixSocketRetryTimeout = undefined;
}, config.ESPLORA.RETRY_UNIX_SOCKET_AFTER);
}
// Use the TCP socket (reach a different esplora instance through nginx)
this.activeAxiosConfig = this.axiosConfigTcpSocketOnly;
}
$queryWrapper<T>(url, responseType = 'json'): Promise<T> {
return axiosConnection.get<T>(url, { ...this.activeAxiosConfig, responseType: responseType })
.then((response) => response.data)
.catch((e) => {
if (e?.code === 'ECONNREFUSED') {
this.fallbackToTcpSocket();
// Retry immediately
return axiosConnection.get<T>(url, { ...this.activeAxiosConfig, responseType: responseType })
.then((response) => response.data)
.catch((e) => {
logger.warn(`Cannot query esplora through either the unix socket or the tcp socket. Exception: ${e}`);
throw e;
});
} else {
throw e;
}
});
}
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return axiosConnection.get<IEsploraApi.Transaction['txid'][]>(config.ESPLORA.REST_API_URL + '/mempool/txids', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Transaction['txid'][]>(config.ESPLORA.REST_API_URL + '/mempool/txids');
}
$getRawTransaction(txId: string): Promise<IEsploraApi.Transaction> {
return axiosConnection.get<IEsploraApi.Transaction>(config.ESPLORA.REST_API_URL + '/tx/' + txId, this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Transaction>(config.ESPLORA.REST_API_URL + '/tx/' + txId);
}
$getTransactionHex(txId: string): Promise<string> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex');
}
$getBlockHeightTip(): Promise<number> {
return axiosConnection.get<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height');
}
$getBlockHashTip(): Promise<string> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/blocks/tip/hash', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/blocks/tip/hash');
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
return axiosConnection.get<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids');
}
$getBlockHash(height: number): Promise<string> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/block-height/' + height, this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/block-height/' + height);
}
$getBlockHeader(hash: string): Promise<string> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/header', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/header');
}
$getBlock(hash: string): Promise<IEsploraApi.Block> {
return axiosConnection.get<IEsploraApi.Block>(config.ESPLORA.REST_API_URL + '/block/' + hash, this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Block>(config.ESPLORA.REST_API_URL + '/block/' + hash);
}
$getRawBlock(hash: string): Promise<Buffer> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", { ...this.axiosConfig, responseType: 'arraybuffer' })
return this.$queryWrapper<any>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", 'arraybuffer')
.then((data) => Buffer.from(data));
}
@@ -82,13 +119,11 @@ class ElectrsApi implements AbstractBitcoinApi {
}
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
return axiosConnection.get<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout, this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout);
}
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
return axiosConnection.get<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends', this.axiosConfig)
.then((response) => response.data);
return this.$queryWrapper<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends');
}
async $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]> {

View file

@@ -2,7 +2,7 @@ import config from '../config';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
import logger from '../logger';
import memPool from './mempool';
import { BlockExtended, BlockExtension, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces';
import { BlockExtended, BlockExtension, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo, CpfpSummary, MempoolTransactionExtended } from '../mempool.interfaces';
import { Common } from './common';
import diskCache from './disk-cache';
import transactionUtils from './transaction-utils';
@@ -25,6 +25,7 @@ import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmen
import PricesRepository from '../repositories/PricesRepository';
import priceUpdater from '../tasks/price-updater';
import chainTips from './chain-tips';
import websocketHandler from './websocket-handler';
class Blocks {
private blocks: BlockExtended[] = [];
@@ -34,7 +35,9 @@ class Blocks {
private lastDifficultyAdjustmentTime = 0;
private previousDifficultyRetarget = 0;
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>)[] = [];
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]) => Promise<void>)[] = [];
private mainLoopTimeout: number = 120000;
constructor() { }
@@ -58,7 +61,7 @@
this.newBlockCallbacks.push(fn);
}
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>) {
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]) => Promise<void>) {
this.newAsyncBlockCallbacks.push(fn);
}
@@ -73,10 +76,14 @@
blockHash: string,
blockHeight: number,
onlyCoinbase: boolean,
txIds: string[] | null = null,
quiet: boolean = false,
addMempoolData: boolean = false,
): Promise<TransactionExtended[]> {
const transactions: TransactionExtended[] = [];
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
if (!txIds) {
txIds = await bitcoinApi.$getTxIdsForBlock(blockHash);
}
const mempool = memPool.getMempool();
let transactionsFound = 0;
@@ -94,14 +101,14 @@
logger.debug(`Indexing tx ${i + 1} of ${txIds.length} in block #${blockHeight}`);
}
try {
const tx = await transactionUtils.$getTransactionExtended(txIds[i]);
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, false, addMempoolData);
transactions.push(tx);
transactionsFetched++;
} catch (e) {
try {
if (config.MEMPOOL.BACKEND === 'esplora') {
// Try again with core
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, true);
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, true, addMempoolData);
transactions.push(tx);
transactionsFetched++;
} else {
@@ -124,12 +131,6 @@
}
}
transactions.forEach((tx) => {
if (!tx.cpfpChecked) {
Common.setRelativesAndGetCpfpInfo(tx, mempool); // Child Pay For Parent
}
});
if (!quiet) {
logger.debug(`${transactionsFound} of ${txIds.length} found in mempool. ${transactionsFetched} fetched through backend service.`);
}
@@ -143,7 +144,10 @@
* @returns BlockSummary
*/
public summarizeBlock(block: IBitcoinApi.VerboseBlock): BlockSummary {
const stripped = block.tx.map((tx) => {
if (Common.isLiquid()) {
block = this.convertLiquidFees(block);
}
const stripped = block.tx.map((tx: IBitcoinApi.VerboseTransaction) => {
return {
txid: tx.txid,
vsize: tx.weight / 4,
@@ -158,6 +162,20 @@
};
}
public summarizeBlockTransactions(hash: string, transactions: TransactionExtended[]): BlockSummary {
return {
id: hash,
transactions: Common.stripTransactions(transactions),
};
}
private convertLiquidFees(block: IBitcoinApi.VerboseBlock): IBitcoinApi.VerboseBlock {
block.tx.forEach(tx => {
tx.fee = Object.values(tx.fee || {}).reduce((total, output) => total + output, 0);
});
return block;
}
/**
* Return a block with additional data (reward, coinbase, fees...)
* @param block
@@ -190,8 +208,15 @@
extras.segwitTotalWeight = 0;
} else {
const stats: IBitcoinApi.BlockStats = await bitcoinClient.getBlockStats(block.id);
extras.medianFee = stats.feerate_percentiles[2]; // 50th percentiles
extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
let feeStats = {
medianFee: stats.feerate_percentiles[2], // 50th percentiles
feeRange: [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat(),
};
if (transactions?.length > 1) {
feeStats = Common.calcEffectiveFeeStatistics(transactions);
}
extras.medianFee = feeStats.medianFee;
extras.feeRange = feeStats.feeRange;
extras.totalFees = stats.totalfee;
extras.avgFee = stats.avgfee;
extras.avgFeeRate = stats.avgfeerate;
@@ -260,10 +285,14 @@
}
extras.matchRate = null;
extras.expectedFees = null;
extras.expectedWeight = null;
if (config.MEMPOOL.AUDIT) {
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id);
if (auditScore != null) {
extras.matchRate = auditScore.matchRate;
extras.expectedFees = auditScore.expectedFees;
extras.expectedWeight = auditScore.expectedWeight;
}
}
}
@@ -287,7 +316,7 @@
}
const asciiScriptSig = transactionUtils.hex2ascii(txMinerInfo.vin[0].scriptsig);
const address = txMinerInfo.vout[0].scriptpubkey_address;
const addresses = txMinerInfo.vout.map((vout) => vout.scriptpubkey_address).filter((address) => address);
let pools: PoolTag[] = [];
if (config.DATABASE.ENABLED === true) {
@@ -297,11 +326,13 @@
}
for (let i = 0; i < pools.length; ++i) {
if (address !== undefined) {
const addresses: string[] = typeof pools[i].addresses === 'string' ?
if (addresses.length) {
const poolAddresses: string[] = typeof pools[i].addresses === 'string' ?
JSON.parse(pools[i].addresses) : pools[i].addresses;
if (addresses.indexOf(address) !== -1) {
return pools[i];
for (let y = 0; y < poolAddresses.length; y++) {
if (addresses.indexOf(poolAddresses[y]) !== -1) {
return pools[i];
}
}
}
@@ -393,12 +424,13 @@
try {
// Get all indexed block hash
const unindexedBlockHeights = await blocksRepository.$getCPFPUnindexedBlocks();
logger.info(`Indexing cpfp data for ${unindexedBlockHeights.length} blocks`);
if (!unindexedBlockHeights?.length) {
return;
}
logger.info(`Indexing cpfp data for ${unindexedBlockHeights.length} blocks`);
// Logging
let count = 0;
let countThisRun = 0;
@@ -430,6 +462,46 @@
}
}
/**
* [INDEXING] Index expected fees & weight for all audited blocks
*/
public async $generateAuditStats(): Promise<void> {
const blockIds = await BlocksAuditsRepository.$getBlocksWithoutSummaries();
if (!blockIds?.length) {
return;
}
let timer = Date.now();
let indexedThisRun = 0;
let indexedTotal = 0;
logger.debug(`Indexing ${blockIds.length} block audit details`);
for (const hash of blockIds) {
const summary = await BlocksSummariesRepository.$getTemplate(hash);
let totalFees = 0;
let totalWeight = 0;
for (const tx of summary?.transactions || []) {
totalFees += tx.fee;
totalWeight += (tx.vsize * 4);
}
await BlocksAuditsRepository.$setSummary(hash, totalFees, totalWeight);
const cachedBlock = this.blocks.find(block => block.id === hash);
if (cachedBlock) {
cachedBlock.extras.expectedFees = totalFees;
cachedBlock.extras.expectedWeight = totalWeight;
}
indexedThisRun++;
indexedTotal++;
const elapsedSeconds = (Date.now() - timer) / 1000;
if (elapsedSeconds > 5) {
const blockPerSeconds = indexedThisRun / elapsedSeconds;
logger.debug(`Indexed ${indexedTotal} / ${blockIds.length} block audit details (${blockPerSeconds.toFixed(1)}/s)`);
timer = Date.now();
indexedThisRun = 0;
}
}
logger.debug(`Indexing block audit details completed`);
}
/**
* [INDEXING] Index all blocks metadata for the mining dashboard
*/
@@ -485,7 +557,7 @@
}
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, null, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
newlyIndexed++;
@@ -509,9 +581,16 @@
return await BlocksRepository.$validateChain();
}
public async $updateBlocks() {
public async $updateBlocks(): Promise<number> {
// warn if this run stalls the main loop for more than 2 minutes
const timer = this.startTimer();
diskCache.lock();
let fastForwarded = false;
const blockHeightTip = await bitcoinApi.$getBlockHeightTip();
let handledBlocks = 0;
const blockHeightTip = await bitcoinCoreApi.$getBlockHeightTip();
this.updateTimerProgress(timer, 'got block height tip');
if (this.blocks.length === 0) {
this.currentBlockHeight = Math.max(blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT, -1);
@@ -529,16 +608,21 @@
if (!this.lastDifficultyAdjustmentTime) {
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
this.updateTimerProgress(timer, 'got blockchain info for initial difficulty adjustment');
if (blockchainInfo.blocks === blockchainInfo.headers) {
const heightDiff = blockHeightTip % 2016;
const blockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff);
this.updateTimerProgress(timer, 'got block hash for initial difficulty adjustment');
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
this.updateTimerProgress(timer, 'got block for initial difficulty adjustment');
this.lastDifficultyAdjustmentTime = block.timestamp;
this.currentDifficulty = block.difficulty;
if (blockHeightTip >= 2016) {
const previousPeriodBlockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff - 2016);
this.updateTimerProgress(timer, 'got previous block hash for initial difficulty adjustment');
const previousPeriodBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(previousPeriodBlockHash);
this.updateTimerProgress(timer, 'got previous block for initial difficulty adjustment');
this.previousDifficultyRetarget = (block.difficulty - previousPeriodBlock.difficulty) / previousPeriodBlock.difficulty * 100;
logger.debug(`Initial difficulty adjustment data set.`);
}
@@ -548,57 +632,83 @@
}
while (this.currentBlockHeight < blockHeightTip) {
if (this.currentBlockHeight < blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT) {
if (this.currentBlockHeight === 0) {
this.currentBlockHeight = blockHeightTip;
} else {
this.currentBlockHeight++;
logger.debug(`New block found (#${this.currentBlockHeight})!`);
this.updateTimerProgress(timer, `getting orphaned blocks for ${this.currentBlockHeight}`);
await chainTips.updateOrphanedBlocks();
}
const blockHash = await bitcoinApi.$getBlockHash(this.currentBlockHeight);
this.updateTimerProgress(timer, `getting block data for ${this.currentBlockHeight}`);
const blockHash = await bitcoinCoreApi.$getBlockHash(this.currentBlockHeight);
const verboseBlock = await bitcoinClient.getBlock(blockHash, 2);
const block = BitcoinApi.convertBlock(verboseBlock);
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false);
const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions);
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);
// start async callbacks
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
const txIds: string[] = verboseBlock.tx.map(tx => tx.txid);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false, txIds, false, true) as MempoolTransactionExtended[];
if (config.MEMPOOL.BACKEND !== 'esplora') {
// fill in missing transaction fee data from verboseBlock
for (let i = 0; i < transactions.length; i++) {
if (!transactions[i].fee && transactions[i].txid === verboseBlock.tx[i].txid) {
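// Core's verbose block reports fees in BTC; multiply by 1e8 to get satoshis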
transactions[i].fee = verboseBlock.tx[i].fee * 100_000_000;
}
}
}
const cpfpSummary: CpfpSummary = Common.calculateCpfp(block.height, transactions);
const blockExtended: BlockExtended = await this.$getBlockExtended(block, cpfpSummary.transactions);
const blockSummary: BlockSummary = this.summarizeBlockTransactions(block.id, cpfpSummary.transactions);
this.updateTimerProgress(timer, `got block data for ${this.currentBlockHeight}`);
if (Common.indexingEnabled()) {
if (!fastForwarded) {
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
this.updateTimerProgress(timer, `got block by height for ${this.currentBlockHeight}`);
if (lastBlock !== null && blockExtended.previousblockhash !== lastBlock.id) {
logger.warn(`Chain divergence detected at block ${lastBlock.height}, re-indexing most recent data`);
logger.warn(`Chain divergence detected at block ${lastBlock.height}, re-indexing most recent data`, logger.tags.mining);
// We assume there won't be a reorg with more than 10 block depth
this.updateTimerProgress(timer, `rolling back diverged chain from ${this.currentBlockHeight}`);
await BlocksRepository.$deleteBlocksFrom(lastBlock.height - 10);
await HashratesRepository.$deleteLastEntries();
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock.height - 10);
await cpfpRepository.$deleteClustersFrom(lastBlock.height - 10);
this.blocks = this.blocks.slice(0, -10);
this.updateTimerProgress(timer, `rolled back chain divergence from ${this.currentBlockHeight}`);
for (let i = 10; i >= 0; --i) {
const newBlock = await this.$indexBlock(lastBlock.height - i);
await this.$getStrippedBlockTransactions(newBlock.id, true, true);
this.blocks.push(newBlock);
this.updateTimerProgress(timer, `reindexed block`);
let cpfpSummary;
if (config.MEMPOOL.CPFP_INDEXING) {
await this.$indexCPFP(newBlock.id, lastBlock.height - i);
cpfpSummary = await this.$indexCPFP(newBlock.id, lastBlock.height - i);
this.updateTimerProgress(timer, `reindexed block cpfp`);
}
await this.$getStrippedBlockTransactions(newBlock.id, true, true, cpfpSummary, newBlock.height);
this.updateTimerProgress(timer, `reindexed block summary`);
}
await mining.$indexDifficultyAdjustments();
await DifficultyAdjustmentsRepository.$deleteLastAdjustment();
logger.info(`Re-indexed 10 blocks and summaries. Also re-indexed the last difficulty adjustments. Will re-index latest hashrates in a few seconds.`);
this.updateTimerProgress(timer, `reindexed difficulty adjustments`);
logger.info(`Re-indexed 10 blocks and summaries. Also re-indexed the last difficulty adjustments. Will re-index latest hashrates in a few seconds.`, logger.tags.mining);
indexer.reindex();
}
await blocksRepository.$saveBlockInDatabase(blockExtended);
websocketHandler.handleReorg();
}
}
await blocksRepository.$saveBlockInDatabase(blockExtended);
this.updateTimerProgress(timer, `saved ${this.currentBlockHeight} to database`);
if (!fastForwarded) {
const latestPriceId = await PricesRepository.$getLatestPriceId();
this.updateTimerProgress(timer, `got latest price id ${this.currentBlockHeight}`);
if (priceUpdater.historyInserted === true && latestPriceId !== null) {
await blocksRepository.$saveBlockPrices([{
height: blockExtended.height,
priceId: latestPriceId,
}]);
this.updateTimerProgress(timer, `saved prices for ${this.currentBlockHeight}`);
} else {
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`, logger.tags.mining);
logger.debug(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`, logger.tags.mining);
setTimeout(() => {
indexer.runSingleTask('blocksPrices');
}, 10000);
@ -606,14 +716,20 @@ class Blocks {
// Save blocks summary for visualization if it's enabled
if (Common.blocksSummariesIndexingEnabled() === true) {
await this.$getStrippedBlockTransactions(blockExtended.id, true);
await this.$getStrippedBlockTransactions(blockExtended.id, true, false, cpfpSummary, blockExtended.height);
this.updateTimerProgress(timer, `saved block summary for ${this.currentBlockHeight}`);
}
if (config.MEMPOOL.CPFP_INDEXING) {
this.$indexCPFP(blockExtended.id, this.currentBlockHeight);
this.$saveCpfp(blockExtended.id, this.currentBlockHeight, cpfpSummary);
this.updateTimerProgress(timer, `saved cpfp for ${this.currentBlockHeight}`);
}
}
}
// start async callbacks
this.updateTimerProgress(timer, `starting async callbacks for ${this.currentBlockHeight}`);
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
if (block.height % 2016 === 0) {
if (Common.indexingEnabled()) {
await DifficultyAdjustmentsRepository.$saveAdjustments({
@ -622,6 +738,7 @@ class Blocks {
difficulty: block.difficulty,
adjustment: Math.round((block.difficulty / this.currentDifficulty) * 1000000) / 1000000, // Remove float point noise
});
this.updateTimerProgress(timer, `saved difficulty adjustment for ${this.currentBlockHeight}`);
}
this.previousDifficultyRetarget = (block.difficulty - this.currentDifficulty) / this.currentDifficulty * 100;
@ -629,6 +746,11 @@ class Blocks {
this.currentDifficulty = block.difficulty;
}
// wait for pending async callbacks to finish
this.updateTimerProgress(timer, `waiting for async callbacks to complete for ${this.currentBlockHeight}`);
await Promise.all(callbackPromises);
this.updateTimerProgress(timer, `async callbacks completed for ${this.currentBlockHeight}`);
this.blocks.push(blockExtended);
if (this.blocks.length > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4) {
this.blocks = this.blocks.slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4);
@ -641,12 +763,39 @@ class Blocks {
if (this.newBlockCallbacks.length) {
this.newBlockCallbacks.forEach((cb) => cb(blockExtended, txIds, transactions));
}
if (!memPool.hasPriority()) {
if (!memPool.hasPriority() && (block.height % config.MEMPOOL.DISK_CACHE_BLOCK_INTERVAL === 0)) {
diskCache.$saveCacheToDisk();
}
// wait for pending async callbacks to finish
await Promise.all(callbackPromises);
handledBlocks++;
}
diskCache.unlock();
this.clearTimer(timer);
return handledBlocks;
}
private startTimer() {
const state: any = {
start: Date.now(),
progress: 'begin $updateBlocks',
timer: null,
};
state.timer = setTimeout(() => {
logger.err(`$updateBlocks stalled at "${state.progress}"`);
}, this.mainLoopTimeout);
return state;
}
private updateTimerProgress(state, msg) {
state.progress = msg;
}
private clearTimer(state) {
if (state.timer) {
clearTimeout(state.timer);
}
}
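// A minimal, standalone sketch of the stall-watchdog pattern implemented by
// startTimer/updateTimerProgress/clearTimer above. The names (watchJob,
// JobTimer) and the 2-minute timeout are illustrative assumptions, not part
// of the codebase.
interface JobTimer {
  start: number;
  progress: string;
  timer: ReturnType<typeof setTimeout> | null;
}
function watchJob(timeoutMs: number, onStall: (progress: string) => void): JobTimer {
  const state: JobTimer = { start: Date.now(), progress: 'begin', timer: null };
  // fires only if the job never clears the timer; the last progress string
  // recorded before the stall pinpoints the blocking step
  state.timer = setTimeout(() => onStall(state.progress), timeoutMs);
  return state;
}
// usage: const t = watchJob(120_000, (p) => logger.err(`stalled at "${p}"`));
// t.progress = 'fetching block'; ... if (t.timer) { clearTimeout(t.timer); }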
@ -673,6 +822,16 @@ class Blocks {
return blockExtended;
}
public async $indexStaleBlock(hash: string): Promise<BlockExtended> {
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(hash);
const transactions = await this.$getTransactionsExtended(hash, block.height, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
blockExtended.canonical = await bitcoinApi.$getBlockHash(block.height);
return blockExtended;
}
/**
* Get one block by its hash
*/
@ -690,11 +849,15 @@ class Blocks {
// Bitcoin network, add our custom data on top
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(hash);
return await this.$indexBlock(block.height);
if (block.stale) {
return await this.$indexStaleBlock(hash);
} else {
return await this.$indexBlock(block.height);
}
}
public async $getStrippedBlockTransactions(hash: string, skipMemoryCache = false,
skipDBLookup = false): Promise<TransactionStripped[]>
skipDBLookup = false, cpfpSummary?: CpfpSummary, blockHeight?: number): Promise<TransactionStripped[]>
{
if (skipMemoryCache === false) {
// Check the memory cache
@ -712,13 +875,35 @@ class Blocks {
}
}
// Call Core RPC
const block = await bitcoinClient.getBlock(hash, 2);
const summary = this.summarizeBlock(block);
let height = blockHeight;
let summary: BlockSummary;
if (cpfpSummary) {
summary = {
id: hash,
transactions: cpfpSummary.transactions.map(tx => {
return {
txid: tx.txid,
fee: tx.fee,
vsize: tx.vsize,
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0)),
rate: tx.effectiveFeePerVsize
};
}),
};
} else {
// Call Core RPC
const block = await bitcoinClient.getBlock(hash, 2);
summary = this.summarizeBlock(block);
height = block.height;
}
if (height == null) {
const block = await bitcoinApi.$getBlock(hash);
height = block.height;
}
// Index the response if needed
if (Common.blocksSummariesIndexingEnabled() === true) {
await BlocksSummariesRepository.$saveSummary({height: block.height, mined: summary});
await BlocksSummariesRepository.$saveTransactions(height, hash, summary.transactions);
}
return summary.transactions;
@ -834,11 +1019,12 @@ class Blocks {
if (cleanBlock.fee_amt_percentiles === null) {
const block = await bitcoinClient.getBlock(cleanBlock.hash, 2);
const summary = this.summarizeBlock(block);
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
await BlocksSummariesRepository.$saveTransactions(cleanBlock.height, cleanBlock.hash, summary.transactions);
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
}
if (cleanBlock.fee_amt_percentiles !== null) {
cleanBlock.median_fee_amt = cleanBlock.fee_amt_percentiles[3];
await blocksRepository.$updateFeeAmounts(cleanBlock.hash, cleanBlock.fee_amt_percentiles, cleanBlock.median_fee_amt);
}
}
@ -873,19 +1059,11 @@ class Blocks {
}
public async $getBlockAuditSummary(hash: string): Promise<any> {
let summary;
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
summary = await BlocksAuditsRepository.$getBlockAudit(hash);
return BlocksAuditsRepository.$getBlockAudit(hash);
} else {
return null;
}
// fallback to non-audited transaction summary
if (!summary?.transactions?.length) {
const strippedTransactions = await this.$getStrippedBlockTransactions(hash);
summary = {
transactions: strippedTransactions
};
}
return summary;
}
public getLastDifficultyAdjustmentTime(): number {
@ -903,44 +1081,26 @@ class Blocks {
public async $indexCPFP(hash: string, height: number): Promise<void> {
const block = await bitcoinClient.getBlock(hash, 2);
const transactions = block.tx.map(tx => {
tx.vsize = tx.weight / 4;
tx.fee *= 100_000_000;
return tx;
});
const clusters: any[] = [];
const summary = Common.calculateCpfp(height, transactions);
let cluster: TransactionStripped[] = [];
let ancestors: { [txid: string]: boolean } = {};
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
cluster.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += tx.vsize;
});
const effectiveFeePerVsize = totalFee / totalVSize;
if (cluster.length > 1) {
clusters.push({
root: cluster[0].txid,
height,
txs: cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
});
}
cluster = [];
ancestors = {};
await this.$saveCpfp(hash, height, summary);
const effectiveFeeStats = Common.calcEffectiveFeeStatistics(summary.transactions);
await blocksRepository.$saveEffectiveFeeStats(hash, effectiveFeeStats);
}
public async $saveCpfp(hash: string, height: number, cpfpSummary: CpfpSummary): Promise<void> {
try {
const result = await cpfpRepository.$batchSaveClusters(cpfpSummary.clusters);
if (!result) {
await cpfpRepository.$insertProgressMarker(height);
}
cluster.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
const result = await cpfpRepository.$batchSaveClusters(clusters);
if (!result) {
await cpfpRepository.$insertProgressMarker(height);
} catch (e) {
// not a fatal error, we'll try again next time the indexer runs
}
}
}

View file

@ -1,4 +1,6 @@
import { CpfpInfo, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import * as bitcoinjs from 'bitcoinjs-lib';
import { Request } from 'express';
import { Ancestor, CpfpInfo, CpfpSummary, CpfpCluster, EffectiveFeeStats, MempoolBlockWithTransactions, TransactionExtended, MempoolTransactionExtended, TransactionStripped, WorkingEffectiveFeeStats } from '../mempool.interfaces';
import config from '../config';
import { NodeSocket } from '../repositories/NodesSocketsRepository';
import { isIP } from 'net';
@ -57,35 +59,66 @@ export class Common {
return arr;
}
static findRbfTransactions(added: TransactionExtended[], deleted: TransactionExtended[]): { [txid: string]: TransactionExtended } {
const matches: { [txid: string]: TransactionExtended } = {};
deleted
.forEach((deletedTx) => {
const foundMatches = added.find((addedTx) => {
static findRbfTransactions(added: MempoolTransactionExtended[], deleted: MempoolTransactionExtended[]): { [txid: string]: MempoolTransactionExtended[] } {
const matches: { [txid: string]: MempoolTransactionExtended[] } = {};
added
.forEach((addedTx) => {
const foundMatches = deleted.filter((deletedTx) => {
// The new tx must, absolutely speaking, pay at least as much fee as the replaced tx.
return addedTx.fee > deletedTx.fee
// The new transaction must pay more fee per kB than the replaced tx.
&& addedTx.feePerVsize > deletedTx.feePerVsize
&& addedTx.adjustedFeePerVsize > deletedTx.adjustedFeePerVsize
// Spends one or more of the same inputs
&& deletedTx.vin.some((deletedVin) =>
addedTx.vin.some((vin) => vin.txid === deletedVin.txid && vin.vout === deletedVin.vout));
});
if (foundMatches) {
matches[deletedTx.txid] = foundMatches;
if (foundMatches?.length) {
matches[addedTx.txid] = foundMatches;
}
});
return matches;
}
static findMinedRbfTransactions(minedTransactions: TransactionExtended[], spendMap: Map<string, MempoolTransactionExtended>): { [txid: string]: { replaced: MempoolTransactionExtended[], replacedBy: TransactionExtended }} {
const matches: { [txid: string]: { replaced: MempoolTransactionExtended[], replacedBy: TransactionExtended }} = {};
for (const tx of minedTransactions) {
const replaced: Set<MempoolTransactionExtended> = new Set();
for (let i = 0; i < tx.vin.length; i++) {
const vin = tx.vin[i];
const match = spendMap.get(`${vin.txid}:${vin.vout}`);
if (match && match.txid !== tx.txid) {
replaced.add(match);
// remove this tx from the spendMap
// prevents the same tx from being counted as replaced more than once
for (const replacedVin of match.vin) {
const key = `${replacedVin.txid}:${replacedVin.vout}`;
spendMap.delete(key);
}
}
const key = `${vin.txid}:${vin.vout}`;
spendMap.delete(key);
}
if (replaced.size) {
matches[tx.txid] = { replaced: Array.from(replaced), replacedBy: tx };
}
}
return matches;
}
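// A hedged sketch of how the spendMap consumed above could be built: one entry
// per outpoint ("txid:vout") spent by an unconfirmed transaction, so a mined
// transaction spending the same outpoint flags the mempool one as replaced.
// The structural tx type and buildSpendMap name are illustrative, not the
// project's actual helper.
type SpendingTx = { txid: string, vin: { txid: string, vout: number }[] };
function buildSpendMap(mempool: { [txid: string]: SpendingTx }): Map<string, SpendingTx> {
  const spendMap: Map<string, SpendingTx> = new Map();
  for (const tx of Object.values(mempool)) {
    for (const vin of tx.vin) {
      spendMap.set(`${vin.txid}:${vin.vout}`, tx); // outpoint -> spender
    }
  }
  return spendMap;
}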
static stripTransaction(tx: TransactionExtended): TransactionStripped {
return {
txid: tx.txid,
fee: tx.fee,
vsize: tx.weight / 4,
value: tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0),
rate: tx.effectiveFeePerVsize,
};
}
static stripTransactions(txs: TransactionExtended[]): TransactionStripped[] {
return txs.map(this.stripTransaction);
}
static sleep$(ms: number): Promise<void> {
return new Promise((resolve) => {
setTimeout(() => {
@ -101,18 +134,18 @@ export class Common {
}
}
static setRelativesAndGetCpfpInfo(tx: TransactionExtended, memPool: { [txid: string]: TransactionExtended }): CpfpInfo {
static setRelativesAndGetCpfpInfo(tx: MempoolTransactionExtended, memPool: { [txid: string]: MempoolTransactionExtended }): CpfpInfo {
const parents = this.findAllParents(tx, memPool);
const lowerFeeParents = parents.filter((parent) => parent.feePerVsize < tx.effectiveFeePerVsize);
const lowerFeeParents = parents.filter((parent) => parent.adjustedFeePerVsize < tx.effectiveFeePerVsize);
let totalWeight = tx.weight + lowerFeeParents.reduce((prev, val) => prev + val.weight, 0);
let totalWeight = (tx.adjustedVsize * 4) + lowerFeeParents.reduce((prev, val) => prev + (val.adjustedVsize * 4), 0);
let totalFees = tx.fee + lowerFeeParents.reduce((prev, val) => prev + val.fee, 0);
tx.ancestors = parents
.map((t) => {
return {
txid: t.txid,
weight: t.weight,
weight: (t.adjustedVsize * 4),
fee: t.fee,
};
});
@ -133,8 +166,8 @@ export class Common {
}
private static findAllParents(tx: TransactionExtended, memPool: { [txid: string]: TransactionExtended }): TransactionExtended[] {
let parents: TransactionExtended[] = [];
private static findAllParents(tx: MempoolTransactionExtended, memPool: { [txid: string]: MempoolTransactionExtended }): MempoolTransactionExtended[] {
let parents: MempoolTransactionExtended[] = [];
tx.vin.forEach((parent) => {
if (parents.find((p) => p.txid === parent.txid)) {
return;
@ -142,17 +175,17 @@ export class Common {
const parentTx = memPool[parent.txid];
if (parentTx) {
if (tx.bestDescendant && tx.bestDescendant.fee / (tx.bestDescendant.weight / 4) > parentTx.feePerVsize) {
if (tx.bestDescendant && tx.bestDescendant.fee / (tx.bestDescendant.weight / 4) > parentTx.adjustedFeePerVsize) {
if (parentTx.bestDescendant && parentTx.bestDescendant.fee < tx.fee + tx.bestDescendant.fee) {
parentTx.bestDescendant = {
weight: tx.weight + tx.bestDescendant.weight,
weight: (tx.adjustedVsize * 4) + tx.bestDescendant.weight,
fee: tx.fee + tx.bestDescendant.fee,
txid: tx.txid,
};
}
} else if (tx.feePerVsize > parentTx.feePerVsize) {
} else if (tx.adjustedFeePerVsize > parentTx.adjustedFeePerVsize) {
parentTx.bestDescendant = {
weight: tx.weight,
weight: (tx.adjustedVsize * 4),
fee: tx.fee,
txid: tx.txid
};
@ -164,6 +197,30 @@ export class Common {
return parents;
}
// calculates the ratio of matched transactions to projected transactions by weight
static getSimilarity(projectedBlock: MempoolBlockWithTransactions, transactions: TransactionExtended[]): number {
let matchedWeight = 0;
let projectedWeight = 0;
const inBlock = {};
for (const tx of transactions) {
inBlock[tx.txid] = tx;
}
// look for transactions that were expected in the template, but missing from the mined block
for (const tx of projectedBlock.transactions) {
if (inBlock[tx.txid]) {
matchedWeight += tx.vsize * 4;
}
projectedWeight += tx.vsize * 4;
}
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
return projectedWeight ? matchedWeight / projectedWeight : 1;
}
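// Worked example with assumed numbers: if the projected block contains
// 3,900,000 WU of transactions and 3,510,000 WU of them appear in the mined
// block, then after crediting the coinbase weight w to both sums the score is
// (3_510_000 + w) / (3_900_000 + w) ≈ 0.90. The coinbase (transactions[0]) is
// added to both sides because no template can predict it.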
static getSqlInterval(interval: string | null): string | null {
switch (interval) {
case '24h': return '1 DAY';
@ -321,4 +378,364 @@ export class Common {
};
}
}
static calculateCpfp(height: number, transactions: TransactionExtended[]): CpfpSummary {
const clusters: CpfpCluster[] = []; // list of all cpfp clusters in this block
const clusterMap: { [txid: string]: CpfpCluster } = {}; // map transactions to their cpfp cluster
let clusterTxs: TransactionExtended[] = []; // working list of elements of the current cluster
let ancestors: { [txid: string]: boolean } = {}; // working set of ancestors of the current cluster root
const txMap = {};
// initialize the txMap
for (const tx of transactions) {
txMap[tx.txid] = tx;
}
// reverse pass to identify CPFP clusters
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
clusterTxs.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += (tx.weight / 4);
});
const effectiveFeePerVsize = totalFee / totalVSize;
let cluster: CpfpCluster;
if (clusterTxs.length > 1) {
cluster = {
root: clusterTxs[0].txid,
height,
txs: clusterTxs.map(tx => { return { txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
};
clusters.push(cluster);
}
clusterTxs.forEach(tx => {
txMap[tx.txid].effectiveFeePerVsize = effectiveFeePerVsize;
if (cluster) {
clusterMap[tx.txid] = cluster;
}
});
// reset working vars
clusterTxs = [];
ancestors = {};
}
clusterTxs.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
// forward pass to enforce ancestor rate caps
for (const tx of transactions) {
let minAncestorRate = tx.effectiveFeePerVsize;
for (const vin of tx.vin) {
if (txMap[vin.txid]?.effectiveFeePerVsize) {
minAncestorRate = Math.min(minAncestorRate, txMap[vin.txid].effectiveFeePerVsize);
}
}
// check rounded values to skip cases with almost identical fees
const roundedMinAncestorRate = Math.ceil(minAncestorRate);
const roundedEffectiveFeeRate = Math.floor(tx.effectiveFeePerVsize);
if (roundedMinAncestorRate < roundedEffectiveFeeRate) {
tx.effectiveFeePerVsize = minAncestorRate;
if (!clusterMap[tx.txid]) {
// add a single-tx cluster to record the dependent rate
const cluster = {
root: tx.txid,
height,
txs: [{ txid: tx.txid, weight: tx.weight, fee: tx.fee || 0 }],
effectiveFeePerVsize: minAncestorRate,
};
clusterMap[tx.txid] = cluster;
clusters.push(cluster);
} else {
// update the existing cluster with the dependent rate
clusterMap[tx.txid].effectiveFeePerVsize = minAncestorRate;
}
}
}
return {
transactions,
clusters,
};
}
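// Worked CPFP example with assumed numbers: a 200 vB parent paying 1 sat/vB
// plus a 100 vB child paying 50 sat/vB form one cluster with
// totalFee = 200 + 5000 = 5200 sats over totalVSize = 300 vB, so both get
// effectiveFeePerVsize = 5200 / 300 ≈ 17.3 sat/vB. The forward pass then caps
// any transaction whose own rate exceeds the effective rate of an ancestor it
// depends on.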
static calcEffectiveFeeStatistics(transactions: { weight: number, fee: number, effectiveFeePerVsize?: number, txid: string }[]): EffectiveFeeStats {
const sortedTxs = transactions.map(tx => { return { txid: tx.txid, weight: tx.weight, rate: tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4)) }; }).sort((a, b) => a.rate - b.rate);
let weightCount = 0;
let medianFee = 0;
let medianWeight = 0;
// calculate the "medianFee" as the average fee rate of the middle 10000 weight units of transactions
const leftBound = 1995000;
const rightBound = 2005000;
for (let i = 0; i < sortedTxs.length && weightCount < rightBound; i++) {
const left = weightCount;
const right = weightCount + sortedTxs[i].weight;
if (right > leftBound) {
const weight = Math.min(right, rightBound) - Math.max(left, leftBound);
medianFee += (sortedTxs[i].rate * (weight / 4) );
medianWeight += weight;
}
weightCount += sortedTxs[i].weight;
}
const medianFeeRate = medianWeight ? (medianFee / (medianWeight / 4)) : 0;
// minimum effective fee heuristic:
// lowest of
// a) the 1st percentile of effective fee rates
// b) the minimum effective fee rate in the last 2% of transactions (in block order)
const minFee = Math.min(
Common.getNthPercentile(1, sortedTxs).rate,
transactions.slice(-transactions.length / 50).reduce((min, tx) => { return Math.min(min, tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4))); }, Infinity)
);
// maximum effective fee heuristic:
// highest of
// a) the 99th percentile of effective fee rates
// b) the maximum effective fee rate in the first 2% of transactions (in block order)
const maxFee = Math.max(
Common.getNthPercentile(99, sortedTxs).rate,
transactions.slice(0, transactions.length / 50).reduce((max, tx) => { return Math.max(max, tx.effectiveFeePerVsize || ((tx.fee || 0) / (tx.weight / 4))); }, 0)
);
return {
medianFee: medianFeeRate,
feeRange: [
minFee,
[10,25,50,75,90].map(n => Common.getNthPercentile(n, sortedTxs).rate),
maxFee,
].flat(),
};
}
static getNthPercentile(n: number, sortedDistribution: any[]): any {
return sortedDistribution[Math.floor((sortedDistribution.length - 1) * (n / 100))];
}
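// Illustrative lookup: for a sorted array of 11 rates,
// getNthPercentile(50, arr) returns arr[Math.floor(10 * 0.50)] = arr[5] (the
// median element), and getNthPercentile(99, arr) returns
// arr[Math.floor(10 * 0.99)] = arr[9], one slot below the maximum.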
static getTransactionFromRequest(req: Request, form: boolean): string {
let rawTx: any = typeof req.body === 'object' && form
? Object.values(req.body)[0] as any
: req.body;
if (typeof rawTx !== 'string') {
throw Object.assign(new Error('Non-string request body'), { code: -1 });
}
// Support both upper and lower case hex
// Support both txHash= Form and direct API POST
const reg = form ? /^txHash=((?:[a-fA-F0-9]{2})+)$/ : /^((?:[a-fA-F0-9]{2})+)$/;
const matches = reg.exec(rawTx);
if (!matches || !matches[1]) {
throw Object.assign(new Error('Non-hex request body'), { code: -2 });
}
// Guaranteed to be a hex string of multiple of 2
// Guaranteed to be lower case
// Guaranteed to pass validation (see function below)
return this.validateTransactionHex(matches[1].toLowerCase());
}
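// Illustrative inputs for the parsing above: a form body "txHash=DEADBEEF" and
// a raw body "deadbeef" both resolve to "deadbeef" (lower-cased before
// validation), while "xyz" fails the hex regex and throws with code -2.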
private static validateTransactionHex(txhex: string): string {
// Do not mutate txhex
// We assume txhex to be valid hex (output of getTransactionFromRequest above)
// Check 1: Valid transaction parse
let tx: bitcoinjs.Transaction;
try {
tx = bitcoinjs.Transaction.fromHex(txhex);
} catch(e) {
throw Object.assign(new Error('Invalid transaction (could not parse)'), { code: -4 });
}
// Check 2: Simple size check
if (tx.weight() > config.MEMPOOL.MAX_PUSH_TX_SIZE_WEIGHT) {
throw Object.assign(new Error(`Transaction too large (max ${config.MEMPOOL.MAX_PUSH_TX_SIZE_WEIGHT} weight units)`), { code: -3 });
}
// Check 3: Check unreachable script in taproot (if not allowed)
if (!config.MEMPOOL.ALLOW_UNREACHABLE) {
tx.ins.forEach(input => {
const witness = input.witness;
// See BIP 341: Script validation rules
const hasAnnex = witness.length >= 2 &&
witness[witness.length - 1][0] === 0x50;
const scriptSpendMinLength = hasAnnex ? 3 : 2;
const maybeScriptSpend = witness.length >= scriptSpendMinLength;
if (maybeScriptSpend) {
const controlBlock = witness[witness.length - scriptSpendMinLength + 1];
if (controlBlock.length === 0 || !this.isValidLeafVersion(controlBlock[0])) {
// Skip this input, it's not taproot
return;
}
// Definitely taproot. Get script
const script = witness[witness.length - scriptSpendMinLength];
const decompiled = bitcoinjs.script.decompile(script);
if (!decompiled || decompiled.length < 2) {
// Skip this input
return;
}
// Iterate up to second last (will look ahead 1 item)
for (let i = 0; i < decompiled.length - 1; i++) {
const first = decompiled[i];
const second = decompiled[i + 1];
if (
first === bitcoinjs.opcodes.OP_FALSE &&
second === bitcoinjs.opcodes.OP_IF
) {
throw Object.assign(new Error('Unreachable taproot scripts not allowed'), { code: -5 });
}
}
}
})
}
// Pass through the input string untouched
return txhex;
}
private static isValidLeafVersion(leafVersion: number): boolean {
// See Note 7 in BIP341
// https://github.com/bitcoin/bips/blob/66a1a8151021913047934ebab3f8883f2f8ca75b/bip-0341.mediawiki#cite_note-7
// "What constraints are there on the leaf version?"
// Must be an integer between 0 and 255
// Since we're parsing a byte
if (Math.floor(leafVersion) !== leafVersion || leafVersion < 0 || leafVersion > 255) {
return false;
}
// "the leaf version cannot be odd"
if ((leafVersion & 0x01) === 1) {
return false;
}
// "The values that comply to this rule are
// the 32 even values between 0xc0 and 0xfe
if (leafVersion >= 0xc0 && leafVersion <= 0xfe) {
return true;
}
// and also 0x66, 0x7e, 0x80, 0x84, 0x96, 0x98, 0xba, 0xbc, 0xbe."
if ([0x66, 0x7e, 0x80, 0x84, 0x96, 0x98, 0xba, 0xbc, 0xbe].includes(leafVersion)) {
return true;
}
// Otherwise, invalid
return false;
}
}
/**
* Class to calculate average fee rates of a list of transactions
* at certain weight percentiles, in a single pass
*
* init with:
* maxWeight - the total weight to measure percentiles relative to (e.g. 4MW for a single block)
* percentileBandWidth - how many weight units to average over for each percentile (as a % of maxWeight)
* percentiles - an array of weight percentiles to compute, in %
*
* then call .processNext(tx) for each transaction, in descending order
*
* retrieve the final results with .getFeeStats()
*/
export class OnlineFeeStatsCalculator {
private maxWeight: number;
private percentiles = [10,25,50,75,90];
private bandWidthPercent = 2;
private bandWidth: number = 0;
private bandIndex = 0;
private leftBound = 0;
private rightBound = 0;
private inBand = false;
private totalBandFee = 0;
private totalBandWeight = 0;
private minBandRate = Infinity;
private maxBandRate = 0;
private feeRange: { avg: number, min: number, max: number }[] = [];
private totalWeight: number = 0;
constructor (maxWeight: number, percentileBandWidth?: number, percentiles?: number[]) {
this.maxWeight = maxWeight;
if (percentiles && percentiles.length) {
this.percentiles = percentiles;
}
if (percentileBandWidth != null) {
this.bandWidthPercent = percentileBandWidth;
}
this.bandWidth = this.maxWeight * (this.bandWidthPercent / 100);
// add min/max percentiles aligned to the ends of the range
this.percentiles.unshift(this.bandWidthPercent / 2);
this.percentiles.push(100 - (this.bandWidthPercent / 2));
this.setNextBounds();
}
processNext(tx: { weight: number, fee: number, effectiveFeePerVsize?: number, feePerVsize?: number, rate?: number, txid: string }): void {
let left = this.totalWeight;
const right = this.totalWeight + tx.weight;
if (!this.inBand && right <= this.leftBound) {
this.totalWeight += tx.weight;
return;
}
while (left < right) {
if (right > this.leftBound) {
this.inBand = true;
const txRate = (tx.rate || tx.effectiveFeePerVsize || tx.feePerVsize || 0);
const weight = Math.min(right, this.rightBound) - Math.max(left, this.leftBound);
this.totalBandFee += (txRate * weight);
this.totalBandWeight += weight;
this.maxBandRate = Math.max(this.maxBandRate, txRate);
this.minBandRate = Math.min(this.minBandRate, txRate);
}
left = Math.min(right, this.rightBound);
if (left >= this.rightBound) {
this.inBand = false;
const avgBandFeeRate = this.totalBandWeight ? (this.totalBandFee / this.totalBandWeight) : 0;
this.feeRange.unshift({ avg: avgBandFeeRate, min: this.minBandRate, max: this.maxBandRate });
this.bandIndex++;
this.setNextBounds();
this.totalBandFee = 0;
this.totalBandWeight = 0;
this.minBandRate = Infinity;
this.maxBandRate = 0;
}
}
this.totalWeight += tx.weight;
}
private setNextBounds(): void {
const nextPercentile = this.percentiles[this.bandIndex];
if (nextPercentile != null) {
this.leftBound = ((nextPercentile / 100) * this.maxWeight) - (this.bandWidth / 2);
this.rightBound = this.leftBound + this.bandWidth;
} else {
this.leftBound = Infinity;
this.rightBound = Infinity;
}
}
getRawFeeStats(): WorkingEffectiveFeeStats {
if (this.totalBandWeight > 0) {
const avgBandFeeRate = this.totalBandWeight ? (this.totalBandFee / this.totalBandWeight) : 0;
this.feeRange.unshift({ avg: avgBandFeeRate, min: this.minBandRate, max: this.maxBandRate });
}
while (this.feeRange.length < this.percentiles.length) {
this.feeRange.unshift({ avg: 0, min: 0, max: 0 });
}
return {
minFee: this.feeRange[0].min,
medianFee: this.feeRange[Math.floor(this.feeRange.length / 2)].avg,
maxFee: this.feeRange[this.feeRange.length - 1].max,
feeRange: this.feeRange.map(f => f.avg),
};
}
getFeeStats(): EffectiveFeeStats {
const stats = this.getRawFeeStats();
stats.feeRange[0] = stats.minFee;
stats.feeRange[stats.feeRange.length - 1] = stats.maxFee;
return stats;
}
}
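// A hedged usage sketch for OnlineFeeStatsCalculator, assuming a 4M-WU block
// and transactions already sorted by descending fee rate; the sample values
// are invented for illustration.
const feeCalc = new OnlineFeeStatsCalculator(4_000_000);
const sampleTxs = [
  { txid: 'a', weight: 2_000_000, fee: 10_000_000, effectiveFeePerVsize: 20 },
  { txid: 'b', weight: 1_500_000, fee: 3_750_000, effectiveFeePerVsize: 10 },
  { txid: 'c', weight: 500_000, fee: 625_000, effectiveFeePerVsize: 5 },
];
for (const tx of sampleTxs) {
  feeCalc.processNext(tx); // single pass, highest-rate transaction first
}
const sampleStats = feeCalc.getFeeStats(); // { medianFee, feeRange: [min, ..., max] }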

View file

@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
import { RowDataPacket } from 'mysql2';
class DatabaseMigration {
private static currentVersion = 58;
private static currentVersion = 64;
private queryTimeout = 3600_000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
@ -497,6 +497,7 @@ class DatabaseMigration {
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('DELETE FROM `pools`');
await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
await this.$executeQuery(`UPDATE state SET string = NULL WHERE name = 'pools_json_sha'`);
this.uniqueLog(logger.notice, '`pools` table has been truncated');
await this.updateToSchemaVersion(56);
}
@ -510,6 +511,43 @@ class DatabaseMigration {
// We only run some migration queries for this version
await this.updateToSchemaVersion(58);
}
if (databaseSchemaVersion < 59 && (config.MEMPOOL.NETWORK === 'signet' || config.MEMPOOL.NETWORK === 'testnet')) {
// https://github.com/mempool/mempool/issues/3360
await this.$executeQuery(`TRUNCATE prices`);
}
if (databaseSchemaVersion < 60 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD sigop_txs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(60);
}
if (databaseSchemaVersion < 61 && isBitcoin === true) {
// Break block templates into their own table
if (! await this.$checkIfTableExists('blocks_templates')) {
await this.$executeQuery('CREATE TABLE blocks_templates AS SELECT id, template FROM blocks_summaries WHERE template != "[]"');
}
await this.$executeQuery('ALTER TABLE blocks_templates MODIFY template JSON DEFAULT "[]"');
await this.$executeQuery('ALTER TABLE blocks_templates ADD PRIMARY KEY (id)');
await this.$executeQuery('ALTER TABLE blocks_summaries DROP COLUMN template');
await this.updateToSchemaVersion(61);
}
if (databaseSchemaVersion < 62 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD expected_fees BIGINT UNSIGNED DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD expected_weight BIGINT UNSIGNED DEFAULT NULL');
await this.updateToSchemaVersion(62);
}
if (databaseSchemaVersion < 63 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD fullrbf_txs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(63);
}
if (databaseSchemaVersion < 64 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD features text NULL');
await this.updateToSchemaVersion(64);
}
}
/**
@ -1028,7 +1066,7 @@ class DatabaseMigration {
}
public async $blocksReindexingTruncate(): Promise<void> {
logger.warn(`Truncating pools, blocks and hashrates for re-indexing (using '--reindex-blocks'). You can cancel this command within 5 seconds`);
logger.warn(`Truncating pools, blocks, hashrates and difficulty_adjustments tables for re-indexing (using '--reindex-blocks'). You can cancel this command within 5 seconds`);
await Common.sleep$(5000);
await this.$executeQuery(`TRUNCATE blocks`);

View file

@ -24,31 +24,29 @@ export function calcDifficultyAdjustment(
network: string,
latestBlockTimestamp: number,
): DifficultyAdjustment {
const ESTIMATE_LAG_BLOCKS = 146; // For first 7.2% of epoch, don't estimate.
const EPOCH_BLOCK_LENGTH = 2016; // Bitcoin mainnet
const BLOCK_SECONDS_TARGET = 600; // Bitcoin mainnet
const TESTNET_MAX_BLOCK_SECONDS = 1200; // Bitcoin testnet
const diffSeconds = nowSeconds - DATime;
const diffSeconds = Math.max(0, nowSeconds - DATime);
const blocksInEpoch = (blockHeight >= 0) ? blockHeight % EPOCH_BLOCK_LENGTH : 0;
const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
const nextRetargetHeight = (blockHeight >= 0) ? blockHeight + remainingBlocks : 0;
const expectedBlocks = diffSeconds / BLOCK_SECONDS_TARGET;
const actualTimespan = (blocksInEpoch === 2015 ? latestBlockTimestamp : nowSeconds) - DATime;
let difficultyChange = 0;
let timeAvgSecs = diffSeconds / blocksInEpoch;
// Only calculate the estimate once we have 7.2% of blocks in current epoch
if (blocksInEpoch >= ESTIMATE_LAG_BLOCKS) {
difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
difficultyChange = 300;
}
// Max decrease is /4 (-75%)
if (difficultyChange < -75) {
difficultyChange = -75;
}
let timeAvgSecs = blocksInEpoch ? diffSeconds / blocksInEpoch : BLOCK_SECONDS_TARGET;
difficultyChange = (BLOCK_SECONDS_TARGET / (actualTimespan / (blocksInEpoch + 1)) - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
difficultyChange = 300;
}
// Max decrease is /4 (-75%)
if (difficultyChange < -75) {
difficultyChange = -75;
}
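// Worked example with assumed timestamps: with blocksInEpoch = 1007, if the
// 1008 blocks since the last retarget took 504,000 s (500 s per block on
// average), then difficultyChange = (600 / (504000 / 1008) - 1) * 100 = +20%,
// comfortably inside the [-75%, +300%] clamp applied above.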
// Testnet difficulty is set to 1 after 20 minutes of no blocks,

View file

@ -7,32 +7,38 @@ import logger from '../logger';
import config from '../config';
import { TransactionExtended } from '../mempool.interfaces';
import { Common } from './common';
import rbfCache from './rbf-cache';
class DiskCache {
private cacheSchemaVersion = 3;
private rbfCacheSchemaVersion = 1;
private static TMP_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/tmp-cache.json';
private static TMP_FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/tmp-cache{number}.json';
private static FILE_NAME = config.MEMPOOL.CACHE_DIR + '/cache.json';
private static FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/cache{number}.json';
private static TMP_RBF_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/tmp-rbfcache.json';
private static RBF_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/rbfcache.json';
private static CHUNK_FILES = 25;
private isWritingCache = false;
private ignoreBlocksCache = false;
private semaphore: { resume: (() => void)[], locks: number } = {
resume: [],
locks: 0,
};
constructor() {
if (!cluster.isMaster) {
if (!cluster.isPrimary) {
return;
}
process.on('SIGINT', (e) => {
this.saveCacheToDiskSync();
process.exit(2);
});
process.on('SIGTERM', (e) => {
this.saveCacheToDiskSync();
process.exit(2);
this.$saveCacheToDisk(true);
process.exit(0);
});
}
async $saveCacheToDisk(): Promise<void> {
async $saveCacheToDisk(sync: boolean = false): Promise<void> {
if (!cluster.isPrimary) {
return;
}
@ -41,85 +47,95 @@ class DiskCache {
return;
}
try {
logger.debug('Writing mempool and blocks data to disk cache (async)...');
logger.debug(`Writing mempool and blocks data to disk cache (${ sync ? 'sync' : 'async' })...`);
this.isWritingCache = true;
const mempool = memPool.getMempool();
const mempoolArray: TransactionExtended[] = [];
for (const tx in mempool) {
mempoolArray.push(mempool[tx]);
if (mempool[tx]) {
mempoolArray.push(mempool[tx]);
}
}
Common.shuffleArray(mempoolArray);
const chunkSize = Math.floor(mempoolArray.length / DiskCache.CHUNK_FILES);
await fsPromises.writeFile(DiskCache.FILE_NAME, JSON.stringify({
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await fsPromises.writeFile(DiskCache.FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
if (sync) {
fs.writeFileSync(DiskCache.TMP_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.writeFileSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
}
fs.renameSync(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.renameSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
}
} else {
await this.$yield();
await fsPromises.writeFile(DiskCache.TMP_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await this.$yield();
await fsPromises.writeFile(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
}
await fsPromises.rename(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await fsPromises.rename(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
}
}
logger.debug('Mempool and blocks data saved to disk cache');
this.isWritingCache = false;
} catch (e) {
logger.warn('Error writing to cache file: ' + (e instanceof Error ? e.message : e));
this.isWritingCache = false;
}
}
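// A minimal, standalone sketch of the write-to-tmp-then-rename pattern used
// above: rename() is atomic on a single filesystem, so readers never observe
// a half-written cache file. Paths and payload here are illustrative.
import { promises as fsp } from 'fs';
async function atomicWrite(finalPath: string, payload: string): Promise<void> {
  const tmpPath = finalPath + '.tmp';
  await fsp.writeFile(tmpPath, payload, { flag: 'w' }); // stage the full file
  await fsp.rename(tmpPath, finalPath); // atomically swap it into place
}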
saveCacheToDiskSync(): void {
if (!cluster.isPrimary) {
return;
}
if (this.isWritingCache) {
logger.debug('Saving cache already in progress. Skipping.');
return;
}
try {
logger.debug('Writing mempool and blocks data to disk cache (sync)...');
logger.debug('Writing rbf data to disk cache (async)...');
this.isWritingCache = true;
const mempool = memPool.getMempool();
const mempoolArray: TransactionExtended[] = [];
for (const tx in mempool) {
mempoolArray.push(mempool[tx]);
}
Common.shuffleArray(mempoolArray);
const chunkSize = Math.floor(mempoolArray.length / DiskCache.CHUNK_FILES);
fs.writeFileSync(DiskCache.TMP_FILE_NAME, JSON.stringify({
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.writeFileSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
const rbfData = rbfCache.dump();
if (sync) {
fs.writeFileSync(DiskCache.TMP_RBF_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
rbfCacheSchemaVersion: this.rbfCacheSchemaVersion,
rbf: rbfData,
}), { flag: 'w' });
fs.renameSync(DiskCache.TMP_RBF_FILE_NAME, DiskCache.RBF_FILE_NAME);
} else {
await fsPromises.writeFile(DiskCache.TMP_RBF_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
rbfCacheSchemaVersion: this.rbfCacheSchemaVersion,
rbf: rbfData,
}), { flag: 'w' });
await fsPromises.rename(DiskCache.TMP_RBF_FILE_NAME, DiskCache.RBF_FILE_NAME);
}
fs.renameSync(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.renameSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
}
logger.debug('Mempool and blocks data saved to disk cache');
logger.debug('Rbf data saved to disk cache');
this.isWritingCache = false;
} catch (e) {
logger.warn('Error writing to cache file: ' + (e instanceof Error ? e.message : e));
logger.warn('Error writing rbf data to cache file: ' + (e instanceof Error ? e.message : e));
this.isWritingCache = false;
}
}
@ -146,7 +162,19 @@ class DiskCache {
}
}
loadMempoolCache(): void {
wipeRbfCache() {
logger.notice(`Wiping nodejs backend cache/rbfcache.json file`);
try {
fs.unlinkSync(DiskCache.RBF_FILE_NAME);
} catch (e: any) {
if (e?.code !== 'ENOENT') {
logger.err(`Cannot wipe cache file ${DiskCache.RBF_FILE_NAME}. Exception ${JSON.stringify(e)}`);
}
}
}
async $loadMempoolCache(): Promise<void> {
if (!fs.existsSync(DiskCache.FILE_NAME)) {
return;
}
@ -160,9 +188,14 @@ class DiskCache {
logger.notice('Disk cache contains an outdated schema version. Clearing it and skipping the cache loading.');
return this.wipeCache();
}
if (data.network && data.network !== config.MEMPOOL.NETWORK) {
logger.notice('Disk cache contains data from a different network. Clearing it and skipping the cache loading.');
return this.wipeCache();
}
if (data.mempoolArray) {
for (const tx of data.mempoolArray) {
delete tx.uid;
data.mempool[tx.txid] = tx;
}
}
@ -175,6 +208,7 @@ class DiskCache {
const cacheData2 = JSON.parse(fs.readFileSync(fileName, 'utf8'));
if (cacheData2.mempoolArray) {
for (const tx of cacheData2.mempoolArray) {
delete tx.uid;
data.mempool[tx.txid] = tx;
}
} else {
@ -182,16 +216,74 @@ class DiskCache {
}
}
} catch (e) {
logger.info('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Error parsing ' + fileName + '. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
}
memPool.setMempool(data.mempool);
blocks.setBlocks(data.blocks);
blocks.setBlockSummaries(data.blockSummaries || []);
await memPool.$setMempool(data.mempool);
if (!this.ignoreBlocksCache) {
blocks.setBlocks(data.blocks);
blocks.setBlockSummaries(data.blockSummaries || []);
} else {
logger.info('Re-saving cache with empty recent blocks data');
await this.$saveCacheToDisk(true);
}
} catch (e) {
logger.warn('Failed to parse mempool and blocks cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
try {
let rbfData: any = {};
const rbfCacheData = fs.readFileSync(DiskCache.RBF_FILE_NAME, 'utf8');
if (rbfCacheData) {
logger.info('Restoring rbf data from disk cache');
rbfData = JSON.parse(rbfCacheData);
if (rbfData.rbfCacheSchemaVersion === undefined || rbfData.rbfCacheSchemaVersion !== this.rbfCacheSchemaVersion) {
logger.notice('Rbf disk cache contains an outdated schema version. Clearing it and skipping the cache loading.');
return this.wipeRbfCache();
}
if (rbfData.network && rbfData.network !== config.MEMPOOL.NETWORK) {
logger.notice('Rbf disk cache contains data from a different network. Clearing it and skipping the cache loading.');
return this.wipeRbfCache();
}
}
if (rbfData?.rbf) {
rbfCache.load(rbfData.rbf);
}
} catch (e) {
logger.warn('Failed to parse rbf cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
}
}
private $yield(): Promise<void> {
if (this.semaphore.locks) {
logger.debug('Pause writing mempool and blocks data to disk cache (async)');
return new Promise((resolve) => {
this.semaphore.resume.push(resolve);
});
} else {
return Promise.resolve();
}
}
public lock(): void {
this.semaphore.locks++;
}
public unlock(): void {
this.semaphore.locks = Math.max(0, this.semaphore.locks - 1);
if (!this.semaphore.locks && this.semaphore.resume.length) {
const nextResume = this.semaphore.resume.shift();
if (nextResume) {
logger.debug('Resume writing mempool and blocks data to disk cache (async)');
nextResume();
}
}
}
public setIgnoreBlocksCache(): void {
this.ignoreBlocksCache = true;
}
}
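// A hedged usage sketch for the lock()/unlock() semaphore above: block
// processing takes the lock so the async cache writer parks in $yield() until
// the chain tip is settled (the caller name is illustrative):
//   diskCache.lock();
//   try {
//     await processNewBlocks(); // heavy work; cache writes pause meanwhile
//   } finally {
//     diskCache.unlock(); // resumes any writer waiting in $yield()
//   }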

View file

@ -117,6 +117,26 @@ class ChannelsApi {
}
}
public async $getPenaltyClosedChannels(): Promise<any[]> {
try {
const query = `
SELECT n1.alias AS alias_left,
n2.alias AS alias_right,
channels.*
FROM channels
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
WHERE channels.status = 2 AND channels.closing_reason = 3
ORDER BY closing_date DESC
`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getPenaltyClosedChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getUnresolvedClosedChannels(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`;

View file

@ -11,6 +11,7 @@ class ChannelsRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/search/:search', this.$searchChannelsById)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/:short_id', this.$getChannel)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels', this.$getChannelsForNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/penalties', this.$getPenaltyClosedChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo', this.$getAllChannelsGeo)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo/:publicKey', this.$getAllChannelsGeo)
;
@ -108,6 +109,18 @@ class ChannelsRoutes {
}
}
private async $getPenaltyClosedChannels(req: Request, res: Response): Promise<void> {
try {
const channels = await channelsApi.$getPenaltyClosedChannels();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(channels);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getAllChannelsGeo(req: Request, res: Response) {
try {
const style: string = typeof req.query.style === 'string' ? req.query.style : '';

View file

@ -3,6 +3,7 @@ import DB from '../../database';
import { ResultSetHeader } from 'mysql2';
import { ILightningApi } from '../lightning/lightning-api.interface';
import { ITopNodesPerCapacity, ITopNodesPerChannels } from '../../mempool.interfaces';
import { bin2hex } from '../../utils/format';
class NodesApi {
public async $getWorldNodes(): Promise<any> {
@ -56,7 +57,8 @@ class NodesApi {
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
as_number, city_id, country_id, subdivision_id, longitude, latitude,
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
geo_names_country.names as country, geo_names_subdivision.names as subdivision
geo_names_country.names as country, geo_names_subdivision.names as subdivision,
features
FROM nodes
LEFT JOIN geo_names geo_names_as on geo_names_as.id = as_number
LEFT JOIN geo_names geo_names_city on geo_names_city.id = city_id
@ -76,6 +78,23 @@ class NodesApi {
node.city = JSON.parse(node.city);
node.country = JSON.parse(node.country);
// Features
node.features = JSON.parse(node.features);
node.featuresBits = null;
if (node.features) {
let maxBit = 0;
for (const feature of node.features) {
maxBit = Math.max(maxBit, feature.bit);
}
maxBit = Math.ceil(maxBit / 4) * 4 - 1;
node.featuresBits = new Array(maxBit + 1).fill(0);
for (const feature of node.features) {
node.featuresBits[feature.bit] = 1;
}
node.featuresBits = bin2hex(node.featuresBits.reverse().join(''));
}
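// Worked example with an assumed feature set: bits [0, 5] give maxBit = 5,
// padded to Math.ceil(5 / 4) * 4 - 1 = 7, so featuresBits = [1,0,0,0,0,1,0,0];
// reversed that reads "00100001", which bin2hex() encodes as "21".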
// Active channels and capacity
const activeChannelsStats: any = await this.$getActiveChannelsStats(public_key);
node.active_channel_count = activeChannelsStats.active_channel_count ?? 0;
@ -373,7 +392,7 @@ class NodesApi {
public async $searchNodeByPublicKeyOrAlias(search: string) {
try {
const publicKeySearch = search.replace('%', '') + '%';
const publicKeySearch = search.replace(/[^a-zA-Z0-9]/g, '') + '%';
const aliasSearch = search
.replace(/[-_.]/g, ' ') // Replace all -_. characters with empty space. Eg: "ln.nicehash" becomes "ln nicehash".
.replace(/[^a-zA-Z0-9 ]/g, '') // Remove all special characters and keep just A to Z, 0 to 9.
@ -656,10 +675,19 @@ class NodesApi {
alias_search,
color,
sockets,
status
status,
features
)
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1)
ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, alias_search = ?, color = ?, sockets = ?, status = 1`;
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1, ?)
ON DUPLICATE KEY UPDATE
updated_at = FROM_UNIXTIME(?),
alias = ?,
alias_search = ?,
color = ?,
sockets = ?,
status = 1,
features = ?
`;
await DB.query(query, [
node.pub_key,
@ -668,11 +696,13 @@ class NodesApi {
this.aliasToSearchText(node.alias),
node.color,
sockets,
JSON.stringify(node.features),
node.last_update,
node.alias,
this.aliasToSearchText(node.alias),
node.color,
sockets,
JSON.stringify(node.features),
]);
} catch (e) {
logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));

View file

@ -2,8 +2,91 @@ import { ILightningApi } from '../lightning-api.interface';
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
import logger from '../../../logger';
import { Common } from '../../common';
import { hex2bin } from '../../../utils/format';
import config from '../../../config';
// https://github.com/lightningnetwork/lnd/blob/master/lnwire/features.go
export enum FeatureBits {
DataLossProtectRequired = 0,
DataLossProtectOptional = 1,
InitialRoutingSync = 3,
UpfrontShutdownScriptRequired = 4,
UpfrontShutdownScriptOptional = 5,
GossipQueriesRequired = 6,
GossipQueriesOptional = 7,
TLVOnionPayloadRequired = 8,
TLVOnionPayloadOptional = 9,
StaticRemoteKeyRequired = 12,
StaticRemoteKeyOptional = 13,
PaymentAddrRequired = 14,
PaymentAddrOptional = 15,
MPPRequired = 16,
MPPOptional = 17,
WumboChannelsRequired = 18,
WumboChannelsOptional = 19,
AnchorsRequired = 20,
AnchorsOptional = 21,
AnchorsZeroFeeHtlcTxRequired = 22,
AnchorsZeroFeeHtlcTxOptional = 23,
ShutdownAnySegwitRequired = 26,
ShutdownAnySegwitOptional = 27,
AMPRequired = 30,
AMPOptional = 31,
ExplicitChannelTypeRequired = 44,
ExplicitChannelTypeOptional = 45,
ScidAliasRequired = 46,
ScidAliasOptional = 47,
PaymentMetadataRequired = 48,
PaymentMetadataOptional = 49,
ZeroConfRequired = 50,
ZeroConfOptional = 51,
KeysendRequired = 54,
KeysendOptional = 55,
ScriptEnforcedLeaseRequired = 2022,
ScriptEnforcedLeaseOptional = 2023,
MaxBolt11Feature = 5114,
};
export const FeaturesMap = new Map<FeatureBits, string>([
[FeatureBits.DataLossProtectRequired, 'data-loss-protect'],
[FeatureBits.DataLossProtectOptional, 'data-loss-protect'],
[FeatureBits.InitialRoutingSync, 'initial-routing-sync'],
[FeatureBits.UpfrontShutdownScriptRequired, 'upfront-shutdown-script'],
[FeatureBits.UpfrontShutdownScriptOptional, 'upfront-shutdown-script'],
[FeatureBits.GossipQueriesRequired, 'gossip-queries'],
[FeatureBits.GossipQueriesOptional, 'gossip-queries'],
[FeatureBits.TLVOnionPayloadRequired, 'tlv-onion'],
[FeatureBits.TLVOnionPayloadOptional, 'tlv-onion'],
[FeatureBits.StaticRemoteKeyOptional, 'static-remote-key'],
[FeatureBits.StaticRemoteKeyRequired, 'static-remote-key'],
[FeatureBits.PaymentAddrOptional, 'payment-addr'],
[FeatureBits.PaymentAddrRequired, 'payment-addr'],
[FeatureBits.MPPOptional, 'multi-path-payments'],
[FeatureBits.MPPRequired, 'multi-path-payments'],
[FeatureBits.AnchorsRequired, 'anchor-commitments'],
[FeatureBits.AnchorsOptional, 'anchor-commitments'],
[FeatureBits.AnchorsZeroFeeHtlcTxRequired, 'anchors-zero-fee-htlc-tx'],
[FeatureBits.AnchorsZeroFeeHtlcTxOptional, 'anchors-zero-fee-htlc-tx'],
[FeatureBits.WumboChannelsRequired, 'wumbo-channels'],
[FeatureBits.WumboChannelsOptional, 'wumbo-channels'],
[FeatureBits.AMPRequired, 'amp'],
[FeatureBits.AMPOptional, 'amp'],
[FeatureBits.PaymentMetadataOptional, 'payment-metadata'],
[FeatureBits.PaymentMetadataRequired, 'payment-metadata'],
[FeatureBits.ExplicitChannelTypeOptional, 'explicit-commitment-type'],
[FeatureBits.ExplicitChannelTypeRequired, 'explicit-commitment-type'],
[FeatureBits.KeysendOptional, 'keysend'],
[FeatureBits.KeysendRequired, 'keysend'],
[FeatureBits.ScriptEnforcedLeaseRequired, 'script-enforced-lease'],
[FeatureBits.ScriptEnforcedLeaseOptional, 'script-enforced-lease'],
[FeatureBits.ScidAliasRequired, 'scid-alias'],
[FeatureBits.ScidAliasOptional, 'scid-alias'],
[FeatureBits.ZeroConfRequired, 'zero-conf'],
[FeatureBits.ZeroConfOptional, 'zero-conf'],
[FeatureBits.ShutdownAnySegwitRequired, 'shutdown-any-segwit'],
[FeatureBits.ShutdownAnySegwitOptional, 'shutdown-any-segwit'],
]);
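// A minimal sketch of the BOLT 9 "it's OK to be odd" convention the map above
// relies on: even bits mark required features, odd bits optional ones, and
// each even/odd pair names the same feature. The helper name is illustrative.
function isRequiredFeatureBit(bit: number): boolean {
  return bit % 2 === 0; // even = required, odd = optional
}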
/**
* Convert a clightning "listnode" entry to a lnd node entry
*/
@ -17,10 +100,36 @@ export function convertNode(clNode: any): ILightningApi.Node {
custom_records = undefined;
}
}
const nodeFeatures: ILightningApi.Feature[] = [];
const nodeFeaturesBinary = hex2bin(clNode.features).split('').reverse().join('');
for (let i = 0; i < nodeFeaturesBinary.length; i++) {
if (nodeFeaturesBinary[i] === '0') {
continue;
}
const feature = FeaturesMap.get(i);
if (!feature) {
nodeFeatures.push({
bit: i,
name: 'unknown',
is_required: i % 2 === 0,
is_known: false
});
} else {
nodeFeatures.push({
bit: i,
name: feature,
is_required: i % 2 === 0,
is_known: true
});
}
}
return {
alias: clNode.alias ?? '',
color: `#${clNode.color ?? ''}`,
features: [], // TODO parse and return clNode.feature
features: nodeFeatures,
pub_key: clNode.nodeid,
addresses: clNode.addresses?.map((addr) => {
let address = addr.address;

View file

@ -79,6 +79,7 @@ export namespace ILightningApi {
}
export interface Feature {
bit: number;
name: string;
is_required: boolean;
is_known: boolean;

View file

@ -4,21 +4,29 @@ import * as fs from 'fs';
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
import { ILightningApi } from '../lightning-api.interface';
import config from '../../../config';
import logger from '../../../logger';
class LndApi implements AbstractLightningApi {
axiosConfig: AxiosRequestConfig = {};
constructor() {
if (config.LIGHTNING.ENABLED) {
if (!config.LIGHTNING.ENABLED) {
return;
}
try {
this.axiosConfig = {
headers: {
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex')
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex'),
},
httpsAgent: new Agent({
ca: fs.readFileSync(config.LND.TLS_CERT_PATH)
}),
timeout: 10000
timeout: config.LND.TIMEOUT
};
} catch (e) {
config.LIGHTNING.ENABLED = false;
logger.updateNetwork();
logger.err(`Could not initialize LND Macaroon/TLS Cert. Disabling LIGHTNING. ` + (e instanceof Error ? e.message : e));
}
}
@ -33,8 +41,23 @@ class LndApi implements AbstractLightningApi {
}
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
return axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
const graph = await axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
.then((response) => response.data);
for (const node of graph.nodes) {
const nodeFeatures: ILightningApi.Feature[] = [];
for (const bit in node.features) {
nodeFeatures.push({
bit: parseInt(bit, 10),
name: node.features[bit].name,
is_required: node.features[bit].is_required,
is_known: node.features[bit].is_known,
});
}
node.features = nodeFeatures;
}
return graph;
}
}

View file

@ -2,7 +2,7 @@ import * as fs from 'fs';
import logger from '../../logger';
class Icons {
private static FILE_NAME = './icons.json';
private static FILE_NAME = '/elements/asset_registry_db/icons.json';
private iconIds: string[] = [];
private icons: { [assetId: string]: string; } = {};

View file

@ -1,16 +1,22 @@
import { GbtGenerator, GbtResult, ThreadTransaction as RustThreadTransaction } from '../../rust-gbt';
import logger from '../logger';
import { MempoolBlock, TransactionExtended, ThreadTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces';
import { Common } from './common';
import { MempoolBlock, MempoolTransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor, CompactThreadTransaction, EffectiveFeeStats } from '../mempool.interfaces';
import { Common, OnlineFeeStatsCalculator } from './common';
import config from '../config';
import { Worker } from 'worker_threads';
import path from 'path';
const MAX_UINT32 = Math.pow(2, 32) - 1;
class MempoolBlocks {
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
private mempoolBlockDeltas: MempoolBlockDelta[] = [];
private txSelectionWorker: Worker | null = null;
private rustInitialized: boolean = false;
private rustGbtGenerator: GbtGenerator = new GbtGenerator();
constructor() {}
private nextUid: number = 1;
private uidMap: Map<number, string> = new Map(); // map short numerical uids to full txids
public getMempoolBlocks(): MempoolBlock[] {
return this.mempoolBlocks.map((block) => {
@ -33,13 +39,11 @@ class MempoolBlocks {
return this.mempoolBlockDeltas;
}
public updateMempoolBlocks(memPool: { [txid: string]: TransactionExtended }, saveResults: boolean = false): MempoolBlockWithTransactions[] {
public updateMempoolBlocks(memPool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): MempoolBlockWithTransactions[] {
const latestMempool = memPool;
const memPoolArray: TransactionExtended[] = [];
const memPoolArray: MempoolTransactionExtended[] = [];
for (const i in latestMempool) {
if (latestMempool.hasOwnProperty(i)) {
memPoolArray.push(latestMempool[i]);
}
memPoolArray.push(latestMempool[i]);
}
const start = new Date().getTime();
@ -49,17 +53,24 @@ class MempoolBlocks {
tx.ancestors = [];
tx.cpfpChecked = false;
if (!tx.effectiveFeePerVsize) {
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.effectiveFeePerVsize = tx.adjustedFeePerVsize;
}
});
// First sort
memPoolArray.sort((a, b) => b.feePerVsize - a.feePerVsize);
memPoolArray.sort((a, b) => {
if (a.adjustedFeePerVsize === b.adjustedFeePerVsize) {
// tie-break by lexicographic txid order for stability
return a.txid < b.txid ? -1 : 1;
} else {
return b.adjustedFeePerVsize - a.adjustedFeePerVsize;
}
});
// Loop through and traverse all ancestors and sum up all the sizes + fees
// Pass down size + fee to all unconfirmed children
let sizes = 0;
memPoolArray.forEach((tx, i) => {
memPoolArray.forEach((tx) => {
sizes += tx.weight;
if (sizes > 4000000 * 8) {
return;
@ -68,13 +79,20 @@ class MempoolBlocks {
});
// Final sort, by effective fee
memPoolArray.sort((a, b) => b.effectiveFeePerVsize - a.effectiveFeePerVsize);
memPoolArray.sort((a, b) => {
if (a.effectiveFeePerVsize === b.effectiveFeePerVsize) {
// tie-break by lexicographic txid order for stability
return a.txid < b.txid ? -1 : 1;
} else {
return b.effectiveFeePerVsize - a.effectiveFeePerVsize;
}
});
const end = new Date().getTime();
const time = end - start;
logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');
const blocks = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
const blocks = this.calculateMempoolBlocks(memPoolArray);
if (saveResults) {
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
@ -85,26 +103,63 @@ class MempoolBlocks {
return blocks;
}
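
Both sort passes above rely on the same stability trick: a descending fee-rate comparator that falls back to lexicographic txid order on ties, so repeated runs over an unchanged mempool yield identical projections. A standalone sketch with simplified field names:

```ts
interface RatedTx {
  txid: string;
  rate: number; // fee rate in sat/vB
}

// Descending by rate; ties break lexicographically by txid, making the
// sort deterministic regardless of input order.
function byRateDesc(a: RatedTx, b: RatedTx): number {
  if (a.rate === b.rate) {
    return a.txid < b.txid ? -1 : 1;
  }
  return b.rate - a.rate;
}

const txs: RatedTx[] = [
  { txid: 'bb…', rate: 5 },
  { txid: 'aa…', rate: 5 },
  { txid: 'cc…', rate: 7 },
];
txs.sort(byRateDesc); // cc…, aa…, bb… — the same result on every run
```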
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): MempoolBlockWithTransactions[] {
private calculateMempoolBlocks(transactionsSorted: MempoolTransactionExtended[]): MempoolBlockWithTransactions[] {
const mempoolBlocks: MempoolBlockWithTransactions[] = [];
let blockWeight = 0;
let feeStatsCalculator: OnlineFeeStatsCalculator = new OnlineFeeStatsCalculator(config.MEMPOOL.BLOCK_WEIGHT_UNITS);
let onlineStats = false;
let blockSize = 0;
let transactions: TransactionExtended[] = [];
transactionsSorted.forEach((tx) => {
let blockWeight = 0;
let blockVsize = 0;
let blockFees = 0;
const sizeLimit = (config.MEMPOOL.BLOCK_WEIGHT_UNITS / 4) * 1.2;
let transactionIds: string[] = [];
let transactions: MempoolTransactionExtended[] = [];
transactionsSorted.forEach((tx, index) => {
if (blockWeight + tx.weight <= config.MEMPOOL.BLOCK_WEIGHT_UNITS
|| mempoolBlocks.length === config.MEMPOOL.MEMPOOL_BLOCKS_AMOUNT - 1) {
tx.position = {
block: mempoolBlocks.length,
vsize: blockVsize + (tx.vsize / 2),
};
blockWeight += tx.weight;
blockVsize += tx.vsize;
blockSize += tx.size;
transactions.push(tx);
blockFees += tx.fee;
if (blockVsize <= sizeLimit) {
transactions.push(tx);
}
transactionIds.push(tx.txid);
if (onlineStats) {
feeStatsCalculator.processNext(tx);
}
} else {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, mempoolBlocks.length));
mempoolBlocks.push(this.dataToMempoolBlocks(transactionIds, transactions, blockSize, blockWeight, blockFees));
blockVsize = 0;
tx.position = {
block: mempoolBlocks.length,
vsize: blockVsize + (tx.vsize / 2),
};
if (mempoolBlocks.length === config.MEMPOOL.MEMPOOL_BLOCKS_AMOUNT - 1) {
const stackWeight = transactionsSorted.slice(index).reduce((total, tx) => total + (tx.weight || 0), 0);
if (stackWeight > config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
onlineStats = true;
feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5, [10, 20, 30, 40, 50, 60, 70, 80, 90]);
feeStatsCalculator.processNext(tx);
}
}
blockVsize += tx.vsize;
blockWeight = tx.weight;
blockSize = tx.size;
blockFees = tx.fee;
transactionIds = [tx.txid];
transactions = [tx];
}
});
if (transactions.length) {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, mempoolBlocks.length));
const feeStats = onlineStats ? feeStatsCalculator.getRawFeeStats() : undefined;
mempoolBlocks.push(this.dataToMempoolBlocks(transactionIds, transactions, blockSize, blockWeight, blockFees, feeStats));
}
return mempoolBlocks;
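
The loop above is a greedy weight-limited fill in which everything that cannot fit into the first MEMPOOL_BLOCKS_AMOUNT - 1 blocks spills into a final overflow "stack". Stripped of position tracking and fee statistics, the core packing logic looks roughly like this (a sketch under simplified types, not the full implementation):

```ts
interface WeightedTx {
  txid: string;
  weight: number; // weight units
}

const BLOCK_WEIGHT_UNITS = 4_000_000;

// Greedily pack rate-sorted transactions; the final block absorbs
// whatever remains once blockCount - 1 full blocks exist.
function packBlocks(sorted: WeightedTx[], blockCount: number): WeightedTx[][] {
  const blocks: WeightedTx[][] = [];
  let current: WeightedTx[] = [];
  let blockWeight = 0;
  for (const tx of sorted) {
    const isLastBlock = blocks.length === blockCount - 1;
    if (blockWeight + tx.weight <= BLOCK_WEIGHT_UNITS || isLastBlock) {
      current.push(tx);
      blockWeight += tx.weight;
    } else {
      blocks.push(current);
      current = [tx];
      blockWeight = tx.weight;
    }
  }
  if (current.length) {
    blocks.push(current);
  }
  return blocks;
}
```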
@ -115,6 +170,7 @@ class MempoolBlocks {
for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
let added: TransactionStripped[] = [];
let removed: string[] = [];
const changed: { txid: string, rate: number | undefined }[] = [];
if (mempoolBlocks[i] && !prevBlocks[i]) {
added = mempoolBlocks[i].transactions;
} else if (!mempoolBlocks[i] && prevBlocks[i]) {
@ -123,7 +179,7 @@ class MempoolBlocks {
const prevIds = {};
const newIds = {};
prevBlocks[i].transactions.forEach(tx => {
prevIds[tx.txid] = true;
prevIds[tx.txid] = tx;
});
mempoolBlocks[i].transactions.forEach(tx => {
newIds[tx.txid] = true;
@ -136,30 +192,45 @@ class MempoolBlocks {
mempoolBlocks[i].transactions.forEach(tx => {
if (!prevIds[tx.txid]) {
added.push(tx);
} else if (tx.rate !== prevIds[tx.txid].rate) {
changed.push({ txid: tx.txid, rate: tx.rate });
}
});
}
mempoolBlockDeltas.push({
added,
removed
removed,
changed,
});
}
return mempoolBlockDeltas;
}
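
Per block, the delta computation reduces to a three-way diff between the previous and current projections: transactions that appear, transactions that vanish, and transactions whose effective rate moved. A sketch over stripped transactions:

```ts
interface StrippedTx {
  txid: string;
  rate: number;
}

interface BlockDelta {
  added: StrippedTx[];
  removed: string[];
  changed: { txid: string; rate: number }[];
}

// Diff one projected block against its previous snapshot.
function diffBlock(prev: StrippedTx[], next: StrippedTx[]): BlockDelta {
  const prevById = new Map(prev.map(tx => [tx.txid, tx]));
  const nextIds = new Set(next.map(tx => tx.txid));
  const added = next.filter(tx => !prevById.has(tx.txid));
  const removed = prev.filter(tx => !nextIds.has(tx.txid)).map(tx => tx.txid);
  const changed = next
    .filter(tx => prevById.has(tx.txid) && prevById.get(tx.txid)!.rate !== tx.rate)
    .map(tx => ({ txid: tx.txid, rate: tx.rate }));
  return { added, removed, changed };
}
```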
public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
public async $makeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
const start = Date.now();
// reset mempool short ids
this.resetUids();
for (const tx of Object.values(newMempool)) {
this.setUid(tx);
}
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const strippedMempool: { [txid: string]: ThreadTransaction } = {};
const strippedMempool: Map<number, CompactThreadTransaction> = new Map();
Object.values(newMempool).forEach(entry => {
strippedMempool[entry.txid] = {
txid: entry.txid,
fee: entry.fee,
weight: entry.weight,
feePerVsize: entry.fee / (entry.weight / 4),
effectiveFeePerVsize: entry.fee / (entry.weight / 4),
vin: entry.vin.map(v => v.txid),
};
if (entry.uid !== null && entry.uid !== undefined) {
const stripped = {
uid: entry.uid,
fee: entry.fee,
weight: (entry.adjustedVsize * 4),
sigops: entry.sigops,
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize,
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[],
};
strippedMempool.set(entry.uid, stripped);
}
});
// (re)initialize tx selection worker thread
@ -178,7 +249,7 @@ class MempoolBlocks {
// run the block construction algorithm in a separate thread, and wait for a result
let threadErrorListener;
try {
const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
const workerResultPromise = new Promise<{ blocks: number[][], rates: Map<number, number>, clusters: Map<number, number[]> }>((resolve, reject) => {
threadErrorListener = reject;
this.txSelectionWorker?.once('message', (result): void => {
resolve(result);
@ -186,102 +257,268 @@ class MempoolBlocks {
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'set', mempool: strippedMempool });
const { blocks, clusters } = await workerResultPromise;
const { blocks, rates, clusters } = this.convertResultTxids(await workerResultPromise);
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
return this.processBlockTemplates(newMempool, blocks, clusters, saveResults);
const processed = this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
logger.debug(`makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
return processed;
} catch (e) {
logger.err('makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
}
return this.mempoolBlocks;
}
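
The compaction step is worth seeing on its own: full mempool entries are reduced to numeric uids plus the handful of fields the selection algorithm needs, and each input is resolved to its parent's uid before crossing the thread boundary. A simplified sketch, with types reduced to the relevant fields:

```ts
// Compact representation handed to the worker thread: numeric uids
// instead of 64-char txids, and inputs resolved to parent uids up front.
interface CompactTx {
  uid: number;
  fee: number;
  weight: number;
  feePerVsize: number;
  inputs: number[];
}

interface MempoolTx {
  txid: string;
  uid?: number;
  fee: number;
  weight: number;
  vin: { txid: string }[];
}

function stripForWorker(mempool: { [txid: string]: MempoolTx }): Map<number, CompactTx> {
  const stripped = new Map<number, CompactTx>();
  for (const entry of Object.values(mempool)) {
    if (entry.uid != null) {
      stripped.set(entry.uid, {
        uid: entry.uid,
        fee: entry.fee,
        weight: entry.weight,
        feePerVsize: entry.fee / (entry.weight / 4),
        // inputs not found in the mempool (confirmed parents) are dropped
        inputs: entry.vin
          .map(v => mempool[v.txid]?.uid)
          .filter((uid): uid is number => uid != null),
      });
    }
  }
  return stripped;
}
```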
public async updateBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, added: TransactionExtended[], removed: string[], saveResults: boolean = false): Promise<void> {
public async $updateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[], saveResults: boolean = false): Promise<void> {
if (!this.txSelectionWorker) {
// need to reset the worker
this.makeBlockTemplates(newMempool, saveResults);
await this.$makeBlockTemplates(newMempool, saveResults);
return;
}
const start = Date.now();
for (const tx of Object.values(added)) {
this.setUid(tx, true);
}
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const addedStripped: ThreadTransaction[] = added.map(entry => {
const addedStripped: CompactThreadTransaction[] = added.filter(entry => (entry.uid !== null && entry.uid !== undefined)).map(entry => {
return {
txid: entry.txid,
uid: entry.uid || 0,
fee: entry.fee,
weight: entry.weight,
feePerVsize: entry.fee / (entry.weight / 4),
effectiveFeePerVsize: entry.fee / (entry.weight / 4),
vin: entry.vin.map(v => v.txid),
weight: (entry.adjustedVsize * 4),
sigops: entry.sigops,
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize,
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[],
};
});
// run the block construction algorithm in a separate thread, and wait for a result
let threadErrorListener;
try {
const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
const workerResultPromise = new Promise<{ blocks: number[][], rates: Map<number, number>, clusters: Map<number, number[]> }>((resolve, reject) => {
threadErrorListener = reject;
this.txSelectionWorker?.once('message', (result): void => {
resolve(result);
});
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'update', added: addedStripped, removed });
const { blocks, clusters } = await workerResultPromise;
this.txSelectionWorker.postMessage({ type: 'update', added: addedStripped, removed: removedUids });
const { blocks, rates, clusters } = this.convertResultTxids(await workerResultPromise);
this.removeUids(removedUids);
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
this.processBlockTemplates(newMempool, blocks, clusters, saveResults);
this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
logger.debug(`updateBlockTemplates completed in ${(Date.now() - start) / 1000} seconds`);
} catch (e) {
logger.err('updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
}
}
private processBlockTemplates(mempool, blocks, clusters, saveResults): MempoolBlockWithTransactions[] {
// update this thread's mempool with the results
blocks.forEach(block => {
block.forEach(tx => {
if (tx.txid in mempool) {
if (tx.effectiveFeePerVsize != null) {
mempool[tx.txid].effectiveFeePerVsize = tx.effectiveFeePerVsize;
}
if (tx.cpfpRoot && tx.cpfpRoot in clusters) {
const ancestors: Ancestor[] = [];
const descendants: Ancestor[] = [];
const cluster = clusters[tx.cpfpRoot];
let matched = false;
cluster.forEach(txid => {
if (txid === tx.txid) {
matched = true;
} else {
const relative = {
txid: txid,
fee: mempool[txid].fee,
weight: mempool[txid].weight,
};
if (matched) {
descendants.push(relative);
} else {
ancestors.push(relative);
}
}
});
mempool[tx.txid].ancestors = ancestors;
mempool[tx.txid].descendants = descendants;
mempool[tx.txid].bestDescendant = null;
}
mempool[tx.txid].cpfpChecked = tx.cpfpChecked;
}
});
});
private resetRustGbt(): void {
this.rustInitialized = false;
this.rustGbtGenerator = new GbtGenerator();
}
// unpack the condensed blocks into proper mempool blocks
const mempoolBlocks = blocks.map((transactions, blockIndex) => {
return this.dataToMempoolBlocks(transactions.map(tx => {
return mempool[tx.txid] || null;
}).filter(tx => !!tx), blockIndex);
private async $rustMakeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
const start = Date.now();
// reset mempool short ids
if (saveResults) {
this.resetUids();
}
// set missing short ids
for (const tx of Object.values(newMempool)) {
this.setUid(tx, !saveResults);
}
// set short ids for transaction inputs
for (const tx of Object.values(newMempool)) {
tx.inputs = tx.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[];
}
// run the block construction algorithm in a separate thread, and wait for a result
const rustGbt = saveResults ? this.rustGbtGenerator : new GbtGenerator();
try {
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
await rustGbt.make(Object.values(newMempool) as RustThreadTransaction[], this.nextUid),
);
if (saveResults) {
this.rustInitialized = true;
}
const processed = this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, saveResults);
logger.debug(`RUST makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
return processed;
} catch (e) {
logger.err('RUST makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
if (saveResults) {
this.resetRustGbt();
}
}
return this.mempoolBlocks;
}
public async $oneOffRustBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }): Promise<MempoolBlockWithTransactions[]> {
return this.$rustMakeBlockTemplates(newMempool, false);
}
public async $rustUpdateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[]): Promise<void> {
// GBT optimization requires that uids never get too sparse
// as a sanity check, we should also explicitly prevent uint32 uid overflow
if (this.nextUid + added.length >= Math.min(Math.max(262144, 2 * mempoolSize), MAX_UINT32)) {
this.resetRustGbt();
}
if (!this.rustInitialized) {
// need to reset the worker
await this.$rustMakeBlockTemplates(newMempool, true);
return;
}
const start = Date.now();
// set missing short ids
for (const tx of added) {
this.setUid(tx, true);
}
// set short ids for transaction inputs
for (const tx of added) {
tx.inputs = tx.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[];
}
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
// run the block construction algorithm in a separate thread, and wait for a result
try {
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
await this.rustGbtGenerator.update(
added as RustThreadTransaction[],
removedUids,
this.nextUid,
),
);
const resultMempoolSize = blocks.reduce((total, block) => total + block.length, 0);
if (mempoolSize !== resultMempoolSize) {
throw new Error('GBT returned wrong number of transactions, cache is probably out of sync');
} else {
this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, true);
}
this.removeUids(removedUids);
logger.debug(`RUST updateBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
} catch (e) {
logger.err('RUST updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
this.resetRustGbt();
}
}
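
The reset condition guards two failure modes at once: uid sparsity (wasting space in uid-indexed state on the Rust side) and uint32 overflow. As a worked sketch of the same check:

```ts
const MAX_UINT32 = 2 ** 32 - 1;

// Reset the uid counter (and the Rust GBT state) before uids become too
// sparse relative to the live mempool, or before uint32 overflow.
function shouldResetUids(nextUid: number, addedCount: number, mempoolSize: number): boolean {
  const limit = Math.min(Math.max(262144, 2 * mempoolSize), MAX_UINT32);
  return nextUid + addedCount >= limit;
}

// With a 100k-tx mempool the ceiling is max(262144, 200000) = 262,144 uids;
// with a 500k-tx mempool it grows to 1,000,000, capped at 2^32 - 1.
shouldResetUids(262_000, 200, 100_000); // true — reset before assigning more
```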
private processBlockTemplates(mempool: { [txid: string]: MempoolTransactionExtended }, blocks: string[][], blockWeights: number[] | null, rates: [string, number][], clusters: string[][], saveResults): MempoolBlockWithTransactions[] {
for (const [txid, rate] of rates) {
if (txid in mempool) {
mempool[txid].effectiveFeePerVsize = rate;
mempool[txid].cpfpChecked = false;
}
}
const lastBlockIndex = blocks.length - 1;
let hasBlockStack = blocks.length >= 8;
let stackWeight;
let feeStatsCalculator: OnlineFeeStatsCalculator | void;
if (hasBlockStack) {
if (blockWeights && blockWeights[7] !== null) {
stackWeight = blockWeights[7];
} else {
stackWeight = blocks[lastBlockIndex].reduce((total, tx) => total + (mempool[tx]?.weight || 0), 0);
}
hasBlockStack = stackWeight > config.MEMPOOL.BLOCK_WEIGHT_UNITS;
feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5, [10, 20, 30, 40, 50, 60, 70, 80, 90]);
}
for (const cluster of clusters) {
for (const memberTxid of cluster) {
const mempoolTx = mempool[memberTxid];
if (mempoolTx) {
const ancestors: Ancestor[] = [];
const descendants: Ancestor[] = [];
let matched = false;
cluster.forEach(txid => {
if (txid === memberTxid) {
matched = true;
} else {
const relative = {
txid: txid,
fee: mempool[txid].fee,
weight: (mempool[txid].adjustedVsize * 4),
};
if (matched) {
descendants.push(relative);
mempoolTx.lastBoosted = Math.max(mempoolTx.lastBoosted || 0, mempool[txid].firstSeen || 0);
} else {
ancestors.push(relative);
}
}
});
Object.assign(mempoolTx, {ancestors, descendants, bestDescendant: null, cpfpChecked: true});
}
}
}
const sizeLimit = (config.MEMPOOL.BLOCK_WEIGHT_UNITS / 4) * 1.2;
// update this thread's mempool with the results
let mempoolTx: MempoolTransactionExtended;
const mempoolBlocks: MempoolBlockWithTransactions[] = blocks.map((block, blockIndex) => {
let totalSize = 0;
let totalVsize = 0;
let totalWeight = 0;
let totalFees = 0;
const transactions: MempoolTransactionExtended[] = [];
for (const txid of block) {
if (txid) {
mempoolTx = mempool[txid];
// save position in projected blocks
mempoolTx.position = {
block: blockIndex,
vsize: totalVsize + (mempoolTx.vsize / 2),
};
if (!mempoolTx.cpfpChecked) {
if (mempoolTx.ancestors?.length) {
mempoolTx.ancestors = [];
}
if (mempoolTx.descendants?.length) {
mempoolTx.descendants = [];
}
mempoolTx.bestDescendant = null;
mempoolTx.cpfpChecked = true;
}
// online calculation of stack-of-blocks fee stats
if (hasBlockStack && blockIndex === lastBlockIndex && feeStatsCalculator) {
feeStatsCalculator.processNext(mempoolTx);
}
totalSize += mempoolTx.size;
totalVsize += mempoolTx.vsize;
totalWeight += mempoolTx.weight;
totalFees += mempoolTx.fee;
if (totalVsize <= sizeLimit) {
transactions.push(mempoolTx);
}
}
}
return this.dataToMempoolBlocks(
block,
transactions,
totalSize,
totalWeight,
totalFees,
(hasBlockStack && blockIndex === lastBlockIndex && feeStatsCalculator) ? feeStatsCalculator.getRawFeeStats() : undefined,
);
});
if (saveResults) {
@ -293,37 +530,97 @@ class MempoolBlocks {
return mempoolBlocks;
}
private dataToMempoolBlocks(transactions: TransactionExtended[], blocksIndex: number): MempoolBlockWithTransactions {
let totalSize = 0;
let totalWeight = 0;
const fitTransactions: TransactionExtended[] = [];
transactions.forEach(tx => {
totalSize += tx.size;
totalWeight += tx.weight;
if ((totalWeight + tx.weight) <= config.MEMPOOL.BLOCK_WEIGHT_UNITS * 1.2) {
fitTransactions.push(tx);
}
});
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
}
if (transactions.length > 4000) {
rangeLength = 6;
} else if (transactions.length > 10000) {
rangeLength = 8;
private dataToMempoolBlocks(transactionIds: string[], transactions: MempoolTransactionExtended[], totalSize: number, totalWeight: number, totalFees: number, feeStats?: EffectiveFeeStats): MempoolBlockWithTransactions {
if (!feeStats) {
feeStats = Common.calcEffectiveFeeStatistics(transactions);
}
return {
blockSize: totalSize,
blockVSize: totalWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactions.map((tx) => tx.txid),
transactions: fitTransactions.map((tx) => Common.stripTransaction(tx)),
blockVSize: (totalWeight / 4), // fractional vsize to avoid rounding errors
nTx: transactionIds.length,
totalFees: totalFees,
medianFee: feeStats.medianFee, // Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: feeStats.feeRange, //Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactionIds,
transactions: transactions.map((tx) => Common.stripTransaction(tx)),
};
}
private resetUids(): void {
this.uidMap.clear();
this.nextUid = 1;
}
private setUid(tx: MempoolTransactionExtended, skipSet = false): number {
if (tx.uid === null || tx.uid === undefined || !skipSet) {
const uid = this.nextUid;
this.nextUid++;
this.uidMap.set(uid, tx.txid);
tx.uid = uid;
return uid;
} else {
return tx.uid;
}
}
private getUid(tx: MempoolTransactionExtended): number | void {
if (tx?.uid !== null && tx?.uid !== undefined && this.uidMap.has(tx.uid)) {
return tx.uid;
}
}
private removeUids(uids: number[]): void {
for (const uid of uids) {
this.uidMap.delete(uid);
}
}
private convertResultTxids({ blocks, rates, clusters }: { blocks: number[][], rates: Map<number, number>, clusters: Map<number, number[]>})
: { blocks: string[][], rates: { [root: string]: number }, clusters: { [root: string]: string[] }} {
const convertedBlocks: string[][] = blocks.map(block => block.map(uid => {
return this.uidMap.get(uid) || '';
}));
const convertedRates = {};
for (const rateUid of rates.keys()) {
const rateTxid = this.uidMap.get(rateUid);
if (rateTxid) {
convertedRates[rateTxid] = rates.get(rateUid);
}
}
const convertedClusters = {};
for (const rootUid of clusters.keys()) {
const rootTxid = this.uidMap.get(rootUid);
if (rootTxid) {
const members = clusters.get(rootUid)?.map(uid => {
return this.uidMap.get(uid);
});
convertedClusters[rootTxid] = members;
}
}
return { blocks: convertedBlocks, rates: convertedRates, clusters: convertedClusters } as { blocks: string[][], rates: { [root: string]: number }, clusters: { [root: string]: string[] }};
}
private convertNapiResultTxids({ blocks, blockWeights, rates, clusters }: GbtResult)
: { blocks: string[][], blockWeights: number[], rates: [string, number][], clusters: string[][] } {
const convertedBlocks: string[][] = blocks.map(block => block.map(uid => {
const txid = this.uidMap.get(uid);
if (txid !== undefined) {
return txid;
} else {
throw new Error('GBT returned a block containing a transaction with unknown uid');
}
}));
const convertedRates: [string, number][] = [];
for (const [rateUid, rate] of rates) {
const rateTxid = this.uidMap.get(rateUid) as string;
convertedRates.push([rateTxid, rate]);
}
const convertedClusters: string[][] = [];
for (const cluster of clusters) {
convertedClusters.push(cluster.map(uid => this.uidMap.get(uid)) as string[]);
}
return { blocks: convertedBlocks, blockWeights, rates: convertedRates, clusters: convertedClusters };
}
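
Both converters perform the same uid-to-txid translation; the napi variant is strict and throws on an unknown uid, since a gap means the uid map and the Rust generator have diverged. A minimal sketch:

```ts
// Convert blocks of numeric uids back into txids using the uid map,
// throwing on unknown uids (a sign the caches are out of sync).
function blocksToTxids(blocks: number[][], uidMap: Map<number, string>): string[][] {
  return blocks.map(block => block.map(uid => {
    const txid = uidMap.get(uid);
    if (txid === undefined) {
      throw new Error(`unknown uid ${uid} in GBT result`);
    }
    return txid;
  }));
}

const uidMap = new Map([[1, 'aaaa…'], [2, 'bbbb…']]);
blocksToTxids([[2, 1]], uidMap); // [['bbbb…', 'aaaa…']]
```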
}
export default new MempoolBlocks();

View file

@ -1,6 +1,6 @@
import config from '../config';
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { TransactionExtended, VbytesPerSecond } from '../mempool.interfaces';
import { MempoolTransactionExtended, TransactionExtended, VbytesPerSecond } from '../mempool.interfaces';
import logger from '../logger';
import { Common } from './common';
import transactionUtils from './transaction-utils';
@ -11,17 +11,16 @@ import bitcoinSecondClient from './bitcoin/bitcoin-second-client';
import rbfCache from './rbf-cache';
class Mempool {
private static WEBSOCKET_REFRESH_RATE_MS = 10000;
private static LAZY_DELETE_AFTER_SECONDS = 30;
private inSync: boolean = false;
private mempoolCacheDelta: number = -1;
private mempoolCache: { [txId: string]: TransactionExtended } = {};
private mempoolCache: { [txId: string]: MempoolTransactionExtended } = {};
private spendMap = new Map<string, MempoolTransactionExtended>();
private mempoolInfo: IBitcoinApi.MempoolInfo = { loaded: false, size: 0, bytes: 0, usage: 0, total_fee: 0,
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => Promise<void>) | undefined;
private mempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, newTransactions: MempoolTransactionExtended[],
deletedTransactions: MempoolTransactionExtended[]) => void) | undefined;
private $asyncMempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, mempoolSize: number, newTransactions: MempoolTransactionExtended[],
deletedTransactions: MempoolTransactionExtended[]) => Promise<void>) | undefined;
private txPerSecondArray: number[] = [];
private txPerSecond: number = 0;
@ -35,10 +34,10 @@ class Mempool {
private SAMPLE_TIME = 10000; // In ms
private timer = new Date().getTime();
private missingTxCount = 0;
private mainLoopTimeout: number = 120000;
constructor() {
setInterval(this.updateTxPerSecond.bind(this), 1000);
setInterval(this.deleteExpiredTransactions.bind(this), 20000);
}
/**
@ -65,28 +64,43 @@ class Mempool {
return this.latestTransactions;
}
public setMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => void) {
public setMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; },
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]) => void): void {
this.mempoolChangedCallback = fn;
}
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => Promise<void>) {
this.asyncMempoolChangedCallback = fn;
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; }, mempoolSize: number,
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]) => Promise<void>): void {
this.$asyncMempoolChangedCallback = fn;
}
public getMempool(): { [txid: string]: TransactionExtended } {
public getMempool(): { [txid: string]: MempoolTransactionExtended } {
return this.mempoolCache;
}
public setMempool(mempoolData: { [txId: string]: TransactionExtended }) {
public getSpendMap(): Map<string, MempoolTransactionExtended> {
return this.spendMap;
}
public async $setMempool(mempoolData: { [txId: string]: MempoolTransactionExtended }) {
this.mempoolCache = mempoolData;
let count = 0;
for (const txid of Object.keys(this.mempoolCache)) {
if (this.mempoolCache[txid].sigops == null || this.mempoolCache[txid].effectiveFeePerVsize == null) {
this.mempoolCache[txid] = transactionUtils.extendMempoolTransaction(this.mempoolCache[txid]);
}
if (this.mempoolCache[txid].order == null) {
this.mempoolCache[txid].order = transactionUtils.txidToOrdering(txid);
}
count++;
}
if (this.mempoolChangedCallback) {
this.mempoolChangedCallback(this.mempoolCache, [], []);
}
if (this.asyncMempoolChangedCallback) {
this.asyncMempoolChangedCallback(this.mempoolCache, [], []);
if (this.$asyncMempoolChangedCallback) {
await this.$asyncMempoolChangedCallback(this.mempoolCache, count, [], []);
}
this.addToSpendMap(Object.values(this.mempoolCache));
}
public async $updateMemPoolInfo() {
@ -118,19 +132,23 @@ class Mempool {
return txTimes;
}
public async $updateMempool(): Promise<void> {
public async $updateMempool(transactions: string[]): Promise<void> {
logger.debug(`Updating mempool...`);
// warn if this run stalls the main loop for more than 2 minutes
const timer = this.startTimer();
const start = new Date().getTime();
let hasChange: boolean = false;
const currentMempoolSize = Object.keys(this.mempoolCache).length;
const transactions = await bitcoinApi.$getRawMempool();
this.updateTimerProgress(timer, 'got raw mempool');
const diff = transactions.length - currentMempoolSize;
const newTransactions: TransactionExtended[] = [];
const newTransactions: MempoolTransactionExtended[] = [];
this.mempoolCacheDelta = Math.abs(diff);
if (!this.inSync) {
loadingIndicators.setProgress('mempool', Object.keys(this.mempoolCache).length / transactions.length * 100);
loadingIndicators.setProgress('mempool', currentMempoolSize / transactions.length * 100);
}
// https://github.com/mempool/mempool/issues/3283
@ -143,10 +161,12 @@ class Mempool {
}
};
let intervalTimer = Date.now();
for (const txid of transactions) {
if (!this.mempoolCache[txid]) {
try {
const transaction = await transactionUtils.$getTransactionExtended(txid);
const transaction = await transactionUtils.$getMempoolTransactionExtended(txid, false, false, false);
this.updateTimerProgress(timer, 'fetched new transaction');
this.mempoolCache[txid] = transaction;
if (this.inSync) {
this.txPerSecondArray.push(new Date().getTime());
@ -165,8 +185,19 @@ class Mempool {
}
}
if ((new Date().getTime()) - start > Mempool.WEBSOCKET_REFRESH_RATE_MS) {
break;
if (Date.now() - intervalTimer > 5_000) {
if (this.inSync) {
// Break and restart mempool loop if we spend too much time processing
// new transactions that may lead to falling behind on block height
logger.debug('Breaking mempool loop because the 5s time limit was exceeded.');
break;
} else {
const progress = (currentMempoolSize + newTransactions.length) / transactions.length * 100;
logger.debug(`Mempool is synchronizing. Processed ${newTransactions.length}/${diff} txs (${Math.round(progress)}%)`);
loadingIndicators.setProgress('mempool', progress);
intervalTimer = Date.now();
}
}
}
@ -192,7 +223,7 @@ class Mempool {
}, 1000 * 60 * config.MEMPOOL.CLEAR_PROTECTION_MINUTES);
}
const deletedTransactions: TransactionExtended[] = [];
const deletedTransactions: MempoolTransactionExtended[] = [];
if (this.mempoolProtection !== 1) {
this.mempoolProtection = 0;
@ -200,45 +231,100 @@ class Mempool {
const transactionsObject = {};
transactions.forEach((txId) => transactionsObject[txId] = true);
// Flag transactions for lazy deletion
// Delete evicted transactions from mempool
for (const tx in this.mempoolCache) {
if (!transactionsObject[tx] && !this.mempoolCache[tx].deleteAfter) {
if (!transactionsObject[tx]) {
deletedTransactions.push(this.mempoolCache[tx]);
this.mempoolCache[tx].deleteAfter = new Date().getTime() + Mempool.LAZY_DELETE_AFTER_SECONDS * 1000;
}
}
for (const tx of deletedTransactions) {
delete this.mempoolCache[tx.txid];
}
}
const newMempoolSize = currentMempoolSize + newTransactions.length - deletedTransactions.length;
const newTransactionsStripped = newTransactions.map((tx) => Common.stripTransaction(tx));
this.latestTransactions = newTransactionsStripped.concat(this.latestTransactions).slice(0, 6);
if (!this.inSync && transactions.length === Object.keys(this.mempoolCache).length) {
if (!this.inSync && transactions.length === newMempoolSize) {
this.inSync = true;
logger.notice('The mempool is now in sync!');
loadingIndicators.setProgress('mempool', 100);
}
this.mempoolCacheDelta = Math.abs(transactions.length - Object.keys(this.mempoolCache).length);
this.mempoolCacheDelta = Math.abs(transactions.length - newMempoolSize);
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
if (this.asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
await this.asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
if (this.$asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.updateTimerProgress(timer, 'running async mempool callback');
await this.$asyncMempoolChangedCallback(this.mempoolCache, newMempoolSize, newTransactions, deletedTransactions);
this.updateTimerProgress(timer, 'completed async mempool callback');
}
const end = new Date().getTime();
const time = end - start;
logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`);
this.clearTimer(timer);
}
public handleRbfTransactions(rbfTransactions: { [txid: string]: TransactionExtended; }) {
private startTimer() {
const state: any = {
start: Date.now(),
progress: 'begin $updateMempool',
timer: null,
};
state.timer = setTimeout(() => {
logger.err(`$updateMempool stalled at "${state.progress}"`);
}, this.mainLoopTimeout);
return state;
}
private updateTimerProgress(state, msg) {
state.progress = msg;
}
private clearTimer(state) {
if (state.timer) {
clearTimeout(state.timer);
}
}
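
The three timer helpers implement a cheap stall watchdog: the long-running update tags its progress as it goes, and if the whole run outlives the timeout, the last tag is logged. A self-contained sketch of the pattern (names here are illustrative):

```ts
interface WatchdogState {
  start: number;
  progress: string;
  timer: NodeJS.Timeout;
}

// Fire `onStall` with the last recorded progress tag if the guarded
// operation runs longer than `timeoutMs`.
function startWatchdog(timeoutMs: number, onStall: (progress: string) => void): WatchdogState {
  const state = { start: Date.now(), progress: 'begin' } as WatchdogState;
  state.timer = setTimeout(() => onStall(state.progress), timeoutMs);
  return state;
}

const watchdog = startWatchdog(120_000, (p) => console.error(`stalled at "${p}"`));
watchdog.progress = 'got raw mempool'; // breadcrumb updates cost nothing
// ... long-running work ...
clearTimeout(watchdog.timer);          // clear once the run completes
```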
public handleRbfTransactions(rbfTransactions: { [txid: string]: MempoolTransactionExtended[]; }): void {
for (const rbfTransaction in rbfTransactions) {
if (this.mempoolCache[rbfTransaction]) {
if (this.mempoolCache[rbfTransaction] && rbfTransactions[rbfTransaction]?.length) {
// Store replaced transactions
rbfCache.add(this.mempoolCache[rbfTransaction], rbfTransactions[rbfTransaction].txid);
// Erase the replaced transactions from the local mempool
delete this.mempoolCache[rbfTransaction];
rbfCache.add(rbfTransactions[rbfTransaction], this.mempoolCache[rbfTransaction]);
}
}
}
public handleMinedRbfTransactions(rbfTransactions: { [txid: string]: { replaced: MempoolTransactionExtended[], replacedBy: TransactionExtended }}): void {
for (const rbfTransaction in rbfTransactions) {
if (rbfTransactions[rbfTransaction].replacedBy && rbfTransactions[rbfTransaction]?.replaced?.length) {
// Store replaced transactions
rbfCache.add(rbfTransactions[rbfTransaction].replaced, transactionUtils.extendMempoolTransaction(rbfTransactions[rbfTransaction].replacedBy));
}
}
}
public addToSpendMap(transactions: MempoolTransactionExtended[]): void {
for (const tx of transactions) {
for (const vin of tx.vin) {
this.spendMap.set(`${vin.txid}:${vin.vout}`, tx);
}
}
}
public removeFromSpendMap(transactions: TransactionExtended[]): void {
for (const tx of transactions) {
for (const vin of tx.vin) {
const key = `${vin.txid}:${vin.vout}`;
if (this.spendMap.get(key)?.txid === tx.txid) {
this.spendMap.delete(key);
}
}
}
}
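
Keying the spend map by outpoint (`txid:vout`) makes replacement detection a constant-time lookup: any incoming transaction spending an outpoint already in the map conflicts with the recorded spender. A sketch over reduced types:

```ts
interface SimpleTx {
  txid: string;
  vin: { txid: string; vout: number }[];
}

const spendMap = new Map<string, SimpleTx>();

// Index a mempool transaction by every outpoint it consumes.
function addToSpendMap(tx: SimpleTx): void {
  for (const vin of tx.vin) {
    spendMap.set(`${vin.txid}:${vin.vout}`, tx);
  }
}

// A new transaction conflicts with (replaces) any mempool transaction
// already spending one of its inputs.
function findConflicts(tx: SimpleTx): SimpleTx[] {
  const conflicts = new Map<string, SimpleTx>();
  for (const vin of tx.vin) {
    const spender = spendMap.get(`${vin.txid}:${vin.vout}`);
    if (spender && spender.txid !== tx.txid) {
      conflicts.set(spender.txid, spender);
    }
  }
  return [...conflicts.values()];
}
```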
@ -256,17 +342,6 @@ class Mempool {
}
}
private deleteExpiredTransactions() {
const now = new Date().getTime();
for (const tx in this.mempoolCache) {
const lazyDeleteAt = this.mempoolCache[tx].deleteAfter;
if (lazyDeleteAt && lazyDeleteAt < now) {
delete this.mempoolCache[tx];
rbfCache.evict(tx);
}
}
}
private $getMempoolInfo() {
if (config.MEMPOOL.USE_SECOND_NODE_FOR_MINFEE) {
return Promise.all([

View file

@ -26,7 +26,7 @@ class MiningRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fee-rates/:interval', this.$getHistoricalBlockFeeRates)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlocksHealth)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores/:height', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/score/:hash', this.$getBlockAuditScore)
@ -244,15 +244,15 @@ class MiningRoutes {
}
}
private async $getHistoricalBlockPrediction(req: Request, res: Response) {
private async $getHistoricalBlocksHealth(req: Request, res: Response) {
try {
const blockPredictions = await mining.$getBlockPredictionsHistory(req.params.interval);
const blockCount = await BlocksAuditsRepository.$getPredictionsCount();
const blocksHealth = await mining.$getBlocksHealthHistory(req.params.interval);
const blockCount = await BlocksAuditsRepository.$getBlocksHealthCount();
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.header('X-total-count', blockCount.toString());
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(blockPredictions.map(prediction => [prediction.time, prediction.height, prediction.match_rate]));
res.json(blocksHealth.map(health => [health.time, health.height, health.match_rate]));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}

View file

@ -13,17 +13,21 @@ import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
import PricesRepository from '../../repositories/PricesRepository';
import { bitcoinCoreApi } from '../bitcoin/bitcoin-api-factory';
import { IEsploraApi } from '../bitcoin/esplora-api.interface';
import database from '../../database';
class Mining {
private blocksPriceIndexingRunning = false;
public lastHashrateIndexingDate: number | null = null;
public lastWeeklyHashrateIndexingDate: number | null = null;
public reindexHashrateRequested = false;
public reindexDifficultyAdjustmentRequested = false;
/**
* Get historical block predictions match rate
* Get historical blocks health
*/
public async $getBlockPredictionsHistory(interval: string | null = null): Promise<any> {
return await BlocksAuditsRepository.$getBlockPredictionsHistory(
public async $getBlocksHealthHistory(interval: string | null = null): Promise<any> {
return await BlocksAuditsRepository.$getBlocksHealthHistory(
this.getTimeRange(interval),
Common.getSqlInterval(interval)
);
@ -102,6 +106,7 @@ class Mining {
emptyBlocks: emptyBlocksCount.length > 0 ? emptyBlocksCount[0]['count'] : 0,
slug: poolInfo.slug,
avgMatchRate: poolInfo.avgMatchRate !== null ? Math.round(100 * poolInfo.avgMatchRate) / 100 : null,
avgFeeDelta: poolInfo.avgFeeDelta,
};
poolsStats.push(poolStat);
});
@ -141,6 +146,9 @@ class Mining {
const blockCount1w: number = await BlocksRepository.$blockCount(pool.id, '1w');
const totalBlock1w: number = await BlocksRepository.$blockCount(null, '1w');
const avgHealth = await BlocksRepository.$getAvgBlockHealthPerPoolId(pool.id);
const totalReward = await BlocksRepository.$getTotalRewardForPoolId(pool.id);
let currentEstimatedHashrate = 0;
try {
currentEstimatedHashrate = await bitcoinClient.getNetworkHashPs(totalBlock24h);
@ -162,6 +170,8 @@ class Mining {
},
estimatedHashrate: currentEstimatedHashrate * (blockCount24h / totalBlock24h),
reportedHashrate: null,
avgBlockHealth: avgHealth,
totalReward: totalReward,
};
}
@ -284,6 +294,14 @@ class Mining {
* Generate daily hashrate data
*/
public async $generateNetworkHashrateHistory(): Promise<void> {
// If a re-index was requested, truncate first
if (this.reindexHashrateRequested === true) {
logger.notice(`hashrates will now be re-indexed`);
await database.query(`TRUNCATE hashrates`);
this.lastHashrateIndexingDate = 0;
this.reindexHashrateRequested = false;
}
// We only run this once a day around midnight
const today = new Date().getUTCDate();
if (today === this.lastHashrateIndexingDate) {
@ -389,6 +407,13 @@ class Mining {
* Index difficulty adjustments
*/
public async $indexDifficultyAdjustments(): Promise<void> {
// If a re-index was requested, truncate first
if (this.reindexDifficultyAdjustmentRequested === true) {
logger.notice(`difficulty_adjustments will now be re-indexed`);
await database.query(`TRUNCATE difficulty_adjustments`);
this.reindexDifficultyAdjustmentRequested = false;
}
const indexedHeightsArray = await DifficultyAdjustmentsRepository.$getAdjustmentsHeights();
const indexedHeights = {};
for (const height of indexedHeightsArray) {
@ -446,7 +471,7 @@ class Mining {
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 5) {
const progress = Math.round(totalBlockChecked / blocks.length * 100);
logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
logger.debug(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
timer = new Date().getTime() / 1000;
}
}
@ -467,11 +492,11 @@ class Mining {
}
this.blocksPriceIndexingRunning = true;
let totalInserted = 0;
try {
const prices: any[] = await PricesRepository.$getPricesTimesAndId();
const blocksWithoutPrices: any[] = await BlocksRepository.$getBlocksWithoutPrice();
let totalInserted = 0;
const blocksPrices: BlockPrice[] = [];
for (const block of blocksWithoutPrices) {
@ -516,7 +541,13 @@ class Mining {
}
} catch (e) {
this.blocksPriceIndexingRunning = false;
throw e;
logger.err(`Cannot index block prices. ${e}`);
}
if (totalInserted > 0) {
logger.info(`Indexing blocks prices completed. Indexed ${totalInserted}`, logger.tags.mining);
} else {
logger.debug(`Indexing blocks prices completed. Indexed 0.`, logger.tags.mining);
}
this.blocksPriceIndexingRunning = false;
@ -552,8 +583,10 @@ class Mining {
currentBlockHeight -= 10000;
}
if (totalIndexed) {
logger.info(`Indexing missing coinstatsindex data completed`, logger.tags.mining);
if (totalIndexed > 0) {
logger.info(`Indexing missing coinstatsindex data completed. Indexed ${totalIndexed}`, logger.tags.mining);
} else {
logger.debug(`Indexing missing coinstatsindex data completed. Indexed 0.`, logger.tags.mining);
}
}

View file

@ -4,6 +4,7 @@ import config from '../config';
import PoolsRepository from '../repositories/PoolsRepository';
import { PoolTag } from '../mempool.interfaces';
import diskCache from './disk-cache';
import mining from './mining/mining';
class PoolsParser {
miningPools: any[] = [];
@ -41,7 +42,7 @@ class PoolsParser {
public async migratePoolsJson(): Promise<void> {
// We also need to wipe the backend cache to make sure we don't serve blocks with
// the wrong mining pool (usually happens with unknown blocks)
diskCache.wipeCache();
diskCache.setIgnoreBlocksCache();
await this.$insertUnknownPool();
@ -73,14 +74,12 @@ class PoolsParser {
if (JSON.stringify(pool.addresses) !== poolDB.addresses ||
JSON.stringify(pool.regexes) !== poolDB.regexes) {
// Pool addresses changed or coinbase tags changed
logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool. If 'AUTOMATIC_BLOCK_REINDEXING' is enabled, we will re-index its blocks and 'unknown' blocks`);
logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool.`);
await PoolsRepository.$updateMiningPoolTags(poolDB.id, pool.addresses, pool.regexes);
await this.$deleteBlocksForPool(poolDB);
}
}
}
logger.info('Mining pools-v2.json import completed');
}
/**
@ -118,10 +117,6 @@ class PoolsParser {
* @param pool
*/
private async $deleteBlocksForPool(pool: PoolTag): Promise<void> {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
return;
}
// Get oldest blocks mined by the pool and assume pools-v2.json updates only concern most recent years
// Ignore early days of Bitcoin as there were no mining pools yet
const [oldestPoolBlock]: any[] = await DB.query(`
@ -132,7 +127,15 @@ class PoolsParser {
LIMIT 1`,
[pool.id]
);
const oldestBlockHeight = oldestPoolBlock.length ?? 0 > 0 ? oldestPoolBlock[0].height : 130635;
let firstKnownBlockPool = 130635; // https://mempool.space/block/0000000000000a067d94ff753eec72830f1205ad3a4c216a08a80c832e551a52
if (config.MEMPOOL.NETWORK === 'testnet') {
firstKnownBlockPool = 21106; // https://mempool.space/testnet/block/0000000070b701a5b6a1b965f6a38e0472e70b2bb31b973e4638dec400877581
} else if (config.MEMPOOL.NETWORK === 'signet') {
firstKnownBlockPool = 0;
}
const oldestBlockHeight = (oldestPoolBlock.length ?? 0) > 0 ? oldestPoolBlock[0].height : firstKnownBlockPool;
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${oldestBlockHeight} for re-indexing`);
await DB.query(`
@ -146,16 +149,31 @@ class PoolsParser {
WHERE pool_id = ?`,
[pool.id]
);
// Re-index hashrates and difficulty adjustments later
mining.reindexHashrateRequested = true;
mining.reindexDifficultyAdjustmentRequested = true;
}
private async $deleteUnknownBlocks(): Promise<void> {
let firstKnownBlockPool = 130635; // https://mempool.space/block/0000000000000a067d94ff753eec72830f1205ad3a4c216a08a80c832e551a52
if (config.MEMPOOL.NETWORK === 'testnet') {
firstKnownBlockPool = 21106; // https://mempool.space/testnet/block/0000000070b701a5b6a1b965f6a38e0472e70b2bb31b973e4638dec400877581
} else if (config.MEMPOOL.NETWORK === 'signet') {
firstKnownBlockPool = 0;
}
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height 130635 for re-indexing`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${firstKnownBlockPool} for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ? AND height >= 130635`,
WHERE pool_id = ? AND height >= ${firstKnownBlockPool}`,
[unknownPool[0].id]
);
// Re-index hashrates and difficulty adjustments later
mining.reindexHashrateRequested = true;
mining.reindexDifficultyAdjustmentRequested = true;
}
}

View file

@ -1,65 +1,391 @@
import { TransactionExtended } from "../mempool.interfaces";
import logger from "../logger";
import { MempoolTransactionExtended, TransactionStripped } from "../mempool.interfaces";
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { Common } from "./common";
interface RbfTransaction extends TransactionStripped {
rbf?: boolean;
mined?: boolean;
fullRbf?: boolean;
}
interface RbfTree {
tx: RbfTransaction;
time: number;
interval?: number;
mined?: boolean;
fullRbf: boolean;
replaces: RbfTree[];
}
export interface ReplacementInfo {
mined: boolean;
fullRbf: boolean;
txid: string;
oldFee: number;
oldVsize: number;
newFee: number;
newVsize: number;
}
class RbfCache {
private replacedBy: { [txid: string]: string; } = {};
private replaces: { [txid: string]: string[] } = {};
private txs: { [txid: string]: TransactionExtended } = {};
private expiring: { [txid: string]: Date } = {};
private replacedBy: Map<string, string> = new Map();
private replaces: Map<string, string[]> = new Map();
private rbfTrees: Map<string, RbfTree> = new Map(); // sequences of consecutive replacements
private dirtyTrees: Set<string> = new Set();
private treeMap: Map<string, string> = new Map(); // map of txids to sequence ids
private txs: Map<string, MempoolTransactionExtended> = new Map();
private expiring: Map<string, number> = new Map();
constructor() {
setInterval(this.cleanup.bind(this), 1000 * 60 * 60);
setInterval(this.cleanup.bind(this), 1000 * 60 * 10);
}
public add(replacedTx: TransactionExtended, newTxId: string): void {
this.replacedBy[replacedTx.txid] = newTxId;
this.txs[replacedTx.txid] = replacedTx;
if (!this.replaces[newTxId]) {
this.replaces[newTxId] = [];
public add(replaced: MempoolTransactionExtended[], newTxExtended: MempoolTransactionExtended): void {
if (!newTxExtended || !replaced?.length || this.txs.has(newTxExtended.txid)) {
return;
}
this.replaces[newTxId].push(replacedTx.txid);
const newTx = Common.stripTransaction(newTxExtended) as RbfTransaction;
const newTime = newTxExtended.firstSeen || (Date.now() / 1000);
newTx.rbf = newTxExtended.vin.some((v) => v.sequence < 0xfffffffe);
this.txs.set(newTx.txid, newTxExtended);
// maintain rbf trees
let txFullRbf = false;
let treeFullRbf = false;
const replacedTrees: RbfTree[] = [];
for (const replacedTxExtended of replaced) {
const replacedTx = Common.stripTransaction(replacedTxExtended) as RbfTransaction;
replacedTx.rbf = replacedTxExtended.vin.some((v) => v.sequence < 0xfffffffe);
if (!replacedTx.rbf) {
txFullRbf = true;
}
this.replacedBy.set(replacedTx.txid, newTx.txid);
if (this.treeMap.has(replacedTx.txid)) {
const treeId = this.treeMap.get(replacedTx.txid);
if (treeId) {
const tree = this.rbfTrees.get(treeId);
this.rbfTrees.delete(treeId);
if (tree) {
tree.interval = newTime - tree?.time;
replacedTrees.push(tree);
treeFullRbf = treeFullRbf || tree.fullRbf || !tree.tx.rbf;
}
}
} else {
const replacedTime = replacedTxExtended.firstSeen || (Date.now() / 1000);
replacedTrees.push({
tx: replacedTx,
time: replacedTime,
interval: newTime - replacedTime,
fullRbf: !replacedTx.rbf,
replaces: [],
});
treeFullRbf = treeFullRbf || !replacedTx.rbf;
this.txs.set(replacedTx.txid, replacedTxExtended);
}
}
newTx.fullRbf = txFullRbf;
const treeId = replacedTrees[0].tx.txid;
const newTree = {
tx: newTx,
time: newTime,
fullRbf: treeFullRbf,
replaces: replacedTrees
};
this.rbfTrees.set(treeId, newTree);
this.updateTreeMap(treeId, newTree);
this.replaces.set(newTx.txid, replacedTrees.map(tree => tree.tx.txid));
this.dirtyTrees.add(treeId);
}
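
The tree bookkeeping is easiest to see on a bare replacement chain: each new replacement becomes the root of a tree whose id stays pinned to the first replaced txid, and `treeMap` lets any member txid find its tree. A reduced sketch handling single-replacement chains only (the real code merges several replaced trees at once and tracks timing and full-RBF flags):

```ts
interface MiniTree {
  txid: string;
  replaces: MiniTree[];
}

const trees = new Map<string, MiniTree>();   // keyed by tree id (first root's txid)
const treeMap = new Map<string, string>();   // any member txid -> tree id

// Record that `newTxid` replaced `oldTxid`, nesting any existing tree.
function recordReplacement(oldTxid: string, newTxid: string): void {
  const treeId = treeMap.get(oldTxid) ?? oldTxid;
  const replaced = trees.get(treeId) ?? { txid: oldTxid, replaces: [] };
  trees.delete(treeId);
  trees.set(treeId, { txid: newTxid, replaces: [replaced] });
  treeMap.set(oldTxid, treeId);
  treeMap.set(newTxid, treeId);
}

recordReplacement('a', 'b'); // tree 'a': b -> [a]
recordReplacement('b', 'c'); // tree 'a': c -> [b -> [a]]
```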
public getReplacedBy(txId: string): string | undefined {
return this.replacedBy[txId];
return this.replacedBy.get(txId);
}
public getReplaces(txId: string): string[] | undefined {
return this.replaces[txId];
return this.replaces.get(txId);
}
public getTx(txId: string): TransactionExtended | undefined {
return this.txs[txId];
public getTx(txId: string): MempoolTransactionExtended | undefined {
return this.txs.get(txId);
}
public getRbfTree(txId: string): RbfTree | void {
return this.rbfTrees.get(this.treeMap.get(txId) || '');
}
// get a paginated list of RbfTrees
// ordered by most recent replacement time
public getRbfTrees(onlyFullRbf: boolean, after?: string): RbfTree[] {
const limit = 25;
const trees: RbfTree[] = [];
const used = new Set<string>();
const replacements: string[][] = Array.from(this.replacedBy).reverse();
const afterTree = after ? this.treeMap.get(after) : null;
let ready = !afterTree;
for (let i = 0; i < replacements.length && trees.length <= limit - 1; i++) {
const txid = replacements[i][1];
const treeId = this.treeMap.get(txid) || '';
if (treeId === afterTree) {
ready = true;
} else if (ready) {
if (!used.has(treeId)) {
const tree = this.rbfTrees.get(treeId);
used.add(treeId);
if (tree && (!onlyFullRbf || tree.fullRbf)) {
trees.push(tree);
}
}
}
}
return trees;
}
// get map of rbf trees that have been updated since the last call
public getRbfChanges(): { trees: {[id: string]: RbfTree }, map: { [txid: string]: string }} {
const changes: { trees: {[id: string]: RbfTree }, map: { [txid: string]: string }} = {
trees: {},
map: {},
};
this.dirtyTrees.forEach(id => {
const tree = this.rbfTrees.get(id);
if (tree) {
changes.trees[id] = tree;
this.getTransactionsInTree(tree).forEach(tx => {
changes.map[tx.txid] = id;
});
}
});
this.dirtyTrees = new Set();
return changes;
}
public mined(txid): void {
if (!this.txs.has(txid)) {
return;
}
const treeId = this.treeMap.get(txid);
if (treeId && this.rbfTrees.has(treeId)) {
const tree = this.rbfTrees.get(treeId);
if (tree) {
this.setTreeMined(tree, txid);
tree.mined = true;
this.dirtyTrees.add(treeId);
}
}
this.evict(txid);
}
// flag a transaction as removed from the mempool
public evict(txid): void {
this.expiring[txid] = new Date(Date.now() + 1000 * 86400); // 24 hours
public evict(txid: string, fast: boolean = false): void {
if (this.txs.has(txid) && (fast || !this.expiring.has(txid))) {
this.expiring.set(txid, fast ? Date.now() + (1000 * 60 * 10) : Date.now() + (1000 * 86400)); // 10 minutes when fast, otherwise 24 hours
}
}
// is the transaction involved in a full rbf replacement?
public isFullRbf(txid: string): boolean {
const treeId = this.treeMap.get(txid);
if (!treeId) {
return false;
}
const tree = this.rbfTrees.get(treeId);
if (!tree) {
return false;
}
return tree?.fullRbf;
}
private cleanup(): void {
const currentDate = new Date();
for (const txid in this.expiring) {
if (this.expiring[txid] < currentDate) {
delete this.expiring[txid];
const now = Date.now();
for (const txid of this.expiring.keys()) {
if ((this.expiring.get(txid) || 0) < now) {
this.expiring.delete(txid);
this.remove(txid);
}
}
logger.debug(`rbf cache contains ${this.txs.size} txs, ${this.expiring.size} due to expire`);
}
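
Eviction is two-speed: replaced transactions normally linger for 24 hours so their replacement history stays queryable, while the fast path shortens that to 10 minutes. A sketch of just the expiry mechanics, without the surrounding cache state:

```ts
const expiring = new Map<string, number>(); // txid -> expiry timestamp (ms)

// Schedule a txid for removal; `fast` shortens the grace period and may
// also bring forward an already-scheduled slow expiry.
function evict(txid: string, fast = false): void {
  if (fast || !expiring.has(txid)) {
    expiring.set(txid, Date.now() + (fast ? 10 * 60_000 : 86_400_000));
  }
}

// Periodically sweep expired entries and hand them to the remover.
function cleanup(remove: (txid: string) => void): void {
  const now = Date.now();
  for (const [txid, expiry] of expiring) {
    if (expiry < now) {
      expiring.delete(txid);
      remove(txid);
    }
  }
}
```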
// remove a transaction & all previous versions from the cache
private remove(txid): void {
// don't remove a transaction while a newer version remains in the mempool
if (this.replaces[txid] && !this.replacedBy[txid]) {
const replaces = this.replaces[txid];
delete this.replaces[txid];
for (const tx of replaces) {
// don't remove a transaction if a newer version remains in the mempool
if (!this.replacedBy.has(txid)) {
const replaces = this.replaces.get(txid);
this.replaces.delete(txid);
this.treeMap.delete(txid);
this.txs.delete(txid);
this.expiring.delete(txid);
for (const tx of (replaces || [])) {
// recursively remove prior versions from the cache
delete this.replacedBy[tx];
delete this.txs[tx];
this.replacedBy.delete(tx);
// if this is the id of a tree, remove that too
if (this.treeMap.get(tx) === tx) {
this.rbfTrees.delete(tx);
}
this.remove(tx);
}
}
}
private updateTreeMap(newId: string, tree: RbfTree): void {
this.treeMap.set(tree.tx.txid, newId);
tree.replaces.forEach(subtree => {
this.updateTreeMap(newId, subtree);
});
}
private getTransactionsInTree(tree: RbfTree, txs: RbfTransaction[] = []): RbfTransaction[] {
txs.push(tree.tx);
tree.replaces.forEach(subtree => {
this.getTransactionsInTree(subtree, txs);
});
return txs;
}
private setTreeMined(tree: RbfTree, txid: string): void {
if (tree.tx.txid === txid) {
tree.tx.mined = true;
} else {
tree.replaces.forEach(subtree => {
this.setTreeMined(subtree, txid);
});
}
}
public dump(): any {
const trees = Array.from(this.rbfTrees.values()).map((tree: RbfTree) => { return this.exportTree(tree); });
return {
txs: Array.from(this.txs.entries()),
trees,
expiring: Array.from(this.expiring.entries()),
};
}
public async load({ txs, trees, expiring }): Promise<void> {
txs.forEach(txEntry => {
this.txs.set(txEntry[0], txEntry[1]);
});
for (const deflatedTree of trees) {
await this.importTree(deflatedTree.root, deflatedTree.root, deflatedTree, this.txs);
}
expiring.forEach(expiringEntry => {
if (this.txs.has(expiringEntry[0])) {
this.expiring.set(expiringEntry[0], new Date(expiringEntry[1]).getTime());
}
});
this.cleanup();
}
exportTree(tree: RbfTree, deflated: any = null) {
if (!deflated) {
deflated = {
root: tree.tx.txid,
};
}
deflated[tree.tx.txid] = {
tx: tree.tx.txid,
txMined: tree.tx.mined,
time: tree.time,
interval: tree.interval,
mined: tree.mined,
fullRbf: tree.fullRbf,
replaces: tree.replaces.map(child => child.tx.txid),
};
tree.replaces.forEach(child => {
this.exportTree(child, deflated);
});
return deflated;
}
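
Serialization flattens the recursive tree into a map keyed by txid, each entry listing only child txids; `importTree` then rebuilds the recursion from the `root` pointer. The shape, reduced to its essentials:

```ts
interface Tree {
  id: string;
  children: Tree[];
}

interface Deflated {
  root: string;
  [id: string]: any;
}

// Flatten a tree into { root, <id>: { children: [...ids] }, ... } so it
// can be written to disk as plain JSON without recursive references.
function deflate(tree: Tree, out?: Deflated): Deflated {
  out = out ?? { root: tree.id };
  out[tree.id] = { children: tree.children.map(c => c.id) };
  tree.children.forEach(c => deflate(c, out));
  return out;
}

// deflate({ id: 'a', children: [{ id: 'b', children: [] }] })
// => { root: 'a', a: { children: ['b'] }, b: { children: [] } }
```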
async importTree(root, txid, deflated, txs: Map<string, MempoolTransactionExtended>, mined: boolean = false): Promise<RbfTree | void> {
const treeInfo = deflated[txid];
const replaces: RbfTree[] = [];
// check if any transactions in this tree have already been confirmed
mined = mined || treeInfo.mined;
let exists = mined;
if (!mined) {
try {
const apiTx = await bitcoinApi.$getRawTransaction(txid);
if (apiTx) {
exists = true;
}
if (apiTx?.status?.confirmed) {
mined = true;
treeInfo.txMined = true;
this.evict(txid, true);
}
} catch (e) {
// most transactions do not exist
}
}
// if the root tx is not in the mempool or the blockchain
// evict this tree as soon as possible
if (root === txid && !exists) {
this.evict(txid, true);
}
// recursively reconstruct child trees
for (const childId of treeInfo.replaces) {
const replaced = await this.importTree(root, childId, deflated, txs, mined);
if (replaced) {
this.replacedBy.set(replaced.tx.txid, txid);
replaces.push(replaced);
if (replaced.mined) {
mined = true;
}
}
}
this.replaces.set(txid, replaces.map(t => t.tx.txid));
const tx = txs.get(txid);
if (!tx) {
return;
}
const strippedTx = Common.stripTransaction(tx) as RbfTransaction;
strippedTx.rbf = tx.vin.some((v) => v.sequence < 0xfffffffe);
strippedTx.mined = treeInfo.txMined;
const tree = {
tx: strippedTx,
time: treeInfo.time,
interval: treeInfo.interval,
mined: mined,
fullRbf: treeInfo.fullRbf,
replaces,
};
this.treeMap.set(txid, root);
if (root === txid) {
this.rbfTrees.set(root, tree);
this.dirtyTrees.add(root);
}
return tree;
}
public getLatestRbfSummary(): ReplacementInfo[] {
const rbfList = this.getRbfTrees(false);
return rbfList.slice(0, 6).map(rbfTree => {
let oldFee = 0;
let oldVsize = 0;
for (const replaced of rbfTree.replaces) {
oldFee += replaced.tx.fee;
oldVsize += replaced.tx.vsize;
}
return {
txid: rbfTree.tx.txid,
mined: !!rbfTree.tx.mined,
fullRbf: !!rbfTree.tx.fullRbf,
oldFee,
oldVsize,
newFee: rbfTree.tx.fee,
newVsize: rbfTree.tx.vsize,
};
});
}
}
export default new RbfCache();

View file

@ -211,7 +211,7 @@ class StatisticsApi {
CAST(avg(vsize_1800) as DOUBLE) as vsize_1800,
CAST(avg(vsize_2000) as DOUBLE) as vsize_2000 \
FROM statistics \
WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW() \
${interval === 'all' ? '' : `WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`} \
GROUP BY UNIX_TIMESTAMP(added) DIV ${div} \
ORDER BY statistics.added DESC;`;
}
@ -259,7 +259,7 @@ class StatisticsApi {
vsize_1800,
vsize_2000 \
FROM statistics \
WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW() \
${interval === 'all' ? '' : `WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`} \
GROUP BY UNIX_TIMESTAMP(added) DIV ${div} \
ORDER BY statistics.added DESC;`;
}
@ -386,6 +386,17 @@ class StatisticsApi {
}
}
public async $listAll(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDays(43200, 'all'); // 12h interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$listAll() error: ' + (e instanceof Error ? e.message : e));
return [];
}
}
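A reduced sketch of the query-template pattern used above: the new 'all' interval simply drops the WHERE clause, so a single builder serves both bounded windows and the full history (the SELECT list is abbreviated; the WHERE and GROUP BY clauses follow the statements shown):

function buildQuery(interval: string, div: number): string {
  const where = interval === 'all'
    ? ''
    : `WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
  return `SELECT UNIX_TIMESTAMP(added) AS added FROM statistics ${where} ` +
    `GROUP BY UNIX_TIMESTAMP(added) DIV ${div} ORDER BY added DESC;`;
}

buildQuery('2 HOUR', 180);  // bounded two-hour window
buildQuery('all', 43200);   // full history at a 12h resolution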
private mapStatisticToOptimizedStatistic(statistic: Statistic[]): OptimizedStatistic[] {
return statistic.map((s) => {
return {

View file

@ -15,10 +15,11 @@ class StatisticsRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', this.$getStatisticsByTime.bind(this, '2y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', this.$getStatisticsByTime.bind(this, '3y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/4y', this.$getStatisticsByTime.bind(this, '4y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/all', this.$getStatisticsByTime.bind(this, 'all'))
;
}
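Each route above binds the same handler with a different time literal, so adding the new 'all' endpoint costs one route line plus one member in the handler's union type. A minimal Express sketch of the binding pattern (paths and the reduced union are illustrative):

import express from 'express';

type Span = '2h' | '24h' | 'all';

const app = express();

async function byTime(time: Span, req: express.Request, res: express.Response): Promise<void> {
  // `time` arrives from bind(); req/res are appended by Express at call time
  res.json({ span: time });
}

app.get('/statistics/24h', byTime.bind(null, '24h'));
app.get('/statistics/all', byTime.bind(null, 'all'));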
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y' | '4y', req: Request, res: Response) {
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y' | '4y' | 'all', req: Request, res: Response) {
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
@ -26,10 +27,6 @@ class StatisticsRoutes {
try {
let result;
switch (time as string) {
case '2h':
result = await statisticsApi.$list2H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 30).toUTCString());
break;
case '24h':
result = await statisticsApi.$list24H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
@ -58,8 +55,13 @@ class StatisticsRoutes {
case '4y':
result = await statisticsApi.$list4Y();
break;
case 'all':
result = await statisticsApi.$listAll();
break;
default:
result = await statisticsApi.$list2H();
res.setHeader('Expires', new Date(Date.now() + 1000 * 30).toUTCString());
break;
}
res.json(result);
} catch (e) {

View file

@ -1,7 +1,8 @@
import { TransactionExtended, TransactionMinerInfo } from '../mempool.interfaces';
import { TransactionExtended, MempoolTransactionExtended, TransactionMinerInfo } from '../mempool.interfaces';
import { IEsploraApi } from './bitcoin/esplora-api.interface';
import { Common } from './common';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
import * as bitcoinjs from 'bitcoinjs-lib';
class TransactionUtils {
constructor() { }
@ -22,19 +23,27 @@ class TransactionUtils {
}
/**
* @param txId
* @param addPrevouts
* @param lazyPrevouts
* @param txId
* @param addPrevouts
* @param lazyPrevouts
* @param forceCore - See https://github.com/mempool/mempool/issues/2904
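* @param addMempoolData - extend with mempool-specific fields (sigops, adjusted vsize) even if the transaction is confirmed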
*/
public async $getTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false, forceCore = false): Promise<TransactionExtended> {
public async $getTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false, forceCore = false, addMempoolData = false): Promise<TransactionExtended> {
let transaction: IEsploraApi.Transaction;
if (forceCore === true) {
transaction = await bitcoinCoreApi.$getRawTransaction(txId, true);
} else {
transaction = await bitcoinApi.$getRawTransaction(txId, false, addPrevouts, lazyPrevouts);
}
return this.extendTransaction(transaction);
if (addMempoolData || !transaction?.status?.confirmed) {
return this.extendMempoolTransaction(transaction);
} else {
return this.extendTransaction(transaction);
}
}
public async $getMempoolTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false, forceCore = false): Promise<MempoolTransactionExtended> {
return (await this.$getTransactionExtended(txId, addPrevouts, lazyPrevouts, forceCore, true)) as MempoolTransactionExtended;
}
private extendTransaction(transaction: IEsploraApi.Transaction): TransactionExtended {
@ -50,8 +59,33 @@ class TransactionUtils {
feePerVsize: feePerVbytes,
effectiveFeePerVsize: feePerVbytes,
}, transaction);
if (!transaction.status.confirmed) {
transactionExtended.firstSeen = Math.round((new Date().getTime() / 1000));
if (!transaction?.status?.confirmed && !transactionExtended.firstSeen) {
transactionExtended.firstSeen = Math.round((Date.now() / 1000));
}
return transactionExtended;
}
public extendMempoolTransaction(transaction: IEsploraApi.Transaction): MempoolTransactionExtended {
const vsize = Math.ceil(transaction.weight / 4);
const fractionalVsize = (transaction.weight / 4);
const sigops = this.countSigops(transaction);
// https://github.com/bitcoin/bitcoin/blob/e9262ea32a6e1d364fb7974844fadc36f931f8c6/src/policy/policy.cpp#L295-L298
const adjustedVsize = Math.max(fractionalVsize, sigops * 5); // adjusted vsize = Max(weight, sigops * bytes_per_sigop) / witness_scale_factor
const feePerVbytes = Math.max(Common.isLiquid() ? 0.1 : 1,
(transaction.fee || 0) / fractionalVsize);
const adjustedFeePerVsize = Math.max(Common.isLiquid() ? 0.1 : 1,
(transaction.fee || 0) / adjustedVsize);
const transactionExtended: MempoolTransactionExtended = Object.assign(transaction, {
order: this.txidToOrdering(transaction.txid),
vsize: Math.round(transaction.weight / 4),
adjustedVsize,
sigops,
feePerVsize: feePerVbytes,
adjustedFeePerVsize: adjustedFeePerVsize,
effectiveFeePerVsize: adjustedFeePerVsize,
});
if (!transactionExtended?.status?.confirmed && !transactionExtended.firstSeen) {
transactionExtended.firstSeen = Math.round((Date.now() / 1000));
}
return transactionExtended;
}
@ -63,6 +97,75 @@ class TransactionUtils {
}
return str;
}
public countScriptSigops(script: string, isRawScript: boolean = false, witness: boolean = false): number {
let sigops = 0;
// count OP_CHECKSIG and OP_CHECKSIGVERIFY
sigops += (script.match(/OP_CHECKSIG/g)?.length || 0);
// count OP_CHECKMULTISIG and OP_CHECKMULTISIGVERIFY
if (isRawScript) {
// in scriptPubKey or scriptSig, always worth 20
sigops += 20 * (script.match(/OP_CHECKMULTISIG/g)?.length || 0);
} else {
// in redeem scripts and witnesses, worth N if preceded by OP_N, 20 otherwise
const matches = script.matchAll(/(?:OP_(\d+))? OP_CHECKMULTISIG/g);
for (const match of matches) {
const n = parseInt(match[1]);
if (Number.isInteger(n)) {
sigops += n;
} else {
sigops += 20;
}
}
}
return witness ? sigops : (sigops * 4);
}
public countSigops(transaction: IEsploraApi.Transaction): number {
let sigops = 0;
for (const input of transaction.vin) {
if (input.scriptsig_asm) {
sigops += this.countScriptSigops(input.scriptsig_asm, true);
}
if (input.prevout) {
switch (true) {
case input.prevout.scriptpubkey_type === 'p2sh' && input.witness?.length === 2 && input.scriptsig && input.scriptsig.startsWith('160014'):
case input.prevout.scriptpubkey_type === 'v0_p2wpkh':
sigops += 1;
break;
case input.prevout?.scriptpubkey_type === 'p2sh' && input.witness?.length && input.scriptsig && input.scriptsig.startsWith('220020'):
case input.prevout.scriptpubkey_type === 'v0_p2wsh':
if (input.witness?.length) {
sigops += this.countScriptSigops(bitcoinjs.script.toASM(Buffer.from(input.witness[input.witness.length - 1], 'hex')), false, true);
}
break;
}
}
}
for (const output of transaction.vout) {
if (output.scriptpubkey_asm) {
sigops += this.countScriptSigops(output.scriptpubkey_asm, true);
}
}
return sigops;
}
// returns the most significant 4 bytes of the txid as an integer
public txidToOrdering(txid: string): number {
return parseInt(
txid.substr(62, 2) +
txid.substr(60, 2) +
txid.substr(58, 2) +
txid.substr(56, 2),
16
);
}
}
export default new TransactionUtils();
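Two of the helpers above can be sanity-checked by hand: countScriptSigops scores a bare OP_CHECKMULTISIG at 20, or at N when an OP_N key count precedes it inside a redeem script or witness, and scales raw-script totals by the witness factor of 4; txidToOrdering reads the last four bytes of the txid in little-endian order. A hedged sketch of the expected values (the ASM strings and txid are illustrative inputs, with <...> standing in for data pushes):

// raw scriptPubKey: one OP_CHECKSIG, scaled x4 outside witness context
transactionUtils.countScriptSigops('OP_DUP OP_HASH160 <h> OP_EQUALVERIFY OP_CHECKSIG', true); // => 4

// 2-of-3 multisig witness script: OP_3 names the key count, so 3 sigops, no scaling
transactionUtils.countScriptSigops('OP_2 <a> <b> <c> OP_3 OP_CHECKMULTISIG', false, true); // => 3

// ordering key: the trailing bytes 'aabbccdd' read back little-endian as 0xddccbbaa
transactionUtils.txidToOrdering('00'.repeat(28) + 'aabbccdd'); // => 3721182122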

View file

@ -1,11 +1,10 @@
import config from '../config';
import logger from '../logger';
import { ThreadTransaction, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
import { CompactThreadTransaction, AuditTransaction } from '../mempool.interfaces';
import { PairingHeap } from '../utils/pairing-heap';
import { Common } from './common';
import { parentPort } from 'worker_threads';
let mempool: { [txid: string]: ThreadTransaction } = {};
let mempool: Map<number, CompactThreadTransaction> = new Map();
if (parentPort) {
parentPort.on('message', (params) => {
@ -13,18 +12,18 @@ if (parentPort) {
mempool = params.mempool;
} else if (params.type === 'update') {
params.added.forEach(tx => {
mempool[tx.txid] = tx;
mempool.set(tx.uid, tx);
});
params.removed.forEach(txid => {
delete mempool[txid];
params.removed.forEach(uid => {
mempool.delete(uid);
});
}
const { blocks, clusters } = makeBlockTemplates(mempool);
const { blocks, rates, clusters } = makeBlockTemplates(mempool);
// return the result to main thread.
if (parentPort) {
parentPort.postMessage({ blocks, clusters });
parentPort.postMessage({ blocks, rates, clusters });
}
});
}
@ -33,35 +32,36 @@ if (parentPort) {
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*/
function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
: { blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } } {
function makeBlockTemplates(mempool: Map<number, CompactThreadTransaction>)
: { blocks: number[][], rates: Map<number, number>, clusters: Map<number, number[]> } {
const start = Date.now();
const auditPool: { [txid: string]: AuditTransaction } = {};
const auditPool: Map<number, AuditTransaction> = new Map();
const mempoolArray: AuditTransaction[] = [];
const restOfArray: ThreadTransaction[] = [];
const cpfpClusters: { [root: string]: string[] } = {};
const cpfpClusters: Map<number, number[]> = new Map();
// initialize an audit transaction for each mempool entry
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
mempool.forEach(tx => {
tx.dirty = false;
// initializing everything up front helps V8 optimize property access later
auditPool[tx.txid] = {
txid: tx.txid,
auditPool.set(tx.uid, {
uid: tx.uid,
fee: tx.fee,
weight: tx.weight,
feePerVsize: tx.feePerVsize,
effectiveFeePerVsize: tx.feePerVsize,
vin: tx.vin,
sigops: tx.sigops,
inputs: tx.inputs || [],
relativesSet: false,
ancestorMap: new Map<string, AuditTransaction>(),
ancestorMap: new Map<number, AuditTransaction>(),
children: new Set<AuditTransaction>(),
ancestorFee: 0,
ancestorWeight: 0,
ancestorSigops: 0,
score: 0,
used: false,
modified: false,
modifiedNode: null,
};
mempoolArray.push(auditPool[tx.txid]);
});
mempoolArray.push(auditPool.get(tx.uid) as AuditTransaction);
});
// Build relatives graph & calculate ancestor scores
@ -72,15 +72,29 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
}
// Sort by descending ancestor score
mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));
mempoolArray.sort((a, b) => {
if (b.score === a.score) {
// tie-break by uid for stability
return a.uid < b.uid ? -1 : 1;
} else {
return (b.score || 0) - (a.score || 0);
}
});
// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: ThreadTransaction[][] = [];
const blocks: number[][] = [];
let blockWeight = 4000;
let blockSize = 0;
let blockSigops = 0;
let transactions: AuditTransaction[] = [];
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => {
if (a.score === b.score) {
// tie-break by uid for stability
return a.uid > b.uid;
} else {
return (a.score || 0) > (b.score || 0);
}
});
let overflow: AuditTransaction[] = [];
let failures = 0;
let top = 0;
@ -107,30 +121,36 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
if (nextTx && !nextTx?.used) {
// Check if the package fits into this block
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
if (blocks.length >= 7 || ((blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) && (blockSigops + nextTx.ancestorSigops <= 80000))) {
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
let isCluster = false;
if (sortedTxSet.length > 1) {
cpfpClusters[nextTx.txid] = sortedTxSet.map(tx => tx.txid);
cpfpClusters.set(nextTx.uid, sortedTxSet.map(tx => tx.uid));
isCluster = true;
}
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
const effectiveFeeRate = Math.min(nextTx.dependencyRate || Infinity, nextTx.ancestorFee / (nextTx.ancestorWeight / 4));
const used: AuditTransaction[] = [];
while (sortedTxSet.length) {
const ancestor = sortedTxSet.pop();
const mempoolTx = mempool[ancestor.txid];
const mempoolTx = mempool.get(ancestor.uid);
if (!mempoolTx) {
continue;
}
ancestor.used = true;
ancestor.usedBy = nextTx.txid;
ancestor.usedBy = nextTx.uid;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
if (isCluster) {
mempoolTx.cpfpRoot = nextTx.txid;
if (mempoolTx.effectiveFeePerVsize !== effectiveFeeRate) {
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
mempoolTx.dirty = true;
}
if (mempoolTx.cpfpRoot !== nextTx.uid) {
mempoolTx.cpfpRoot = isCluster ? nextTx.uid : null;
mempoolTx.dirty = true;
}
mempoolTx.cpfpChecked = true;
transactions.push(ancestor);
blockSize += ancestor.size;
blockWeight += ancestor.weight;
used.push(ancestor);
}
@ -138,7 +158,7 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
// remove these as valid package ancestors for any descendants remaining in the mempool
if (used.length) {
used.forEach(tx => {
updateDescendants(tx, auditPool, modified);
updateDescendants(tx, auditPool, modified, effectiveFeeRate);
});
}
@ -156,11 +176,10 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
if ((exceededPackageTries || queueEmpty) && blocks.length < 7) {
// construct this block
if (transactions.length) {
blocks.push(transactions.map(t => mempool[t.txid]));
blocks.push(transactions.map(t => t.uid));
}
// reset for the next block
transactions = [];
blockSize = 0;
blockWeight = 4000;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
@ -175,50 +194,38 @@ function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
overflow = [];
}
}
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx?.used) {
continue;
}
blockWeight += tx.weight;
const mempoolTx = mempool[tx.txid];
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = tx.score;
if (tx.ancestorMap.size > 0) {
cpfpClusters[tx.txid] = Array.from(tx.ancestorMap?.values()).map(a => a.txid);
mempoolTx.cpfpRoot = tx.txid;
}
mempoolTx.cpfpChecked = true;
transactions.push(tx);
tx.used = true;
if (overflow.length > 0) {
logger.warn('GBT overflow list unexpectedly non-empty after final block constructed');
}
const blockTransactions = transactions.map(t => mempool[t.txid]);
restOfArray.forEach(tx => {
blockWeight += tx.weight;
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.cpfpChecked = false;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(blockTransactions);
// add the final unbounded block if it contains any transactions
if (transactions.length > 0) {
blocks.push(transactions.map(t => t.uid));
}
// get map of dirty transactions
const rates = new Map<number, number>();
for (const tx of mempool.values()) {
if (tx?.dirty) {
rates.set(tx.uid, tx.effectiveFeePerVsize || tx.feePerVsize);
}
}
transactions = [];
const end = Date.now();
const time = end - start;
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');
return { blocks, clusters: cpfpClusters };
return { blocks, rates, clusters: cpfpClusters };
}
// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
function setRelatives(
tx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
mempool: Map<number, AuditTransaction>,
): void {
for (const parent of tx.vin) {
const parentTx = mempool[parent];
for (const parent of tx.inputs) {
const parentTx = mempool.get(parent);
if (parentTx && !tx.ancestorMap?.has(parent)) {
tx.ancestorMap.set(parent, parentTx);
parentTx.children.add(tx);
@ -227,15 +234,17 @@ function setRelatives(
setRelatives(parentTx, mempool);
}
parentTx.ancestorMap.forEach((ancestor) => {
tx.ancestorMap.set(ancestor.txid, ancestor);
tx.ancestorMap.set(ancestor.uid, ancestor);
});
}
};
tx.ancestorFee = tx.fee || 0;
tx.ancestorWeight = tx.weight || 0;
tx.ancestorSigops = tx.sigops || 0;
tx.ancestorMap.forEach((ancestor) => {
tx.ancestorFee += ancestor.fee;
tx.ancestorWeight += ancestor.weight;
tx.ancestorSigops += ancestor.sigops;
});
tx.score = tx.ancestorFee / ((tx.ancestorWeight / 4) || 1);
tx.relativesSet = true;
@ -245,8 +254,9 @@ function setRelatives(
// avoids recursion to limit call stack depth
function updateDescendants(
rootTx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
mempool: Map<number, AuditTransaction>,
modified: PairingHeap<AuditTransaction>,
clusterRate: number,
): void {
const descendantSet: Set<AuditTransaction> = new Set();
// stack of nodes left to visit
@ -261,13 +271,15 @@ function updateDescendants(
});
while (descendants.length) {
descendantTx = descendants.pop();
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.uid)) {
// remove tx as ancestor
descendantTx.ancestorMap.delete(rootTx.txid);
descendantTx.ancestorMap.delete(rootTx.uid);
descendantTx.ancestorFee -= rootTx.fee;
descendantTx.ancestorWeight -= rootTx.weight;
descendantTx.ancestorSigops -= rootTx.sigops;
tmpScore = descendantTx.score;
descendantTx.score = descendantTx.ancestorFee / (descendantTx.ancestorWeight / 4);
descendantTx.dependencyRate = descendantTx.dependencyRate ? Math.min(descendantTx.dependencyRate, clusterRate) : clusterRate;
if (!descendantTx.modifiedNode) {
descendantTx.modified = true;
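The worker now exchanges compact numeric uids instead of txid strings, and hands back only the fee rates that actually changed (the dirty set), so the main thread can patch its mempool in place instead of re-deserializing every transaction. A hedged sketch of how a caller might apply the result (uidToTxid and the mempool shape are assumptions, not part of this diff):

interface GbtResult {
  blocks: number[][];
  rates: Map<number, number>;
  clusters: Map<number, number[]>;
}

function applyRates(result: GbtResult, uidToTxid: Map<number, string>,
  mempool: { [txid: string]: { effectiveFeePerVsize?: number } }): void {
  for (const [uid, rate] of result.rates) {
    const txid = uidToTxid.get(uid);
    if (txid && mempool[txid]) {
      // only dirty transactions appear in `rates`, so most entries are untouched
      mempool[txid].effectiveFeePerVsize = rate;
    }
  }
}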

View file

@ -1,7 +1,7 @@
import logger from '../logger';
import * as WebSocket from 'ws';
import {
BlockExtended, TransactionExtended, WebsocketResponse,
BlockExtended, TransactionExtended, MempoolTransactionExtended, WebsocketResponse,
OptimizedStatistic, ILoadingIndicators
} from '../mempool.interfaces';
import blocks from './blocks';
@ -12,7 +12,7 @@ import { Common } from './common';
import loadingIndicators from './loading-indicators';
import config from '../config';
import transactionUtils from './transaction-utils';
import rbfCache from './rbf-cache';
import rbfCache, { ReplacementInfo } from './rbf-cache';
import difficultyAdjustment from './difficulty-adjustment';
import feeApi from './fee-api';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
@ -22,18 +22,69 @@ import { deepClone } from '../utils/clone';
import priceUpdater from '../tasks/price-updater';
import { ApiPrice } from '../repositories/PricesRepository';
// valid 'want' subscriptions
const wantable = [
'blocks',
'mempool-blocks',
'live-2h-chart',
'stats',
];
class WebsocketHandler {
private wss: WebSocket.Server | undefined;
private extraInitProperties = {};
private numClients = 0;
private numConnected = 0;
private numDisconnected = 0;
private socketData: { [key: string]: string } = {};
private serializedInitData: string = '{}';
private lastRbfSummary: ReplacementInfo[] | null = null;
constructor() { }
setWebsocketServer(wss: WebSocket.Server) {
this.wss = wss;
}
setExtraInitProperties(property: string, value: any) {
setExtraInitData(property: string, value: any) {
this.extraInitProperties[property] = value;
this.updateSocketDataFields(this.extraInitProperties);
}
private updateSocketDataFields(data: { [property: string]: any }): void {
for (const property of Object.keys(data)) {
if (data[property] != null) {
this.socketData[property] = JSON.stringify(data[property]);
} else {
delete this.socketData[property];
}
}
this.serializedInitData = '{'
+ Object.keys(this.socketData).map(key => `"${key}": ${this.socketData[key]}`).join(', ')
+ '}';
}
private updateSocketData(): void {
const _blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
const da = difficultyAdjustment.getDifficultyAdjustment();
this.updateSocketDataFields({
'mempoolInfo': memPool.getMempoolInfo(),
'vBytesPerSecond': memPool.getVBytesPerSecond(),
'blocks': _blocks,
'conversions': priceUpdater.getLatestPrices(),
'mempool-blocks': mempoolBlocks.getMempoolBlocks(),
'transactions': memPool.getLatestTransactions(),
'backendInfo': backendInfo.getBackendInfo(),
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': da?.previousTime ? da : undefined,
'fees': feeApi.getRecommendedFee(),
});
}
public getSerializedInitData(): string {
return this.serializedInitData;
}
setupConnectionHandling() {
@ -42,49 +93,76 @@ class WebsocketHandler {
}
this.wss.on('connection', (client: WebSocket) => {
this.numConnected++;
client.on('error', logger.info);
client.on('close', () => {
this.numDisconnected++;
});
client.on('message', async (message: string) => {
try {
const parsedMessage: WebsocketResponse = JSON.parse(message);
const response = {};
if (parsedMessage.action === 'want') {
client['want-blocks'] = parsedMessage.data.indexOf('blocks') > -1;
client['want-mempool-blocks'] = parsedMessage.data.indexOf('mempool-blocks') > -1;
client['want-live-2h-chart'] = parsedMessage.data.indexOf('live-2h-chart') > -1;
client['want-stats'] = parsedMessage.data.indexOf('stats') > -1;
const wantNow = {};
if (parsedMessage && parsedMessage.action === 'want' && Array.isArray(parsedMessage.data)) {
for (const sub of wantable) {
const key = `want-${sub}`;
const wants = parsedMessage.data.includes(sub);
if (wants && client['wants'] && !client[key]) {
wantNow[key] = true;
}
client[key] = wants;
}
client['wants'] = true;
}
// send initial data when a client first starts a subscription
if (wantNow['want-blocks'] || (parsedMessage && parsedMessage['refresh-blocks'])) {
response['blocks'] = this.socketData['blocks'];
}
if (wantNow['want-mempool-blocks']) {
response['mempool-blocks'] = this.socketData['mempool-blocks'];
}
if (wantNow['want-stats']) {
response['mempoolInfo'] = this.socketData['mempoolInfo'];
response['vBytesPerSecond'] = this.socketData['vBytesPerSecond'];
response['fees'] = this.socketData['fees'];
response['da'] = this.socketData['da'];
}
if (parsedMessage && parsedMessage['track-tx']) {
if (/^[a-fA-F0-9]{64}$/.test(parsedMessage['track-tx'])) {
client['track-tx'] = parsedMessage['track-tx'];
const trackTxid = client['track-tx'];
// The client is telling us it couldn't find the transaction
if (parsedMessage['watch-mempool']) {
const rbfCacheTxid = rbfCache.getReplacedBy(client['track-tx']);
const rbfCacheTxid = rbfCache.getReplacedBy(trackTxid);
if (rbfCacheTxid) {
response['txReplaced'] = {
response['txReplaced'] = JSON.stringify({
txid: rbfCacheTxid,
};
});
client['track-tx'] = null;
} else {
// It might have appeared before we had the time to start watching for it
const tx = memPool.getMempool()[client['track-tx']];
const tx = memPool.getMempool()[trackTxid];
if (tx) {
if (config.MEMPOOL.BACKEND === 'esplora') {
response['tx'] = tx;
response['tx'] = JSON.stringify(tx);
} else {
// tx.prevout is missing from transactions when in bitcoind mode
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
response['tx'] = fullTx;
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
response['tx'] = JSON.stringify(fullTx);
} catch (e) {
logger.debug('Error finding transaction: ' + (e instanceof Error ? e.message : e));
}
}
} else {
try {
const fullTx = await transactionUtils.$getTransactionExtended(client['track-tx'], true);
response['tx'] = fullTx;
const fullTx = await transactionUtils.$getMempoolTransactionExtended(client['track-tx'], true);
response['tx'] = JSON.stringify(fullTx);
} catch (e) {
logger.debug('Error finding transaction. ' + (e instanceof Error ? e.message : e));
client['track-mempool-tx'] = parsedMessage['track-tx'];
@ -92,6 +170,13 @@ class WebsocketHandler {
}
}
}
const tx = memPool.getMempool()[trackTxid];
if (tx && tx.position) {
response['txPosition'] = JSON.stringify({
txid: trackTxid,
position: tx.position,
});
}
} else {
client['track-tx'] = null;
}
@ -123,25 +208,47 @@ class WebsocketHandler {
const index = parsedMessage['track-mempool-block'];
client['track-mempool-block'] = index;
const mBlocksWithTransactions = mempoolBlocks.getMempoolBlocksWithTransactions();
response['projected-block-transactions'] = {
response['projected-block-transactions'] = JSON.stringify({
index: index,
blockTransactions: mBlocksWithTransactions[index]?.transactions || [],
};
});
} else {
client['track-mempool-block'] = null;
}
}
if (parsedMessage && parsedMessage['track-rbf'] !== undefined) {
if (['all', 'fullRbf'].includes(parsedMessage['track-rbf'])) {
client['track-rbf'] = parsedMessage['track-rbf'];
response['rbfLatest'] = JSON.stringify(rbfCache.getRbfTrees(parsedMessage['track-rbf'] === 'fullRbf'));
} else {
client['track-rbf'] = false;
}
}
if (parsedMessage && parsedMessage['track-rbf-summary'] != null) {
if (parsedMessage['track-rbf-summary']) {
client['track-rbf-summary'] = true;
if (this.socketData['rbfSummary'] != null) {
response['rbfLatestSummary'] = this.socketData['rbfSummary'];
}
} else {
client['track-rbf-summary'] = false;
}
}
if (parsedMessage.action === 'init') {
const _blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
if (!_blocks) {
if (!this.socketData['blocks']?.length || !this.socketData['da'] || !this.socketData['backendInfo'] || !this.socketData['conversions']) {
this.updateSocketData();
}
if (!this.socketData['blocks']?.length) {
return;
}
client.send(JSON.stringify(this.getInitData(_blocks)));
client.send(this.serializedInitData);
}
if (parsedMessage.action === 'ping') {
response['pong'] = true;
response['pong'] = JSON.stringify(true);
}
if (parsedMessage['track-donation'] && parsedMessage['track-donation'].length === 22) {
@ -157,7 +264,8 @@ class WebsocketHandler {
}
if (Object.keys(response).length) {
client.send(JSON.stringify(response));
const serializedResponse = this.serializeResponse(response);
client.send(serializedResponse);
}
} catch (e) {
logger.debug('Error parsing websocket message: ' + (e instanceof Error ? e.message : e));
@ -186,11 +294,14 @@ class WebsocketHandler {
throw new Error('WebSocket.Server is not set');
}
this.updateSocketDataFields({ 'loadingIndicators': indicators });
const response = JSON.stringify({ loadingIndicators: indicators });
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
client.send(JSON.stringify({ loadingIndicators: indicators }));
client.send(response);
});
}
@ -199,38 +310,28 @@ class WebsocketHandler {
throw new Error('WebSocket.Server is not set');
}
this.updateSocketDataFields({ 'conversions': conversionRates });
const response = JSON.stringify({ conversions: conversionRates });
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
client.send(JSON.stringify({ conversions: conversionRates }));
client.send(response);
});
}
getInitData(_blocks?: BlockExtended[]) {
if (!_blocks) {
_blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
}
return {
'mempoolInfo': memPool.getMempoolInfo(),
'vBytesPerSecond': memPool.getVBytesPerSecond(),
'blocks': _blocks,
'conversions': priceUpdater.getLatestPrices(),
'mempool-blocks': mempoolBlocks.getMempoolBlocks(),
'transactions': memPool.getLatestTransactions(),
'backendInfo': backendInfo.getBackendInfo(),
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': difficultyAdjustment.getDifficultyAdjustment(),
'fees': feeApi.getRecommendedFee(),
...this.extraInitProperties
};
}
handleNewStatistic(stats: OptimizedStatistic) {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.printLogs();
const response = JSON.stringify({
'live-2h-chart': stats
});
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
@ -240,20 +341,58 @@ class WebsocketHandler {
return;
}
client.send(JSON.stringify({
'live-2h-chart': stats
}));
client.send(response);
});
}
async handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]): Promise<void> {
handleReorg(): void {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
const da = difficultyAdjustment.getDifficultyAdjustment();
// update init data
this.updateSocketDataFields({
'blocks': blocks.getBlocks(),
'da': da?.previousTime ? da : undefined,
});
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
const response = {};
if (client['want-blocks']) {
response['blocks'] = this.socketData['blocks'];
}
if (client['want-stats']) {
response['da'] = this.socketData['da'];
}
if (Object.keys(response).length) {
const serializedResponse = this.serializeResponse(response);
client.send(serializedResponse);
}
});
}
async $handleMempoolChange(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number,
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]): Promise<void> {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.printLogs();
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.updateBlockTemplates(newMempool, newTransactions, deletedTransactions.map(tx => tx.txid), true);
if (config.MEMPOOL.RUST_GBT) {
await mempoolBlocks.$rustUpdateBlockTemplates(newMempool, mempoolSize, newTransactions, deletedTransactions);
} else {
await mempoolBlocks.$updateBlockTemplates(newMempool, newTransactions, deletedTransactions, true);
}
} else {
mempoolBlocks.updateMempoolBlocks(newMempool, true);
}
@ -265,8 +404,72 @@ class WebsocketHandler {
const rbfTransactions = Common.findRbfTransactions(newTransactions, deletedTransactions);
const da = difficultyAdjustment.getDifficultyAdjustment();
memPool.handleRbfTransactions(rbfTransactions);
const rbfChanges = rbfCache.getRbfChanges();
let rbfReplacements;
let fullRbfReplacements;
let rbfSummary;
if (Object.keys(rbfChanges.trees).length) {
rbfReplacements = rbfCache.getRbfTrees(false);
fullRbfReplacements = rbfCache.getRbfTrees(true);
rbfSummary = rbfCache.getLatestRbfSummary();
}
for (const deletedTx of deletedTransactions) {
rbfCache.evict(deletedTx.txid);
}
memPool.removeFromSpendMap(deletedTransactions);
memPool.addToSpendMap(newTransactions);
const recommendedFees = feeApi.getRecommendedFee();
const latestTransactions = memPool.getLatestTransactions();
// update init data
const socketDataFields = {
'mempoolInfo': mempoolInfo,
'vBytesPerSecond': vBytesPerSecond,
'mempool-blocks': mBlocks,
'transactions': latestTransactions,
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': da?.previousTime ? da : undefined,
'fees': recommendedFees,
};
if (rbfSummary) {
socketDataFields['rbfSummary'] = rbfSummary;
}
this.updateSocketDataFields(socketDataFields);
// cache serialized objects to avoid stringify-ing the same thing for every client
const responseCache = { ...this.socketData };
function getCachedResponse(key: string, data): string {
if (!responseCache[key]) {
responseCache[key] = JSON.stringify(data);
}
return responseCache[key];
}
// pre-compute new tracked outspends
const outspendCache: { [txid: string]: { [vout: number]: { vin: number, txid: string } } } = {};
const trackedTxs = new Set<string>();
this.wss.clients.forEach((client) => {
if (client['track-tx']) {
trackedTxs.add(client['track-tx']);
}
});
if (trackedTxs.size > 0) {
for (const tx of newTransactions) {
for (let i = 0; i < tx.vin.length; i++) {
const vin = tx.vin[i];
if (trackedTxs.has(vin.txid)) {
if (!outspendCache[vin.txid]) {
outspendCache[vin.txid] = { [vin.vout]: { vin: i, txid: tx.txid }};
} else {
outspendCache[vin.txid][vin.vout] = { vin: i, txid: tx.txid };
}
}
}
}
}
this.wss.clients.forEach(async (client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
@ -275,15 +478,17 @@ class WebsocketHandler {
const response = {};
if (client['want-stats']) {
response['mempoolInfo'] = mempoolInfo;
response['vBytesPerSecond'] = vBytesPerSecond;
response['transactions'] = newTransactions.slice(0, 6).map((tx) => Common.stripTransaction(tx));
response['da'] = da;
response['fees'] = recommendedFees;
response['mempoolInfo'] = getCachedResponse('mempoolInfo', mempoolInfo);
response['vBytesPerSecond'] = getCachedResponse('vBytesPerSecond', vBytesPerSecond);
response['transactions'] = getCachedResponse('transactions', latestTransactions);
if (da?.previousTime) {
response['da'] = getCachedResponse('da', da);
}
response['fees'] = getCachedResponse('fees', recommendedFees);
}
if (client['want-mempool-blocks']) {
response['mempool-blocks'] = mBlocks;
response['mempool-blocks'] = getCachedResponse('mempool-blocks', mBlocks);
}
if (client['track-mempool-tx']) {
@ -291,13 +496,13 @@ class WebsocketHandler {
if (tx) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
response['tx'] = fullTx;
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
response['tx'] = JSON.stringify(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
}
} else {
response['tx'] = tx;
response['tx'] = JSON.stringify(tx);
}
client['track-mempool-tx'] = null;
}
@ -311,7 +516,7 @@ class WebsocketHandler {
if (someVin) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
foundTransactions.push(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
@ -325,7 +530,7 @@ class WebsocketHandler {
if (someVout) {
if (config.MEMPOOL.BACKEND !== 'esplora') {
try {
const fullTx = await transactionUtils.$getTransactionExtended(tx.txid, true);
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
foundTransactions.push(fullTx);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
@ -337,7 +542,7 @@ class WebsocketHandler {
}
if (foundTransactions.length) {
response['address-transactions'] = foundTransactions;
response['address-transactions'] = JSON.stringify(foundTransactions);
}
}
@ -366,89 +571,118 @@ class WebsocketHandler {
});
if (foundTransactions.length) {
response['address-transactions'] = foundTransactions;
response['address-transactions'] = JSON.stringify(foundTransactions);
}
}
if (client['track-tx']) {
const outspends: object = {};
newTransactions.forEach((tx) => tx.vin.forEach((vin, i) => {
if (vin.txid === client['track-tx']) {
outspends[vin.vout] = {
vin: i,
txid: tx.txid,
};
}
}));
const trackTxid = client['track-tx'];
const outspends = outspendCache[trackTxid];
if (Object.keys(outspends).length) {
response['utxoSpent'] = outspends;
if (outspends && Object.keys(outspends).length) {
response['utxoSpent'] = JSON.stringify(outspends);
}
if (rbfTransactions[client['track-tx']]) {
for (const rbfTransaction in rbfTransactions) {
if (client['track-tx'] === rbfTransaction) {
response['rbfTransaction'] = {
txid: rbfTransactions[rbfTransaction].txid,
};
break;
}
}
const rbfReplacedBy = rbfCache.getReplacedBy(client['track-tx']);
if (rbfReplacedBy) {
response['rbfTransaction'] = JSON.stringify({
txid: rbfReplacedBy,
});
}
const rbfChange = rbfChanges.map[client['track-tx']];
if (rbfChange) {
response['rbfInfo'] = JSON.stringify(rbfChanges.trees[rbfChange]);
}
const mempoolTx = newMempool[trackTxid];
if (mempoolTx && mempoolTx.position) {
response['txPosition'] = JSON.stringify({
txid: trackTxid,
position: mempoolTx.position,
});
}
}
if (client['track-mempool-block'] >= 0) {
const index = client['track-mempool-block'];
if (mBlockDeltas[index]) {
response['projected-block-transactions'] = {
response['projected-block-transactions'] = getCachedResponse(`projected-block-transactions-${index}`, {
index: index,
delta: mBlockDeltas[index],
};
});
}
}
if (client['track-rbf'] === 'all' && rbfReplacements) {
response['rbfLatest'] = getCachedResponse('rbfLatest', rbfReplacements);
} else if (client['track-rbf'] === 'fullRbf' && fullRbfReplacements) {
response['rbfLatest'] = getCachedResponse('fullrbfLatest', fullRbfReplacements);
}
if (client['track-rbf-summary'] && rbfSummary) {
response['rbfLatestSummary'] = getCachedResponse('rbfLatestSummary', rbfSummary);
}
if (Object.keys(response).length) {
client.send(JSON.stringify(response));
const serializedResponse = this.serializeResponse(response);
client.send(serializedResponse);
}
});
}
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise<void> {
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]): Promise<void> {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
this.printLogs();
const _memPool = memPool.getMempool();
const rbfTransactions = Common.findMinedRbfTransactions(transactions, memPool.getSpendMap());
memPool.handleMinedRbfTransactions(rbfTransactions);
memPool.removeFromSpendMap(transactions);
if (config.MEMPOOL.AUDIT) {
let projectedBlocks;
let auditMempool = _memPool;
// template calculation functions have mempool side effects, so calculate audits using
// a cloned copy of the mempool if we're running a different algorithm for mempool updates
const auditMempool = (config.MEMPOOL.ADVANCED_GBT_AUDIT === config.MEMPOOL.ADVANCED_GBT_MEMPOOL) ? _memPool : deepClone(_memPool);
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
projectedBlocks = await mempoolBlocks.makeBlockTemplates(auditMempool, false);
const separateAudit = config.MEMPOOL.ADVANCED_GBT_AUDIT !== config.MEMPOOL.ADVANCED_GBT_MEMPOOL;
if (separateAudit) {
auditMempool = deepClone(_memPool);
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
if (config.MEMPOOL.RUST_GBT) {
projectedBlocks = await mempoolBlocks.$oneOffRustBlockTemplates(auditMempool);
} else {
projectedBlocks = await mempoolBlocks.$makeBlockTemplates(auditMempool, false);
}
} else {
projectedBlocks = mempoolBlocks.updateMempoolBlocks(auditMempool, false);
}
} else {
projectedBlocks = mempoolBlocks.updateMempoolBlocks(auditMempool, false);
projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
}
if (Common.indexingEnabled() && memPool.isInSync()) {
const { censored, added, fresh, score } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
const { censored, added, fresh, sigop, fullrbf, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
const matchRate = Math.round(score * 100 * 100) / 100;
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee) : 0,
value: tx.value,
};
}) : [];
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions : [];
let totalFees = 0;
let totalWeight = 0;
for (const tx of stripped) {
totalFees += tx.fee;
totalWeight += (tx.vsize * 4);
}
BlocksSummariesRepository.$saveTemplate({
height: block.height,
template: {
id: block.id,
transactions: stripped
transactions: stripped,
}
});
@ -459,25 +693,39 @@ class WebsocketHandler {
addedTxs: added,
missingTxs: censored,
freshTxs: fresh,
sigopTxs: sigop,
fullrbfTxs: fullrbf,
matchRate: matchRate,
expectedFees: totalFees,
expectedWeight: totalWeight,
});
if (block.extras) {
block.extras.matchRate = matchRate;
block.extras.expectedFees = totalFees;
block.extras.expectedWeight = totalWeight;
block.extras.similarity = similarity;
}
}
} else if (block.extras) {
const mBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
if (mBlocks?.length && mBlocks[0].transactions) {
block.extras.similarity = Common.getSimilarity(mBlocks[0], transactions);
}
}
const removed: string[] = [];
// Update mempool to remove transactions included in the new block
for (const txId of txIds) {
delete _memPool[txId];
removed.push(txId);
rbfCache.evict(txId);
rbfCache.mined(txId);
}
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.updateBlockTemplates(_memPool, [], removed, true);
if (config.MEMPOOL.RUST_GBT) {
await mempoolBlocks.$rustUpdateBlockTemplates(_memPool, Object.keys(_memPool).length, [], transactions);
} else {
await mempoolBlocks.$makeBlockTemplates(_memPool, true);
}
} else {
mempoolBlocks.updateMempoolBlocks(_memPool, true);
}
@ -486,29 +734,64 @@ class WebsocketHandler {
const da = difficultyAdjustment.getDifficultyAdjustment();
const fees = feeApi.getRecommendedFee();
const mempoolInfo = memPool.getMempoolInfo();
// update init data
this.updateSocketDataFields({
'mempoolInfo': mempoolInfo,
'blocks': [...blocks.getBlocks(), block].slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT),
'mempool-blocks': mBlocks,
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
'da': da?.previousTime ? da : undefined,
'fees': fees,
});
const responseCache = { ...this.socketData };
function getCachedResponse(key, data): string {
if (!responseCache[key]) {
responseCache[key] = JSON.stringify(data);
}
return responseCache[key];
}
this.wss.clients.forEach((client) => {
if (client.readyState !== WebSocket.OPEN) {
return;
}
if (!client['want-blocks']) {
return;
const response = {};
if (client['want-blocks']) {
response['block'] = getCachedResponse('block', block);
}
const response = {
'block': block,
'mempoolInfo': memPool.getMempoolInfo(),
'da': da,
'fees': fees,
};
if (client['want-stats']) {
response['mempoolInfo'] = getCachedResponse('mempoolInfo', mempoolInfo);
response['vBytesPerSecond'] = getCachedResponse('vBytesPerSecond', memPool.getVBytesPerSecond());
response['fees'] = getCachedResponse('fees', fees);
if (da?.previousTime) {
response['da'] = getCachedResponse('da', da);
}
}
if (mBlocks && client['want-mempool-blocks']) {
response['mempool-blocks'] = mBlocks;
response['mempool-blocks'] = getCachedResponse('mempool-blocks', mBlocks);
}
if (client['track-tx'] && txIds.indexOf(client['track-tx']) > -1) {
response['txConfirmed'] = true;
if (client['track-tx']) {
const trackTxid = client['track-tx'];
if (trackTxid && txIds.indexOf(trackTxid) > -1) {
response['txConfirmed'] = JSON.stringify(trackTxid);
} else {
const mempoolTx = _memPool[trackTxid];
if (mempoolTx && mempoolTx.position) {
response['txPosition'] = JSON.stringify({
txid: trackTxid,
position: mempoolTx.position,
});
}
}
}
if (client['track-address']) {
@ -534,7 +817,7 @@ class WebsocketHandler {
};
});
response['block-transactions'] = foundTransactions;
response['block-transactions'] = JSON.stringify(foundTransactions);
}
}
@ -571,23 +854,45 @@ class WebsocketHandler {
};
});
response['block-transactions'] = foundTransactions;
response['block-transactions'] = JSON.stringify(foundTransactions);
}
}
if (client['track-mempool-block'] >= 0) {
const index = client['track-mempool-block'];
if (mBlockDeltas && mBlockDeltas[index]) {
response['projected-block-transactions'] = {
response['projected-block-transactions'] = getCachedResponse(`projected-block-transactions-${index}`, {
index: index,
delta: mBlockDeltas[index],
};
});
}
}
client.send(JSON.stringify(response));
if (Object.keys(response).length) {
const serializedResponse = this.serializeResponse(response);
client.send(serializedResponse);
}
});
}
// takes a dictionary of JSON-serialized values
// and zips them together into a valid JSON object
private serializeResponse(response): string {
return '{'
+ Object.keys(response).map(key => `"${key}": ${response[key]}`).join(', ')
+ '}';
}
private printLogs(): void {
if (this.wss) {
const count = this.wss?.clients?.size || 0;
const diff = count - this.numClients;
this.numClients = count;
logger.debug(`${count} websocket clients | ${this.numConnected} connected | ${this.numDisconnected} disconnected | (${diff >= 0 ? '+' : ''}${diff})`);
this.numConnected = 0;
this.numDisconnected = 0;
}
}
}
export default new WebsocketHandler();
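The handler now serializes each payload at most once per broadcast and splices the pre-stringified fragments into the outgoing message, instead of calling JSON.stringify on a fresh object for every connected client. A reduced sketch of the two pieces working together (standalone, with illustrative field names):

const cache: { [key: string]: string } = {};

function getCached(key: string, data: unknown): string {
  if (!cache[key]) {
    cache[key] = JSON.stringify(data); // pay the serialization cost once per broadcast
  }
  return cache[key];
}

// zip already-serialized fragments into one valid JSON object
function serialize(fields: { [key: string]: string }): string {
  return '{' + Object.keys(fields).map(k => `"${k}": ${fields[k]}`).join(', ') + '}';
}

serialize({ fees: getCached('fees', { fastestFee: 12 }) });
// => '{"fees": {"fastestFee":12}}'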

View file

@ -31,11 +31,17 @@ interface IConfig {
AUDIT: boolean;
ADVANCED_GBT_AUDIT: boolean;
ADVANCED_GBT_MEMPOOL: boolean;
RUST_GBT: boolean;
CPFP_INDEXING: boolean;
MAX_BLOCKS_BULK_QUERY: number;
DISK_CACHE_BLOCK_INTERVAL: number;
MAX_PUSH_TX_SIZE_WEIGHT: number;
ALLOW_UNREACHABLE: boolean;
};
ESPLORA: {
REST_API_URL: string;
UNIX_SOCKET_PATH: string | void | null;
RETRY_UNIX_SOCKET_AFTER: number;
};
LIGHTNING: {
ENABLED: boolean;
@ -51,6 +57,7 @@ interface IConfig {
TLS_CERT_PATH: string;
MACAROON_PATH: string;
REST_API_URL: string;
TIMEOUT: number;
};
CLIGHTNING: {
SOCKET: string;
@ -65,12 +72,14 @@ interface IConfig {
PORT: number;
USERNAME: string;
PASSWORD: string;
TIMEOUT: number;
};
SECOND_CORE_RPC: {
HOST: string;
PORT: number;
USERNAME: string;
PASSWORD: string;
TIMEOUT: number;
};
DATABASE: {
ENABLED: boolean;
@ -80,6 +89,7 @@ interface IConfig {
DATABASE: string;
USERNAME: string;
PASSWORD: string;
TIMEOUT: number;
};
SYSLOG: {
ENABLED: boolean;
@ -122,6 +132,12 @@ interface IConfig {
GEOLITE2_ASN: string;
GEOIP2_ISP: string;
},
REPLICATION: {
ENABLED: boolean;
AUDIT: boolean;
AUDIT_START_HEIGHT: number;
SERVERS: string[];
}
}
const defaults: IConfig = {
@ -153,11 +169,17 @@ const defaults: IConfig = {
'AUDIT': false,
'ADVANCED_GBT_AUDIT': false,
'ADVANCED_GBT_MEMPOOL': false,
'RUST_GBT': false,
'CPFP_INDEXING': false,
'MAX_BLOCKS_BULK_QUERY': 0,
'DISK_CACHE_BLOCK_INTERVAL': 6,
'MAX_PUSH_TX_SIZE_WEIGHT': 400000,
'ALLOW_UNREACHABLE': true,
},
'ESPLORA': {
'REST_API_URL': 'http://127.0.0.1:3000',
'UNIX_SOCKET_PATH': null,
'RETRY_UNIX_SOCKET_AFTER': 30000,
},
'ELECTRUM': {
'HOST': '127.0.0.1',
@ -168,13 +190,15 @@ const defaults: IConfig = {
'HOST': '127.0.0.1',
'PORT': 8332,
'USERNAME': 'mempool',
'PASSWORD': 'mempool'
'PASSWORD': 'mempool',
'TIMEOUT': 60000,
},
'SECOND_CORE_RPC': {
'HOST': '127.0.0.1',
'PORT': 8332,
'USERNAME': 'mempool',
'PASSWORD': 'mempool'
'PASSWORD': 'mempool',
'TIMEOUT': 60000,
},
'DATABASE': {
'ENABLED': true,
@ -183,7 +207,8 @@ const defaults: IConfig = {
'PORT': 3306,
'DATABASE': 'mempool',
'USERNAME': 'mempool',
'PASSWORD': 'mempool'
'PASSWORD': 'mempool',
'TIMEOUT': 180000,
},
'SYSLOG': {
'ENABLED': true,
@ -214,6 +239,7 @@ const defaults: IConfig = {
'TLS_CERT_PATH': '',
'MACAROON_PATH': '',
'REST_API_URL': 'https://localhost:8080',
'TIMEOUT': 10000,
},
'CLIGHTNING': {
'SOCKET': '',
@ -244,6 +270,12 @@ const defaults: IConfig = {
'GEOLITE2_ASN': '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
'GEOIP2_ISP': '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
},
'REPLICATION': {
'ENABLED': false,
'AUDIT': false,
'AUDIT_START_HEIGHT': 774000,
'SERVERS': [],
}
};
class Config implements IConfig {
@ -263,6 +295,7 @@ class Config implements IConfig {
PRICE_DATA_SERVER: IConfig['PRICE_DATA_SERVER'];
EXTERNAL_DATA_SERVER: IConfig['EXTERNAL_DATA_SERVER'];
MAXMIND: IConfig['MAXMIND'];
REPLICATION: IConfig['REPLICATION'];
constructor() {
const configs = this.merge(configFromFile, defaults);
@ -282,6 +315,7 @@ class Config implements IConfig {
this.PRICE_DATA_SERVER = configs.PRICE_DATA_SERVER;
this.EXTERNAL_DATA_SERVER = configs.EXTERNAL_DATA_SERVER;
this.MAXMIND = configs.MAXMIND;
this.REPLICATION = configs.REPLICATION;
}
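The new REPLICATION section and the added TIMEOUT fields only take effect because unspecified keys fall back to the `defaults` object via the recursive `merge` helper whose declaration follows. A simplified sketch of that deep-merge defaulting (not the file's actual implementation, which is truncated below):

function mergeDefaults(file: any, defaults: any): any {
  const out = { ...defaults };
  for (const key of Object.keys(file)) {
    const value = file[key];
    out[key] = value && typeof value === 'object' && !Array.isArray(value)
      ? mergeDefaults(value, defaults[key] ?? {}) // recurse into nested sections
      : value;                                    // scalars and arrays win outright
  }
  return out;
}

// a file that only sets REPLICATION.ENABLED still inherits AUDIT_START_HEIGHT etc.
mergeDefaults({ REPLICATION: { ENABLED: true } }, defaults);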
merge = (...objects: object[]): IConfig => {

View file

@ -30,11 +30,64 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
}
public async query<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
OkPacket[] | ResultSetHeader>(query, params?): Promise<[T, FieldPacket[]]>
OkPacket[] | ResultSetHeader>(query, params?, connection?: PoolConnection): Promise<[T, FieldPacket[]]>
{
this.checkDBFlag();
let hardTimeout;
if (query?.timeout != null) {
hardTimeout = Math.floor(query.timeout * 1.1);
} else {
hardTimeout = config.DATABASE.TIMEOUT;
}
if (hardTimeout > 0) {
return new Promise((resolve, reject) => {
const timer = setTimeout(() => {
reject(new Error(`DB query failed to return, reject or time out within ${hardTimeout / 1000}s - ${query?.sql?.slice(0, 160) || (typeof(query) === 'string' || query instanceof String ? query?.slice(0, 160) : 'unknown query')}`));
}, hardTimeout);
// Use a specific connection if provided, otherwise delegate to the pool
const connectionPromise = connection ? Promise.resolve(connection) : this.getPool();
connectionPromise.then((pool: PoolConnection | Pool) => {
return pool.query(query, params) as Promise<[T, FieldPacket[]]>;
}).then(result => {
resolve(result);
}).catch(error => {
reject(error);
}).finally(() => {
clearTimeout(timer);
});
});
} else {
const pool = await this.getPool();
return pool.query(query, params);
}
}
public async $atomicQuery<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
OkPacket[] | ResultSetHeader>(queries: { query, params }[]): Promise<[T, FieldPacket[]][]>
{
const pool = await this.getPool();
return pool.query(query, params);
const connection = await pool.getConnection();
try {
await connection.beginTransaction();
const results: [T, FieldPacket[]][] = [];
for (const query of queries) {
const result = await this.query(query.query, query.params, connection) as [T, FieldPacket[]];
results.push(result);
}
await connection.commit();
return results;
} catch (e) {
logger.err('Could not complete db transaction, rolling back: ' + (e instanceof Error ? e.message : e));
await connection.rollback();
throw e;
} finally {
connection.release();
}
}
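// A hedged usage sketch of $atomicQuery (hypothetical call site; the table and
// column names are illustrative, not part of this diff): both statements commit
// together or roll back together.
public async $exampleAtomicUsage(hash: string, matchRate: number): Promise<void> {
  await this.$atomicQuery([
    { query: 'INSERT INTO block_audits (hash, match_rate) VALUES (?, ?)', params: [hash, matchRate] },
    { query: 'UPDATE blocks SET audited = 1 WHERE hash = ?', params: [hash] },
  ]);
}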
public async checkDbConnection() {

View file

@ -2,6 +2,7 @@ import express from 'express';
import { Application, Request, Response, NextFunction } from 'express';
import * as http from 'http';
import * as WebSocket from 'ws';
import bitcoinApi from './api/bitcoin/bitcoin-api-factory';
import cluster from 'cluster';
import DB from './database';
import config from './config';
@ -45,7 +46,8 @@ class Server {
private wss: WebSocket.Server | undefined;
private server: http.Server | undefined;
private app: Application;
private currentBackendRetryInterval = 5;
private currentBackendRetryInterval = 1;
private backendRetryCount = 0;
private maxHeapSize: number = 0;
private heapLogInterval: number = 60;
@ -120,7 +122,7 @@ class Server {
await poolsUpdater.updatePoolsJson(); // Needs to be done before loading the disk cache because we sometimes wipe it
await syncAssets.syncAssets$();
if (config.MEMPOOL.ENABLED) {
diskCache.loadMempoolCache();
await diskCache.$loadMempoolCache();
}
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && cluster.isPrimary) {
@ -148,7 +150,7 @@ class Server {
if (config.BISQ.ENABLED) {
bisq.startBisqService();
bisq.setPriceCallbackFunction((price) => websocketHandler.setExtraInitProperties('bsq-price', price));
bisq.setPriceCallbackFunction((price) => websocketHandler.setExtraInitData('bsq-price', price));
blocks.setNewBlockCallback(bisq.handleNewBitcoinBlock.bind(bisq));
bisqMarkets.startBisqService();
}
@ -167,6 +169,7 @@ class Server {
}
async runMainUpdateLoop(): Promise<void> {
const start = Date.now();
try {
try {
await memPool.$updateMemPoolInfo();
@ -178,22 +181,28 @@ class Server {
logger.debug(msg);
}
}
await blocks.$updateBlocks();
await memPool.$updateMempool();
const newMempool = await bitcoinApi.$getRawMempool();
const numHandledBlocks = await blocks.$updateBlocks();
if (numHandledBlocks === 0) {
await memPool.$updateMempool(newMempool);
}
indexer.$run();
setTimeout(this.runMainUpdateLoop.bind(this), config.MEMPOOL.POLL_RATE_MS);
this.currentBackendRetryInterval = 5;
// rerun immediately if we skipped the mempool update, otherwise wait POLL_RATE_MS
const elapsed = Date.now() - start;
const remainingTime = Math.max(0, config.MEMPOOL.POLL_RATE_MS - elapsed);
setTimeout(this.runMainUpdateLoop.bind(this), numHandledBlocks > 0 ? 0 : remainingTime);
this.backendRetryCount = 0;
} catch (e: any) {
let loggerMsg = `Exception in runMainUpdateLoop(). Retrying in ${this.currentBackendRetryInterval} sec.`;
this.backendRetryCount++;
let loggerMsg = `Exception in runMainUpdateLoop() (count: ${this.backendRetryCount}). Retrying in ${this.currentBackendRetryInterval} sec.`;
loggerMsg += ` Reason: ${(e instanceof Error ? e.message : e)}.`;
if (e?.stack) {
loggerMsg += ` Stack trace: ${e.stack}`;
}
// For the first few exceptions, only `logger.debug` and retry with a short delay
// From the fifth consecutive exception, `logger.warn` and mark the mempool out of sync
// Maximum retry delay is 60 seconds
if (this.currentBackendRetryInterval > 5) {
if (this.backendRetryCount >= 5) {
logger.warn(loggerMsg);
mempool.setOutOfSync();
} else {
@ -203,8 +212,8 @@ class Server {
logger.debug(`AxiosError: ${e?.message}`);
}
setTimeout(this.runMainUpdateLoop.bind(this), 1000 * this.currentBackendRetryInterval);
this.currentBackendRetryInterval *= 2;
this.currentBackendRetryInterval = Math.min(this.currentBackendRetryInterval, 60);
} finally {
diskCache.unlock();
}
}
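The loop above now tracks consecutive failures separately from the delay: logging stays at debug level until five failures in a row, while the retry delay doubles from 1s up to a 60s ceiling and the counter resets on success. A generic sketch of that retry shape (standalone, with assumed names):

async function runWithBackoff(task: () => Promise<void>, pollRateMs: number): Promise<void> {
  let retryInterval = 1; // seconds
  let retryCount = 0;
  for (;;) {
    try {
      await task();
      retryInterval = 1;
      retryCount = 0;
      await new Promise(r => setTimeout(r, pollRateMs));
    } catch (e) {
      retryCount++;
      // stay quiet for transient blips, escalate after repeated failures
      console[retryCount >= 5 ? 'warn' : 'debug'](`attempt ${retryCount} failed: ${e}`);
      await new Promise(r => setTimeout(r, 1000 * retryInterval));
      retryInterval = Math.min(retryInterval * 2, 60);
    }
  }
}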
@ -215,11 +224,11 @@ class Server {
await lightningStatsUpdater.$startService();
await forensicsService.$startService();
} catch(e) {
logger.err(`Nodejs lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.err(`Exception in $runLightningBackend. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
await Common.sleep$(1000 * 60);
this.$runLightningBackend();
};
}
}
setUpWebsocketHandling(): void {
if (this.wss) {
@ -237,7 +246,7 @@ class Server {
websocketHandler.setupConnectionHandling();
if (config.MEMPOOL.ENABLED) {
statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
memPool.setAsyncMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
memPool.setAsyncMempoolChangedCallback(websocketHandler.$handleMempoolChange.bind(websocketHandler));
blocks.setNewAsyncBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
}
priceUpdater.setRatesChangedCallback(websocketHandler.handleNewConversionRates.bind(websocketHandler));
@ -275,7 +284,7 @@ class Server {
if (!this.warnedHeapCritical && this.maxHeapSize > warnThreshold) {
this.warnedHeapCritical = true;
logger.warn(`Used ${(this.maxHeapSize / stats.heap_size_limit).toFixed(2)}% of heap limit (${formatBytes(this.maxHeapSize, byteUnits, true)} / ${formatBytes(stats.heap_size_limit, byteUnits)})!`);
logger.warn(`Used ${(this.maxHeapSize / stats.heap_size_limit * 100).toFixed(2)}% of heap limit (${formatBytes(this.maxHeapSize, byteUnits, true)} / ${formatBytes(stats.heap_size_limit, byteUnits)})!`);
}
if (this.lastHeapLogTime === null || (now - this.lastHeapLogTime) > (this.heapLogInterval * 1000)) {
logger.debug(`Memory usage: ${formatBytes(this.maxHeapSize, byteUnits)} / ${formatBytes(stats.heap_size_limit, byteUnits)}`);

View file

@ -6,6 +6,8 @@ import logger from './logger';
import bitcoinClient from './api/bitcoin/bitcoin-client';
import priceUpdater from './tasks/price-updater';
import PricesRepository from './repositories/PricesRepository';
import config from './config';
import auditReplicator from './replication/AuditReplication';
export interface CoreIndex {
name: string;
@ -72,7 +74,7 @@ class Indexer {
return;
}
if (task === 'blocksPrices' && !this.tasksRunning.includes(task)) {
if (task === 'blocksPrices' && !this.tasksRunning.includes(task) && !['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
this.tasksRunning.push(task);
const lastestPriceId = await PricesRepository.$getLatestPriceId();
if (priceUpdater.historyInserted === false || lastestPriceId === null) {
@ -134,6 +136,8 @@ class Indexer {
await mining.$generatePoolHashrateHistory();
await blocks.$generateBlocksSummariesDatabase();
await blocks.$generateCPFPDatabase();
await blocks.$generateAuditStats();
await auditReplicator.$sync();
} catch (e) {
this.indexerRunning = false;
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));
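The blocksPrices task above is now double-gated: skipped while an instance is already listed in tasksRunning, and skipped entirely on testnet and signet, where price history is not meaningful. A minimal sketch of that idempotent-task guard (simplified, with assumed names):

const tasksRunning: string[] = [];

async function runOnce(task: string, network: string, job: () => Promise<void>): Promise<void> {
  const noPrices = ['testnet', 'signet'].includes(network);
  if (tasksRunning.includes(task) || (task === 'blocksPrices' && noPrices)) {
    return; // already running, or not applicable on this network
  }
  tasksRunning.push(task);
  try {
    await job();
  } finally {
    tasksRunning.splice(tasksRunning.indexOf(task), 1);
  }
}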

View file

@ -69,6 +69,10 @@ class Logger {
this.network = this.getNetwork();
}
public updateNetwork(): void {
this.network = this.getNetwork();
}
private addprio(prio): void {
this[prio] = (function(_this) {
return function(msg, tag?: string) {

View file

@ -19,6 +19,7 @@ export interface PoolInfo {
blockCount: number;
slug: string;
avgMatchRate: number | null;
avgFeeDelta: number | null;
}
export interface PoolStats extends PoolInfo {
@ -32,13 +33,19 @@ export interface BlockAudit {
hash: string,
missingTxs: string[],
freshTxs: string[],
sigopTxs: string[],
fullrbfTxs: string[],
addedTxs: string[],
matchRate: number,
expectedFees?: number,
expectedWeight?: number,
}
export interface AuditScore {
hash: string,
matchRate?: number,
expectedFees?: number
expectedWeight?: number
}
export interface MempoolBlock {
@ -58,6 +65,7 @@ export interface MempoolBlockWithTransactions extends MempoolBlock {
export interface MempoolBlockDelta {
added: TransactionStripped[];
removed: string[];
changed: { txid: string, rate: number | undefined }[];
}
interface VinStrippedToScriptsig {
@ -79,25 +87,54 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
descendants?: Ancestor[];
bestDescendant?: BestDescendant | null;
cpfpChecked?: boolean;
deleteAfter?: number;
position?: {
block: number,
vsize: number,
};
uid?: number;
}
export interface MempoolTransactionExtended extends TransactionExtended {
order: number;
sigops: number;
adjustedVsize: number;
adjustedFeePerVsize: number;
inputs?: number[];
lastBoosted?: number;
}
export interface AuditTransaction {
txid: string;
uid: number;
fee: number;
weight: number;
feePerVsize: number;
effectiveFeePerVsize: number;
vin: string[];
sigops: number;
inputs: number[];
relativesSet: boolean;
ancestorMap: Map<string, AuditTransaction>;
ancestorMap: Map<number, AuditTransaction>;
children: Set<AuditTransaction>;
ancestorFee: number;
ancestorWeight: number;
ancestorSigops: number;
score: number;
used: boolean;
modified: boolean;
modifiedNode: HeapNode<AuditTransaction>;
dependencyRate?: number;
}
export interface CompactThreadTransaction {
uid: number;
fee: number;
weight: number;
sigops: number;
feePerVsize: number;
effectiveFeePerVsize: number;
inputs: number[];
cpfpRoot?: number;
cpfpChecked?: boolean;
dirty?: boolean;
}
export interface ThreadTransaction {
@ -106,7 +143,7 @@ export interface ThreadTransaction {
weight: number;
feePerVsize: number;
effectiveFeePerVsize?: number;
vin: string[];
inputs: number[];
cpfpRoot?: string;
cpfpChecked?: boolean;
}
@ -145,6 +182,7 @@ export interface TransactionStripped {
fee: number;
vsize: number;
value: number;
rate?: number; // effective fee rate
}
export interface BlockExtension {
@ -153,6 +191,9 @@ export interface BlockExtension {
feeRange: number[]; // fee rate percentiles
reward: number;
matchRate: number | null;
expectedFees: number | null;
expectedWeight: number | null;
similarity?: number;
pool: {
id: number; // Note - This is the `unique_id`, not to mix with the auto increment `id`
name: string;
@ -188,6 +229,7 @@ export interface BlockExtension {
*/
export interface BlockExtended extends IEsploraApi.Block {
extras: BlockExtension;
canonical?: string;
}
export interface BlockSummary {
@ -195,6 +237,15 @@ export interface BlockSummary {
transactions: TransactionStripped[];
}
export interface AuditSummary extends BlockAudit {
timestamp?: number,
size?: number,
weight?: number,
tx_count?: number,
transactions: TransactionStripped[];
template?: TransactionStripped[];
}
export interface BlockPrice {
height: number;
priceId: number;
@ -213,6 +264,28 @@ export interface MempoolStats {
tx_count: number;
}
export interface EffectiveFeeStats {
medianFee: number; // median effective fee rate
feeRange: number[]; // 2nd, 10th, 25th, 50th, 75th, 90th, 98th percentiles
}
export interface WorkingEffectiveFeeStats extends EffectiveFeeStats {
minFee: number;
maxFee: number;
}
export interface CpfpCluster {
root: string,
height: number,
txs: Ancestor[],
effectiveFeePerVsize: number,
}
export interface CpfpSummary {
transactions: TransactionExtended[];
clusters: CpfpCluster[];
}
export interface Statistic {
id?: number;
added: string;
@ -308,9 +381,11 @@ export interface IDifficultyAdjustment {
remainingBlocks: number;
remainingTime: number;
previousRetarget: number;
previousTime: number;
nextRetargetHeight: number;
timeAvg: number;
timeOffset: number;
expectedBlocks: number;
}
export interface IndexedDifficultyAdjustment {

View file

@ -0,0 +1,134 @@
import DB from '../database';
import logger from '../logger';
import { AuditSummary } from '../mempool.interfaces';
import blocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import blocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import { $sync } from './replicator';
import config from '../config';
import { Common } from '../api/common';
import blocks from '../api/blocks';
const BATCH_SIZE = 16;
/**
* Syncs missing block template and audit data from trusted servers
*/
class AuditReplication {
inProgress: boolean = false;
skip: Set<string> = new Set();
public async $sync(): Promise<void> {
if (!config.REPLICATION.ENABLED || !config.REPLICATION.AUDIT) {
// replication not enabled
return;
}
if (this.inProgress) {
logger.info(`AuditReplication sync already in progress`, 'Replication');
return;
}
this.inProgress = true;
const missingAudits = await this.$getMissingAuditBlocks();
logger.debug(`Fetching missing audit data for ${missingAudits.length} blocks from trusted servers`, 'Replication');
let totalSynced = 0;
let totalMissed = 0;
let loggerTimer = Date.now();
// process missing audits in batches of BATCH_SIZE
for (let i = 0; i < missingAudits.length; i += BATCH_SIZE) {
const slice = missingAudits.slice(i, i + BATCH_SIZE);
const results = await Promise.all(slice.map(hash => this.$syncAudit(hash)));
const synced = results.reduce((total, status) => status ? total + 1 : total, 0);
totalSynced += synced;
totalMissed += (slice.length - synced);
if (Date.now() - loggerTimer > 10000) {
loggerTimer = Date.now();
logger.info(`Found ${totalSynced} / ${totalSynced + totalMissed} of ${missingAudits.length} missing audits`, 'Replication');
}
await Common.sleep$(1000);
}
logger.debug(`Fetched ${totalSynced} audits, ${totalMissed} still missing`, 'Replication');
this.inProgress = false;
}
private async $syncAudit(hash: string): Promise<boolean> {
if (this.skip.has(hash)) {
// we already know none of our trusted servers have this audit
return false;
}
let success = false;
// start with a random server so load is uniformly spread
const syncResult = await $sync(`/api/v1/block/${hash}/audit-summary`);
if (syncResult) {
if (syncResult.data?.template?.length) {
await this.$saveAuditData(hash, syncResult.data);
logger.info(`Imported audit data from ${syncResult.server} for block ${syncResult.data.height} (${hash})`);
success = true;
}
if (!syncResult.data && !syncResult.exists) {
this.skip.add(hash);
}
}
return success;
}
private async $getMissingAuditBlocks(): Promise<string[]> {
try {
const startHeight = config.REPLICATION.AUDIT_START_HEIGHT || 0;
const [rows]: any[] = await DB.query(`
SELECT auditable.hash, auditable.height
FROM (
SELECT hash, height
FROM blocks
WHERE height >= ?
) AS auditable
LEFT JOIN blocks_audits ON auditable.hash = blocks_audits.hash
WHERE blocks_audits.hash IS NULL
ORDER BY auditable.height DESC
`, [startHeight]);
return rows.map(row => row.hash);
} catch (e: any) {
logger.err(`Cannot fetch missing audit blocks from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
private async $saveAuditData(blockHash: string, auditSummary: AuditSummary): Promise<void> {
// save audit & template to DB
await blocksSummariesRepository.$saveTemplate({
height: auditSummary.height,
template: {
id: blockHash,
transactions: auditSummary.template || []
}
});
await blocksAuditsRepository.$saveAudit({
hash: blockHash,
height: auditSummary.height,
time: auditSummary.timestamp || auditSummary.time,
missingTxs: auditSummary.missingTxs || [],
addedTxs: auditSummary.addedTxs || [],
freshTxs: auditSummary.freshTxs || [],
sigopTxs: auditSummary.sigopTxs || [],
fullrbfTxs: auditSummary.fullrbfTxs || [],
matchRate: auditSummary.matchRate,
expectedFees: auditSummary.expectedFees,
expectedWeight: auditSummary.expectedWeight,
});
// add missing data to cached blocks
const cachedBlock = blocks.getBlocks().find(block => block.id === blockHash);
if (cachedBlock) {
cachedBlock.extras.matchRate = auditSummary.matchRate;
cachedBlock.extras.expectedFees = auditSummary.expectedFees || null;
cachedBlock.extras.expectedWeight = auditSummary.expectedWeight || null;
}
}
}
export default new AuditReplication();
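The sync loop above is a reusable throttled-batch pattern: slice the work list, run one batch concurrently, then pause before the next. A minimal standalone sketch of that pattern (function and parameter names are illustrative, not part of the codebase):

```typescript
async function processInBatches<T, R>(
  items: T[],
  batchSize: number,
  worker: (item: T) => Promise<R>,
  pauseMs: number,
): Promise<R[]> {
  const results: R[] = [];
  for (let i = 0; i < items.length; i += batchSize) {
    const slice = items.slice(i, i + batchSize);
    // run the whole batch concurrently, like the Promise.all over $syncAudit above
    results.push(...await Promise.all(slice.map(worker)));
    // throttle between batches so the remote servers aren't hammered
    await new Promise(resolve => setTimeout(resolve, pauseMs));
  }
  return results;
}
```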

View file

@ -0,0 +1,70 @@
import config from '../config';
import backendInfo from '../api/backend-info';
import axios, { AxiosResponse } from 'axios';
import { SocksProxyAgent } from 'socks-proxy-agent';
import * as https from 'https';
export async function $sync(path): Promise<{ data?: any, exists: boolean, server?: string }> {
// start with a random server so load is uniformly spread
let allMissing = true;
const offset = Math.floor(Math.random() * config.REPLICATION.SERVERS.length);
for (let i = 0; i < config.REPLICATION.SERVERS.length; i++) {
const server = config.REPLICATION.SERVERS[(i + offset) % config.REPLICATION.SERVERS.length];
// don't query ourself
if (server === backendInfo.getBackendInfo().hostname) {
continue;
}
try {
const result = await query(`https://${server}${path}`);
if (result) {
return { data: result, exists: true, server };
}
} catch (e: any) {
if (e?.response?.status === 404) {
// this server is also missing this data
} else {
// something else went wrong
allMissing = false;
}
}
}
return { exists: !allMissing };
}
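Starting at a random offset spreads load evenly across the configured servers while still visiting each at most once per call; a compact illustration with hypothetical hostnames:

```typescript
const servers = ['a.example', 'b.example', 'c.example']; // hypothetical hosts
const offset = Math.floor(Math.random() * servers.length);
const order = servers.map((_, i) => servers[(i + offset) % servers.length]);
// offset === 1 yields ['b.example', 'c.example', 'a.example'], each server exactly once
```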
export async function query(path): Promise<object> {
type axiosOptions = {
headers: {
'User-Agent': string
};
timeout: number;
httpsAgent?: https.Agent;
};
const axiosOptions: axiosOptions = {
headers: {
'User-Agent': (config.MEMPOOL.USER_AGENT === 'mempool') ? `mempool/v${backendInfo.getBackendInfo().version}` : `${config.MEMPOOL.USER_AGENT}`
},
timeout: config.SOCKS5PROXY.ENABLED ? 30000 : 10000
};
if (config.SOCKS5PROXY.ENABLED) {
const socksOptions = {
agentOptions: {
keepAlive: true,
},
hostname: config.SOCKS5PROXY.HOST,
port: config.SOCKS5PROXY.PORT,
username: config.SOCKS5PROXY.USERNAME || 'circuit0',
password: config.SOCKS5PROXY.PASSWORD,
};
axiosOptions.httpsAgent = new SocksProxyAgent(socksOptions);
}
const data: AxiosResponse = await axios.get(path, axiosOptions);
if (data.statusText === 'error' || !data.data) {
throw new Error(`${data.status}`);
}
return data.data;
}

View file

@ -6,20 +6,32 @@ import { BlockAudit, AuditScore } from '../mempool.interfaces';
class BlocksAuditRepositories {
public async $saveAudit(audit: BlockAudit): Promise<void> {
try {
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, match_rate)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), audit.matchRate]);
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, sigop_txs, fullrbf_txs, match_rate, expected_fees, expected_weight)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
} else {
logger.err(`Cannot save block audit into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
public async $getBlockPredictionsHistory(div: number, interval: string | null): Promise<any> {
public async $setSummary(hash: string, expectedFees: number, expectedWeight: number) {
try {
await DB.query(`
UPDATE blocks_audits SET
expected_fees = ?,
expected_weight = ?
WHERE hash = ?
`, [expectedFees, expectedWeight, hash]);
} catch (e: any) {
logger.err(`Cannot update block audit in db. Reason: ` + (e instanceof Error ? e.message : e));
}
}
public async $getBlocksHealthHistory(div: number, interval: string | null): Promise<any> {
try {
let query = `SELECT UNIX_TIMESTAMP(time) as time, height, match_rate FROM blocks_audits`;
@ -32,17 +44,17 @@ class BlocksAuditRepositories {
const [rows] = await DB.query(query);
return rows;
} catch (e: any) {
logger.err(`Cannot fetch block prediction history. Reason: ` + (e instanceof Error ? e.message : e));
logger.err(`Cannot fetch blocks health history. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getPredictionsCount(): Promise<number> {
public async $getBlocksHealthCount(): Promise<number> {
try {
const [rows] = await DB.query(`SELECT count(hash) as count FROM blocks_audits`);
return rows[0].count;
} catch (e: any) {
logger.err(`Cannot fetch block prediction history. Reason: ` + (e instanceof Error ? e.message : e));
logger.err(`Cannot fetch blocks health count. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
@ -50,12 +62,18 @@ class BlocksAuditRepositories {
public async $getBlockAudit(hash: string): Promise<any> {
try {
const [rows]: any[] = await DB.query(
`SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
blocks.weight, blocks.tx_count,
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, fresh_txs as freshTxs, match_rate as matchRate
`SELECT blocks_audits.height, blocks_audits.hash as id, UNIX_TIMESTAMP(blocks_audits.time) as timestamp,
template,
missing_txs as missingTxs,
added_txs as addedTxs,
fresh_txs as freshTxs,
sigop_txs as sigopTxs,
fullrbf_txs as fullrbfTxs,
match_rate as matchRate,
expected_fees as expectedFees,
expected_weight as expectedWeight
FROM blocks_audits
JOIN blocks ON blocks.hash = blocks_audits.hash
JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
JOIN blocks_templates ON blocks_templates.id = blocks_audits.hash
WHERE blocks_audits.hash = "${hash}"
`);
@ -63,12 +81,11 @@ class BlocksAuditRepositories {
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
rows[0].freshTxs = JSON.parse(rows[0].freshTxs);
rows[0].transactions = JSON.parse(rows[0].transactions);
rows[0].sigopTxs = JSON.parse(rows[0].sigopTxs);
rows[0].fullrbfTxs = JSON.parse(rows[0].fullrbfTxs);
rows[0].template = JSON.parse(rows[0].template);
if (rows[0].transactions.length) {
return rows[0];
}
return rows[0];
}
return null;
} catch (e: any) {
@ -80,7 +97,7 @@ class BlocksAuditRepositories {
public async $getBlockAuditScore(hash: string): Promise<AuditScore> {
try {
const [rows]: any[] = await DB.query(
`SELECT hash, match_rate as matchRate
`SELECT hash, match_rate as matchRate, expected_fees as expectedFees, expected_weight as expectedWeight
FROM blocks_audits
WHERE blocks_audits.hash = "${hash}"
`);
@ -94,7 +111,7 @@ class BlocksAuditRepositories {
public async $getBlockAuditScores(maxHeight: number, minHeight: number): Promise<AuditScore[]> {
try {
const [rows]: any[] = await DB.query(
`SELECT hash, match_rate as matchRate
`SELECT hash, match_rate as matchRate, expected_fees as expectedFees, expected_weight as expectedWeight
FROM blocks_audits
WHERE blocks_audits.height BETWEEN ? AND ?
`, [minHeight, maxHeight]);
@ -104,6 +121,32 @@ class BlocksAuditRepositories {
throw e;
}
}
public async $getBlocksWithoutSummaries(): Promise<string[]> {
try {
const [fromRows]: any[] = await DB.query(`
SELECT height
FROM blocks_audits
WHERE expected_fees IS NULL
ORDER BY height DESC
LIMIT 1
`);
if (!fromRows?.length) {
return [];
}
const fromHeight = fromRows[0].height;
const [idRows]: any[] = await DB.query(`
SELECT hash
FROM blocks_audits
WHERE height <= ?
ORDER BY height DESC
`, [fromHeight]);
return idRows.map(row => row.hash);
} catch (e: any) {
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new BlocksAuditRepositories();

View file

@ -1,4 +1,4 @@
import { BlockExtended, BlockExtension, BlockPrice } from '../mempool.interfaces';
import { BlockExtended, BlockExtension, BlockPrice, EffectiveFeeStats } from '../mempool.interfaces';
import DB from '../database';
import logger from '../logger';
import { Common } from '../api/common';
@ -13,6 +13,48 @@ import chainTips from '../api/chain-tips';
import blocks from '../api/blocks';
import BlocksAuditsRepository from './BlocksAuditsRepository';
interface DatabaseBlock {
id: string;
height: number;
version: number;
timestamp: number;
bits: number;
nonce: number;
difficulty: number;
merkle_root: string;
tx_count: number;
size: number;
weight: number;
previousblockhash: string;
mediantime: number;
totalFees: number;
medianFee: number;
feeRange: string;
reward: number;
poolId: number;
poolName: string;
poolSlug: string;
avgFee: number;
avgFeeRate: number;
coinbaseRaw: string;
coinbaseAddress: string;
coinbaseSignature: string;
coinbaseSignatureAscii: string;
avgTxSize: number;
totalInputs: number;
totalOutputs: number;
totalOutputAmt: number;
medianFeeAmt: number;
feePercentiles: string;
segwitTotalTxs: number;
segwitTotalSize: number;
segwitTotalWeight: number;
header: string;
utxoSetChange: number;
utxoSetSize: number;
totalInputAmt: number;
}
const BLOCK_DB_FIELDS = `
blocks.hash AS id,
blocks.height,
@ -52,7 +94,7 @@ const BLOCK_DB_FIELDS = `
blocks.header,
blocks.utxoset_change AS utxoSetChange,
blocks.utxoset_size AS utxoSetSize,
blocks.total_input_amt AS totalInputAmts
blocks.total_input_amt AS totalInputAmt
`;
class BlocksRepository {
@ -171,6 +213,32 @@ class BlocksRepository {
}
}
/**
* Update missing fee amounts fields
*
* @param blockHash
* @param feeAmtPercentiles
* @param medianFeeAmt
*/
public async $updateFeeAmounts(blockHash: string, feeAmtPercentiles, medianFeeAmt) : Promise<void> {
try {
const query = `
UPDATE blocks
SET fee_percentiles = ?, median_fee_amt = ?
WHERE hash = ?
`;
const params: any[] = [
JSON.stringify(feeAmtPercentiles),
medianFeeAmt,
blockHash
];
await DB.query(query, params);
} catch (e: any) {
logger.err(`Cannot update fee amounts for block ${blockHash}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get all block height that have not been indexed between [startHeight, endHeight]
*/
@ -330,6 +398,55 @@ class BlocksRepository {
}
}
/**
* Get average block health for all blocks for a single pool
*/
public async $getAvgBlockHealthPerPoolId(poolId: number): Promise<number | null> {
const params: any[] = [];
const query = `
SELECT AVG(blocks_audits.match_rate) AS avg_match_rate
FROM blocks
JOIN blocks_audits ON blocks.height = blocks_audits.height
WHERE blocks.pool_id = ?
`;
params.push(poolId);
try {
const [rows] = await DB.query(query, params);
if (!rows[0] || rows[0].avg_match_rate == null) {
return null;
}
return Math.round(rows[0].avg_match_rate * 100) / 100;
} catch (e) {
logger.err(`Cannot get average block health for pool id ${poolId}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get average block health for all blocks for a single pool
*/
public async $getTotalRewardForPoolId(poolId: number): Promise<number> {
const params: any[] = [];
const query = `
SELECT sum(reward) as total_reward
FROM blocks
WHERE blocks.pool_id = ?
`;
params.push(poolId);
try {
const [rows] = await DB.query(query, params);
if (!rows[0] || !rows[0].total_reward) {
return 0;
}
return rows[0].total_reward;
} catch (e) {
logger.err(`Cannot get total reward for pool id ${poolId}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get the oldest indexed block
*/
@ -383,7 +500,7 @@ class BlocksRepository {
const blocks: BlockExtended[] = [];
for (const block of rows) {
blocks.push(await this.formatDbBlockIntoExtendedBlock(block));
blocks.push(await this.formatDbBlockIntoExtendedBlock(block as DatabaseBlock));
}
return blocks;
@ -410,37 +527,13 @@ class BlocksRepository {
return null;
}
return await this.formatDbBlockIntoExtendedBlock(rows[0]);
return await this.formatDbBlockIntoExtendedBlock(rows[0] as DatabaseBlock);
} catch (e) {
logger.err(`Cannot get indexed block ${height}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get one block by hash
*/
public async $getBlockByHash(hash: string): Promise<object | null> {
try {
const query = `
SELECT ${BLOCK_DB_FIELDS}
FROM blocks
JOIN pools ON blocks.pool_id = pools.id
WHERE hash = ?;
`;
const [rows]: any[] = await DB.query(query, [hash]);
if (rows.length <= 0) {
return null;
}
return await this.formatDbBlockIntoExtendedBlock(rows[0]);
} catch (e) {
logger.err(`Cannot get indexed block ${hash}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Return blocks difficulty
*/
@ -484,19 +577,6 @@ class BlocksRepository {
}
}
/**
* Return blocks height
*/
public async $getBlocksHeightsAndTimestamp(): Promise<object[]> {
try {
const [rows]: any[] = await DB.query(`SELECT height, blockTimestamp as timestamp FROM blocks`);
return rows;
} catch (e) {
logger.err('Cannot get blocks height and timestamp from the db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get general block stats
*/
@ -550,7 +630,6 @@ class BlocksRepository {
if (blocks[idx].previous_block_hash !== blocks[idx - 1].hash) {
logger.warn(`Chain divergence detected at block ${blocks[idx - 1].height}`);
await this.$deleteBlocksFrom(blocks[idx - 1].height);
await BlocksSummariesRepository.$deleteBlocksFrom(blocks[idx - 1].height);
await HashratesRepository.$deleteHashratesFromTimestamp(blocks[idx - 1].timestamp - 604800);
await DifficultyAdjustmentsRepository.$deleteAdjustementsFromHeight(blocks[idx - 1].height);
return false;
@ -570,7 +649,7 @@ class BlocksRepository {
* Delete blocks from the database from blockHeight
*/
public async $deleteBlocksFrom(blockHeight: number) {
logger.info(`Delete newer blocks from height ${blockHeight} from the database`);
logger.info(`Delete newer blocks from height ${blockHeight} from the database`, logger.tags.mining);
try {
await DB.query(`DELETE FROM blocks where height >= ${blockHeight}`);
@ -785,7 +864,7 @@ class BlocksRepository {
/**
* Get all blocks which have not be linked to a price yet
*/
public async $getBlocksWithoutPrice(): Promise<object[]> {
try {
const [rows]: any[] = await DB.query(`
SELECT UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.height
@ -797,7 +876,7 @@ class BlocksRepository {
return rows;
} catch (e) {
logger.err('Cannot get blocks height and timestamp from the db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
return [];
}
}
@ -817,7 +896,6 @@ class BlocksRepository {
logger.debug(`Cannot save blocks prices for blocks [${blockPrices[0].height} to ${blockPrices[blockPrices.length - 1].height}] because it has already been indexed, ignoring`);
} else {
logger.err(`Cannot save blocks prices for blocks [${blockPrices[0].height} to ${blockPrices[blockPrices.length - 1].height}] into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
@ -836,7 +914,7 @@ class BlocksRepository {
return blocks;
} catch (e) {
logger.err(`Cannot get blocks with missing coinstatsindex. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
return [];
}
}
@ -859,13 +937,32 @@ class BlocksRepository {
}
}
/**
* Save indexed effective fee statistics
*
* @param id
* @param feeStats
*/
public async $saveEffectiveFeeStats(id: string, feeStats: EffectiveFeeStats): Promise<void> {
try {
await DB.query(`
UPDATE blocks SET median_fee = ?, fee_span = ?
WHERE hash = ?`,
[feeStats.medianFee, JSON.stringify(feeStats.feeRange), id]
);
} catch (e) {
logger.err(`Cannot update block fee stats. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Convert a mysql row block into a BlockExtended. Note that you
* must provide the correct field into dbBlk object param
*
* @param dbBlk
*/
private async formatDbBlockIntoExtendedBlock(dbBlk: any): Promise<BlockExtended> {
private async formatDbBlockIntoExtendedBlock(dbBlk: DatabaseBlock): Promise<BlockExtended> {
const blk: Partial<BlockExtended> = {};
const extras: Partial<BlockExtension> = {};
@ -921,14 +1018,19 @@ class BlocksRepository {
// Match rate is not part of the blocks table, but it is part of APIs so we must include it
extras.matchRate = null;
extras.expectedFees = null;
extras.expectedWeight = null;
if (config.MEMPOOL.AUDIT) {
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(dbBlk.id);
if (auditScore != null) {
extras.matchRate = auditScore.matchRate;
extras.expectedFees = auditScore.expectedFees;
extras.expectedWeight = auditScore.expectedWeight;
}
}
// If we're missing block summary related field, check if we can populate them on the fly now
// This is for example triggered upon re-org
if (Common.blocksSummariesIndexingEnabled() &&
(extras.medianFeeAmt === null || extras.feePercentiles === null))
{
@ -936,11 +1038,12 @@ class BlocksRepository {
if (extras.feePercentiles === null) {
const block = await bitcoinClient.getBlock(dbBlk.id, 2);
const summary = blocks.summarizeBlock(block);
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
await BlocksSummariesRepository.$saveTransactions(dbBlk.height, dbBlk.id, summary.transactions);
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(dbBlk.id);
}
if (extras.feePercentiles !== null) {
extras.medianFeeAmt = extras.feePercentiles[3];
await this.$updateFeeAmounts(dbBlk.id, extras.feePercentiles, extras.medianFeeAmt);
}
}

View file

@ -1,6 +1,6 @@
import DB from '../database';
import logger from '../logger';
import { BlockSummary } from '../mempool.interfaces';
import { BlockSummary, TransactionStripped } from '../mempool.interfaces';
class BlocksSummariesRepository {
public async $getByBlockId(id: string): Promise<BlockSummary | undefined> {
@ -17,23 +17,17 @@ class BlocksSummariesRepository {
return undefined;
}
public async $saveSummary(params: { height: number, mined?: BlockSummary}) {
const blockId = params.mined?.id;
public async $saveTransactions(blockHeight: number, blockId: string, transactions: TransactionStripped[]): Promise<void> {
try {
const transactions = JSON.stringify(params.mined?.transactions || []);
const transactionsStr = JSON.stringify(transactions);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
transactions = ?
`, [params.height, blockId, transactions, '[]', transactions]);
INSERT INTO blocks_summaries
SET height = ?, transactions = ?, id = ?
ON DUPLICATE KEY UPDATE transactions = ?`,
[blockHeight, transactionsStr, blockId, transactionsStr]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block summary for ${blockId} because it has already been indexed, ignoring`);
} else {
logger.debug(`Cannot save block summary for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
logger.debug(`Cannot save block summary transactions for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
@ -42,21 +36,35 @@ class BlocksSummariesRepository {
try {
const transactions = JSON.stringify(params.template?.transactions || []);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
INSERT INTO blocks_templates (id, template)
VALUE (?, ?)
ON DUPLICATE KEY UPDATE
template = ?
`, [params.height, blockId, '[]', transactions, transactions]);
`, [blockId, transactions, transactions]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block template for ${blockId} because it has already been indexed, ignoring`);
} else {
logger.debug(`Cannot save block template for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
logger.warn(`Cannot save block template for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
}
}
}
public async $getTemplate(id: string): Promise<BlockSummary | undefined> {
try {
const [templates]: any[] = await DB.query(`SELECT * from blocks_templates WHERE id = ?`, [id]);
if (templates.length > 0) {
return {
id: templates[0].id,
transactions: JSON.parse(templates[0].template),
};
}
} catch (e) {
logger.err(`Cannot get block template for block id ${id}. Reason: ` + (e instanceof Error ? e.message : e));
}
return undefined;
}
public async $getIndexedSummariesId(): Promise<string[]> {
try {
const [rows]: any[] = await DB.query(`SELECT id from blocks_summaries`);
@ -68,19 +76,6 @@ class BlocksSummariesRepository {
return [];
}
/**
* Delete blocks from the database from blockHeight
*/
public async $deleteBlocksFrom(blockHeight: number) {
logger.info(`Delete newer blocks summary from height ${blockHeight} from the database`);
try {
await DB.query(`DELETE FROM blocks_summaries where height >= ${blockHeight}`);
} catch (e) {
logger.err('Cannot delete indexed blocks summaries. Reason: ' + (e instanceof Error ? e.message : e));
}
}
/**
* Get the fee percentiles if the block has already been indexed, [] otherwise
*

View file

@ -1,62 +1,19 @@
import cluster, { Cluster } from 'cluster';
import { RowDataPacket } from 'mysql2';
import DB from '../database';
import logger from '../logger';
import { Ancestor } from '../mempool.interfaces';
import { Ancestor, CpfpCluster } from '../mempool.interfaces';
import transactionRepository from '../repositories/TransactionRepository';
class CpfpRepository {
public async $saveCluster(clusterRoot: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number): Promise<boolean> {
if (!txs[0]) {
return false;
}
// skip clusters of transactions with the same fees
const roundedEffectiveFee = Math.round(effectiveFeePerVsize * 100) / 100;
const equalFee = txs.reduce((acc, tx) => {
return (acc && Math.round(((tx.fee || 0) / (tx.weight / 4)) * 100) / 100 === roundedEffectiveFee);
}, true);
if (equalFee) {
return false;
}
public async $batchSaveClusters(clusters: { root: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number }[]): Promise<boolean> {
try {
const packedTxs = Buffer.from(this.pack(txs));
await DB.query(
`
INSERT INTO compact_cpfp_clusters(root, height, txs, fee_rate)
VALUE (UNHEX(?), ?, ?, ?)
ON DUPLICATE KEY UPDATE
height = ?,
txs = ?,
fee_rate = ?
`,
[clusterRoot, height, packedTxs, effectiveFeePerVsize, height, packedTxs, effectiveFeePerVsize]
);
const maxChunk = 10;
let chunkIndex = 0;
while (chunkIndex < txs.length) {
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk).map(tx => {
return { txid: tx.txid, cluster: clusterRoot };
});
await transactionRepository.$batchSetCluster(chunk);
chunkIndex += maxChunk;
}
return true;
} catch (e: any) {
logger.err(`Cannot save cpfp cluster into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $batchSaveClusters(clusters: { root: string, height: number, txs: any, effectiveFeePerVsize: number}[]): Promise<boolean> {
try {
const clusterValues: any[] = [];
const txs: any[] = [];
const clusterValues: [string, number, Buffer, number][] = [];
const txs: { txid: string, cluster: string }[] = [];
for (const cluster of clusters) {
if (cluster.txs?.length > 1) {
if (cluster.txs?.length) {
const roundedEffectiveFee = Math.round(cluster.effectiveFeePerVsize * 100) / 100;
const equalFee = cluster.txs.reduce((acc, tx) => {
const equalFee = cluster.txs.length > 1 && cluster.txs.reduce((acc, tx) => {
return (acc && Math.round(((tx.fee || 0) / (tx.weight / 4)) * 100) / 100 === roundedEffectiveFee);
}, true);
if (!equalFee) {
@ -77,16 +34,10 @@ class CpfpRepository {
return false;
}
const queries: { query, params }[] = [];
const maxChunk = 100;
let chunkIndex = 0;
// insert transactions in batches of up to 100 rows
while (chunkIndex < txs.length) {
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk);
await transactionRepository.$batchSetCluster(chunk);
chunkIndex += maxChunk;
}
chunkIndex = 0;
// insert clusters in batches of up to 100 rows
while (chunkIndex < clusterValues.length) {
const chunk = clusterValues.slice(chunkIndex, chunkIndex + maxChunk);
@ -98,12 +49,23 @@ class CpfpRepository {
return (' (UNHEX(?), ?, ?, ?)');
}) + ';';
const values = chunk.flat();
await DB.query(
queries.push({
query,
values
);
params: values,
});
chunkIndex += maxChunk;
}
chunkIndex = 0;
// insert transactions in batches of up to 100 rows
while (chunkIndex < txs.length) {
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk);
queries.push(transactionRepository.buildBatchSetQuery(chunk));
chunkIndex += maxChunk;
}
await DB.$atomicQuery(queries);
return true;
} catch (e: any) {
logger.err(`Cannot save cpfp clusters into db. Reason: ` + (e instanceof Error ? e.message : e));
@ -111,7 +73,7 @@ class CpfpRepository {
}
}
public async $getCluster(clusterRoot: string): Promise<Cluster | void> {
public async $getCluster(clusterRoot: string): Promise<CpfpCluster | void> {
const [clusterRows]: any = await DB.query(
`
SELECT *
@ -122,6 +84,7 @@ class CpfpRepository {
);
const cluster = clusterRows[0];
if (cluster?.txs) {
cluster.effectiveFeePerVsize = cluster.fee_rate;
cluster.txs = this.unpack(cluster.txs);
return cluster;
}

View file

@ -220,7 +220,7 @@ class HashratesRepository {
* Delete hashrates from the database from timestamp
*/
public async $deleteHashratesFromTimestamp(timestamp: number) {
logger.info(`Delete newer hashrates from timestamp ${new Date(timestamp * 1000).toUTCString()} from the database`);
logger.info(`Delete newer hashrates from timestamp ${new Date(timestamp * 1000).toUTCString()} from the database`, logger.tags.mining);
try {
await DB.query(`DELETE FROM hashrates WHERE hashrate_timestamp >= FROM_UNIXTIME(?)`, [timestamp]);

View file

@ -39,7 +39,8 @@ class PoolsRepository {
pools.name AS name,
pools.link AS link,
slug,
AVG(blocks_audits.match_rate) AS avgMatchRate
AVG(blocks_audits.match_rate) AS avgMatchRate,
AVG((CAST(blocks.fees as SIGNED) - CAST(blocks_audits.expected_fees as SIGNED)) / NULLIF(CAST(blocks_audits.expected_fees as SIGNED), 0)) AS avgFeeDelta
FROM blocks
JOIN pools on pools.id = pool_id
LEFT JOIN blocks_audits ON blocks_audits.height = blocks.height
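`avgFeeDelta` averages `(fees - expected_fees) / expected_fees` per block: a block collecting 1.05 BTC in fees against 1.00 BTC expected contributes +0.05 (+5%) to the pool's average, and `NULLIF` turns a zero expectation into NULL so the row is skipped instead of dividing by zero.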

View file

@ -160,7 +160,7 @@ class PricesRepository {
// Compute fiat exchange rates
let latestPrice = rates[0] as ApiPrice;
if (latestPrice.USD === -1) {
if (!latestPrice || latestPrice.USD === -1) {
latestPrice = priceUpdater.getEmptyPricesObj();
}

View file

@ -25,9 +25,8 @@ class TransactionRepository {
}
}
public async $batchSetCluster(txs): Promise<void> {
try {
let query = `
public buildBatchSetQuery(txs: { txid: string, cluster: string }[]): { query, params } {
let query = `
INSERT IGNORE INTO compact_transactions
(
txid,
@ -35,13 +34,22 @@ class TransactionRepository {
)
VALUES
`;
query += txs.map(tx => {
return (' (UNHEX(?), UNHEX(?))');
}) + ';';
const values = txs.map(tx => [tx.txid, tx.cluster]).flat();
return {
query,
params: values,
};
}
public async $batchSetCluster(txs): Promise<void> {
try {
const query = this.buildBatchSetQuery(txs);
await DB.query(
query,
values
query.query,
query.params,
);
} catch (e: any) {
logger.err(`Cannot save cpfp transactions into db. Reason: ` + (e instanceof Error ? e.message : e));
@ -105,6 +113,7 @@ class TransactionRepository {
return {
descendants,
ancestors,
effectiveFeePerVsize: cluster.effectiveFeePerVsize,
};
}
}

View file

@ -27,7 +27,7 @@ class ForensicsService {
private async $runTasks(): Promise<void> {
try {
logger.info(`Running forensics scans`);
logger.debug(`Running forensics scans`);
if (config.MEMPOOL.BACKEND === 'esplora') {
await this.$runClosedChannelsForensics(false);
@ -73,7 +73,7 @@ class ForensicsService {
let progress = 0;
try {
logger.info(`Started running closed channel forensics...`);
logger.debug(`Started running closed channel forensics...`);
let channels;
if (onlyNewChannels) {
channels = await channelsApi.$getClosedChannelsWithoutReason();
@ -152,11 +152,11 @@ class ForensicsService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating closed channel forensics ${progress}/${channels.length}`);
logger.debug(`Updating closed channel forensics ${progress}/${channels.length}`);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`Closed channels forensics scan complete.`);
logger.debug(`Closed channels forensics scan complete.`);
} catch (e) {
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
}
@ -217,7 +217,7 @@ class ForensicsService {
let progress = 0;
try {
logger.info(`Started running open channel forensics...`);
logger.debug(`Started running open channel forensics...`);
const channels = await channelsApi.$getChannelsWithoutSourceChecked();
for (const openChannel of channels) {
@ -257,7 +257,7 @@ class ForensicsService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating opened channel forensics ${progress}/${channels?.length}`);
logger.debug(`Updating opened channel forensics ${progress}/${channels?.length}`);
this.loggerTimer = new Date().getTime() / 1000;
this.truncateTempCache();
}
@ -266,7 +266,7 @@ class ForensicsService {
}
}
logger.info(`Open channels forensics scan complete.`);
logger.debug(`Open channels forensics scan complete.`);
} catch (e) {
logger.err('$runOpenedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
} finally {

View file

@ -3,7 +3,6 @@ import logger from '../../logger';
import channelsApi from '../../api/explorer/channels.api';
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
import config from '../../config';
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
import { $lookupNodeLocation } from './sync-tasks/node-locations';
import lightningApi from '../../api/lightning/lightning-api-factory';
@ -269,7 +268,11 @@ class NetworkSyncService {
}
private async $scanForClosedChannels(): Promise<void> {
if (this.closedChannelsScanBlock === blocks.getCurrentBlockHeight()) {
let currentBlockHeight = blocks.getCurrentBlockHeight();
if (config.MEMPOOL.ENABLED === false) { // https://github.com/mempool/mempool/issues/3582
currentBlockHeight = await bitcoinApi.$getBlockHeightTip();
}
if (this.closedChannelsScanBlock === currentBlockHeight) {
logger.debug(`We've already scanned closed channels for this block, skipping.`);
return;
}
@ -283,7 +286,7 @@ class NetworkSyncService {
} else {
log += ` for the first time`;
}
logger.info(`${log}`, logger.tags.ln);
logger.debug(`${log}`, logger.tags.ln);
const channels = await channelsApi.$getChannelsByStatus([0, 1]);
for (const channel of channels) {
@ -300,12 +303,12 @@ class NetworkSyncService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Checking if channel has been closed ${progress}/${channels.length}`, logger.tags.ln);
logger.debug(`Checking if channel has been closed ${progress}/${channels.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
}
this.closedChannelsScanBlock = blocks.getCurrentBlockHeight();
this.closedChannelsScanBlock = currentBlockHeight;
logger.debug(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`, logger.tags.ln);
} catch (e) {
logger.err(`$scanForClosedChannels() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);

View file

@ -22,12 +22,15 @@ class LightningStatsUpdater {
* Update the latest entry for each node every config.LIGHTNING.STATS_REFRESH_INTERVAL seconds
*/
private async $logStatsDaily(): Promise<void> {
const date = new Date();
Common.setDateMidnight(date);
const networkGraph = await lightningApi.$getNetworkGraph();
await LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);
logger.debug(`Updated latest network stats`, logger.tags.ln);
try {
const date = new Date();
Common.setDateMidnight(date);
const networkGraph = await lightningApi.$getNetworkGraph();
await LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);
logger.debug(`Updated latest network stats`, logger.tags.ln);
} catch (e) {
logger.err(`Exception in $logStatsDaily. Reason: ${(e instanceof Error ? e.message : e)}`);
}
}
}

View file

@ -15,16 +15,20 @@ class LightningStatsImporter {
topologiesFolder = config.LIGHTNING.TOPOLOGY_FOLDER;
async $run(): Promise<void> {
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
logger.info(`Caching funding txs for currently existing channels`, logger.tags.ln);
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
try {
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
logger.info(`Caching funding txs for currently existing channels`, logger.tags.ln);
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
return;
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
return;
}
await this.$importHistoricalLightningStats();
await this.$cleanupIncorrectSnapshot();
} catch (e) {
logger.err(`Exception in LightningStatsImporter::$run(). ${e}`);
}
await this.$importHistoricalLightningStats();
await this.$cleanupIncorrectSnapshot();
}
/**

View file

@ -17,7 +17,9 @@ class PoolsUpdater {
treeUrl: string = config.MEMPOOL.POOLS_JSON_TREE_URL;
public async updatePoolsJson(): Promise<void> {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false ||
config.MEMPOOL.ENABLED === false
) {
return;
}
@ -60,7 +62,7 @@ class PoolsUpdater {
if (this.currentSha === null) {
logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl} over ${network}`, logger.tags.mining);
} else {
logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
logger.warn(`pools-v2.json is outdated, fetching latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
}
const poolsJson = await this.query(this.poolsUrl);
if (poolsJson === undefined) {
@ -69,7 +71,7 @@ class PoolsUpdater {
poolsParser.setMiningPools(poolsJson);
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools-v2.json import completed (no database)');
logger.info(`Mining pools-v2.json (${githubSha}) import completed (no database)`);
return;
}
@ -82,7 +84,7 @@ class PoolsUpdater {
logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
await DB.query('ROLLBACK;');
}
logger.info('PoolsUpdater completed');
logger.info(`Mining pools-v2.json (${githubSha}) import completed`);
} catch (e) {
this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week

View file

@ -73,6 +73,11 @@ class PriceUpdater {
}
public async $run(): Promise<void> {
if (config.MEMPOOL.NETWORK === 'signet' || config.MEMPOOL.NETWORK === 'testnet') {
// Coins have no value on testnet/signet, so we want to always show 0
return;
}
if (this.running === true) {
return;
}
@ -88,7 +93,7 @@ class PriceUpdater {
if (this.historyInserted === false && config.DATABASE.ENABLED === true) {
await this.$insertHistoricalPrices();
}
} catch (e) {
} catch (e: any) {
logger.err(`Cannot save BTC prices in db. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
}
@ -148,6 +153,7 @@ class PriceUpdater {
try {
const p = 60 * 60 * 1000; // milliseconds in an hour
const nowRounded = new Date(Math.round(new Date().getTime() / p) * p); // https://stackoverflow.com/a/28037042
this.latestPrices.time = nowRounded.getTime() / 1000;
await PricesRepository.$savePrices(nowRounded.getTime() / 1000, this.latestPrices);
} catch (e) {
this.lastRun = previousRun + 5 * 60;
@ -217,7 +223,7 @@ class PriceUpdater {
private async $insertMissingRecentPrices(type: 'hour' | 'day'): Promise<void> {
const existingPriceTimes = await PricesRepository.$getPricesTimes();
logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
logger.debug(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
const historicalPrices: PriceHistory[] = [];

View file

@ -26,4 +26,70 @@ export function formatBytes(bytes: number, toUnit: string, skipUnit = false): st
}
return `${bytes.toFixed(2)}${skipUnit ? '' : ' ' + byteUnits[unitIndex]}`;
}
// https://stackoverflow.com/a/64235212
export function hex2bin(hex: string): string {
if (!hex) {
return '';
}
hex = hex.replace('0x', '').toLowerCase();
let out = '';
for (const c of hex) {
switch (c) {
case '0': out += '0000'; break;
case '1': out += '0001'; break;
case '2': out += '0010'; break;
case '3': out += '0011'; break;
case '4': out += '0100'; break;
case '5': out += '0101'; break;
case '6': out += '0110'; break;
case '7': out += '0111'; break;
case '8': out += '1000'; break;
case '9': out += '1001'; break;
case 'a': out += '1010'; break;
case 'b': out += '1011'; break;
case 'c': out += '1100'; break;
case 'd': out += '1101'; break;
case 'e': out += '1110'; break;
case 'f': out += '1111'; break;
default: return '';
}
}
return out;
}
export function bin2hex(bin: string): string {
if (!bin) {
return '';
}
let out = '';
for (let i = 0; i < bin.length; i += 4) {
const c = bin.substring(i, i + 4);
switch (c) {
case '0000': out += '0'; break;
case '0001': out += '1'; break;
case '0010': out += '2'; break;
case '0011': out += '3'; break;
case '0100': out += '4'; break;
case '0101': out += '5'; break;
case '0110': out += '6'; break;
case '0111': out += '7'; break;
case '1000': out += '8'; break;
case '1001': out += '9'; break;
case '1010': out += 'a'; break;
case '1011': out += 'b'; break;
case '1100': out += 'c'; break;
case '1101': out += 'd'; break;
case '1110': out += 'e'; break;
case '1111': out += 'f'; break;
default: return '';
}
}
return out;
}
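A quick round-trip check of the two helpers, with illustrative values:

```typescript
hex2bin('a1');       // => '10100001'
bin2hex('10100001'); // => 'a1'
hex2bin('xyz');      // => '' (any non-hex character aborts with an empty string)
```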

View file

@ -5,6 +5,7 @@
"types": ["node", "jest"],
"lib": ["es2019", "dom"],
"strict": true,
"skipLibCheck": true,
"noImplicitAny": false,
"sourceMap": false,
"outDir": "dist",

View file

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 24, 2023.
Signed: 0xflicker

View file

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 28, 2023.
Signed: bennyhodl

View file

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of January 25, 2022.
Signed: joostjager

View file

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of April 7, 2023.
Signed: learntheropes

View file

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of April 8, 2023.
Signed: nothing0012

contributors/pfoytik.txt Normal file
View file

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 15, 2023.
Signed: pfoytik

View file

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 14, 2023.
Signed: secondl1ght

View file

@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of January 25, 2022.
Signed: vostrnad

View file

@ -34,6 +34,7 @@ If you want to use different credentials, specify them in the `docker-compose.ym
CORE_RPC_PORT: "8332"
CORE_RPC_USERNAME: "customuser"
CORE_RPC_PASSWORD: "custompassword"
CORE_RPC_TIMEOUT: "60000"
```
The IP address in the example above refers to Docker's default gateway IP address so that the container can hit the `bitcoind` instance running on the host machine. If your setup is different, update it accordingly.
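With Docker's default bridge network the gateway is typically `172.17.0.1` (confirm with `docker network inspect bridge`); if `bitcoind` listens on the host, point `CORE_RPC_HOST` at whatever gateway address your setup reports.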
@ -112,6 +113,7 @@ Below we list all settings from `mempool-config.json` and the corresponding over
"ADVANCED_GBT_MEMPOOL": false,
"CPFP_INDEXING": false,
"MAX_BLOCKS_BULK_QUERY": 0,
"DISK_CACHE_BLOCK_INTERVAL": 6
},
```
@ -142,7 +144,8 @@ Corresponding `docker-compose.yml` overrides:
MEMPOOL_ADVANCED_GBT_AUDIT: ""
MEMPOOL_ADVANCED_GBT_MEMPOOL: ""
MEMPOOL_CPFP_INDEXING: ""
MAX_BLOCKS_BULK_QUERY: ""
MEMPOOL_MAX_BLOCKS_BULK_QUERY: ""
MEMPOOL_DISK_CACHE_BLOCK_INTERVAL: ""
...
```
@ -158,7 +161,8 @@ Corresponding `docker-compose.yml` overrides:
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool"
"PASSWORD": "mempool",
"TIMEOUT": 60000
},
```
@ -170,6 +174,7 @@ Corresponding `docker-compose.yml` overrides:
CORE_RPC_PORT: ""
CORE_RPC_USERNAME: ""
CORE_RPC_PASSWORD: ""
CORE_RPC_TIMEOUT: 60000
...
```
@ -199,7 +204,9 @@ Corresponding `docker-compose.yml` overrides:
`mempool-config.json`:
```json
"ESPLORA": {
"REST_API_URL": "http://127.0.0.1:3000"
"REST_API_URL": "http://127.0.0.1:3000",
"UNIX_SOCKET_PATH": "/tmp/esplora-socket",
"RETRY_UNIX_SOCKET_AFTER": 30000
},
```
@ -208,6 +215,8 @@ Corresponding `docker-compose.yml` overrides:
api:
environment:
ESPLORA_REST_API_URL: ""
ESPLORA_UNIX_SOCKET_PATH: ""
ESPLORA_RETRY_UNIX_SOCKET_AFTER: ""
...
```
@ -219,7 +228,8 @@ Corresponding `docker-compose.yml` overrides:
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
"PASSWORD": "mempool"
"PASSWORD": "mempool",
"TIMEOUT": 60000
},
```
@ -231,6 +241,7 @@ Corresponding `docker-compose.yml` overrides:
SECOND_CORE_RPC_PORT: ""
SECOND_CORE_RPC_USERNAME: ""
SECOND_CORE_RPC_PASSWORD: ""
SECOND_CORE_RPC_TIMEOUT: ""
...
```
@ -258,6 +269,7 @@ Corresponding `docker-compose.yml` overrides:
DATABASE_DATABASE: ""
DATABASE_USERNAME: ""
DATABASE_PASSWORD: ""
DATABASE_TIMEOUT: ""
...
```
@ -403,6 +415,7 @@ Corresponding `docker-compose.yml` overrides:
"TLS_CERT_PATH": ""
"MACAROON_PATH": ""
"REST_API_URL": "https://localhost:8080"
"TIMEOUT": 10000
}
```
@ -413,6 +426,7 @@ Corresponding `docker-compose.yml` overrides:
LND_TLS_CERT_PATH: ""
LND_MACAROON_PATH: ""
LND_REST_API_URL: "https://localhost:8080"
LND_TIMEOUT: 10000
...
```
@ -432,3 +446,26 @@ Corresponding `docker-compose.yml` overrides:
CLIGHTNING_SOCKET: ""
...
```
<br/>
`mempool-config.json`:
```json
"MAXMIND": {
"ENABLED": true,
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
}
```
Corresponding `docker-compose.yml` overrides:
```yaml
api:
environment:
MAXMIND_ENABLED: true
MAXMIND_GEOLITE2_CITY: "/backend/GeoIP/GeoLite2-City.mmdb"
MAXMIND_GEOLITE2_ASN: "/backend/GeoIP/GeoLite2-ASN.mmdb"
MAXMIND_GEOIP2_ISP: "/backend/GeoIP/GeoIP2-ISP.mmdb"
...
```

View file

@ -7,7 +7,12 @@ WORKDIR /build
COPY . .
RUN apt-get update
RUN apt-get install -y build-essential python3 pkg-config
RUN apt-get install -y build-essential python3 pkg-config curl
# Install Rust via rustup
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
ENV PATH="/root/.cargo/bin:$PATH"
RUN npm install --omit=dev --omit=optional
RUN npm run package
@ -17,6 +22,7 @@ WORKDIR /backend
RUN chown 1000:1000 ./
COPY --from=builder --chown=1000:1000 /build/package ./package/
COPY --from=builder --chown=1000:1000 /build/GeoIP ./GeoIP/
COPY --from=builder --chown=1000:1000 /build/mempool-config.json /build/start.sh /build/wait-for-it.sh ./
USER 1000

View file

@ -25,14 +25,21 @@
"AUDIT": __MEMPOOL_AUDIT__,
"ADVANCED_GBT_AUDIT": __MEMPOOL_ADVANCED_GBT_AUDIT__,
"ADVANCED_GBT_MEMPOOL": __MEMPOOL_ADVANCED_GBT_MEMPOOL__,
"RUST_GBT": __MEMPOOL_RUST_GBT__,
"CPFP_INDEXING": __MEMPOOL_CPFP_INDEXING__,
"MAX_BLOCKS_BULK_QUERY": __MEMPOOL_MAX_BLOCKS_BULK_QUERY__
"MAX_BLOCKS_BULK_QUERY": __MEMPOOL_MAX_BLOCKS_BULK_QUERY__,
"DISK_CACHE_BLOCK_INTERVAL": __MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__,
"MAX_PUSH_TX_SIZE_WEIGHT": __MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__,
"ALLOW_UNREACHABLE": __MEMPOOL_ALLOW_UNREACHABLE__,
"POOLS_JSON_TREE_URL": "__MEMPOOL_POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__MEMPOOL_POOLS_JSON_URL__"
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
"PORT": __CORE_RPC_PORT__,
"USERNAME": "__CORE_RPC_USERNAME__",
"PASSWORD": "__CORE_RPC_PASSWORD__"
"PASSWORD": "__CORE_RPC_PASSWORD__",
"TIMEOUT": __CORE_RPC_TIMEOUT__
},
"ELECTRUM": {
"HOST": "__ELECTRUM_HOST__",
@ -40,13 +47,16 @@
"TLS_ENABLED": __ELECTRUM_TLS_ENABLED__
},
"ESPLORA": {
"REST_API_URL": "__ESPLORA_REST_API_URL__"
"REST_API_URL": "__ESPLORA_REST_API_URL__",
"UNIX_SOCKET_PATH": "__ESPLORA_UNIX_SOCKET_PATH__",
"RETRY_UNIX_SOCKET_AFTER": __ESPLORA_RETRY_UNIX_SOCKET_AFTER__
},
"SECOND_CORE_RPC": {
"HOST": "__SECOND_CORE_RPC_HOST__",
"PORT": __SECOND_CORE_RPC_PORT__,
"USERNAME": "__SECOND_CORE_RPC_USERNAME__",
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__"
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__",
"TIMEOUT": __SECOND_CORE_RPC_TIMEOUT__
},
"DATABASE": {
"ENABLED": __DATABASE_ENABLED__,
@ -55,7 +65,8 @@
"PORT": __DATABASE_PORT__,
"DATABASE": "__DATABASE_DATABASE__",
"USERNAME": "__DATABASE_USERNAME__",
"PASSWORD": "__DATABASE_PASSWORD__"
"PASSWORD": "__DATABASE_PASSWORD__",
"TIMEOUT": __DATABASE_TIMEOUT__
},
"SYSLOG": {
"ENABLED": __SYSLOG_ENABLED__,
@ -78,12 +89,15 @@
"STATS_REFRESH_INTERVAL": __LIGHTNING_STATS_REFRESH_INTERVAL__,
"GRAPH_REFRESH_INTERVAL": __LIGHTNING_GRAPH_REFRESH_INTERVAL__,
"LOGGER_UPDATE_INTERVAL": __LIGHTNING_LOGGER_UPDATE_INTERVAL__,
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__"
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__",
"FORENSICS_INTERVAL": __LIGHTNING_FORENSICS_INTERVAL__,
"FORENSICS_RATE_LIMIT": __LIGHTNING_FORENSICS_RATE_LIMIT__
},
"LND": {
"TLS_CERT_PATH": "__LND_TLS_CERT_PATH__",
"MACAROON_PATH": "__LND_MACAROON_PATH__",
"REST_API_URL": "__LND_REST_API_URL__"
"REST_API_URL": "__LND_REST_API_URL__",
"TIMEOUT": __LND_TIMEOUT__
},
"CLIGHTNING": {
"SOCKET": "__CLIGHTNING_SOCKET__"
@ -107,5 +121,17 @@
"LIQUID_ONION": "__EXTERNAL_DATA_SERVER_LIQUID_ONION__",
"BISQ_URL": "__EXTERNAL_DATA_SERVER_BISQ_URL__",
"BISQ_ONION": "__EXTERNAL_DATA_SERVER_BISQ_ONION__"
},
"MAXMIND": {
"ENABLED": __MAXMIND_ENABLED__,
"GEOLITE2_CITY": "__MAXMIND_GEOLITE2_CITY__",
"GEOLITE2_ASN": "__MAXMIND_GEOLITE2_ASN__",
"GEOIP2_ISP": "__MAXMIND_GEOIP2_ISP__"
},
"REPLICATION": {
"ENABLED": __REPLICATION_ENABLED__,
"AUDIT": __REPLICATION_AUDIT__,
"AUDIT_START_HEIGHT": __REPLICATION_AUDIT_START_HEIGHT__,
"SERVERS": __REPLICATION_SERVERS__
}
}
}

View file

@ -22,21 +22,26 @@ __MEMPOOL_EXTERNAL_MAX_RETRY__=${MEMPOOL_EXTERNAL_MAX_RETRY:=1}
__MEMPOOL_EXTERNAL_RETRY_INTERVAL__=${MEMPOOL_EXTERNAL_RETRY_INTERVAL:=0}
__MEMPOOL_USER_AGENT__=${MEMPOOL_USER_AGENT:=mempool}
__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__=${MEMPOOL_STDOUT_LOG_MIN_PRIORITY:=info}
__MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=false}
__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__=${MEMPOOL_AUTOMATIC_BLOCK_REINDEXING:=false}
__MEMPOOL_POOLS_JSON_URL__=${MEMPOOL_POOLS_JSON_URL:=https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json}
__MEMPOOL_POOLS_JSON_TREE_URL__=${MEMPOOL_POOLS_JSON_TREE_URL:=https://api.github.com/repos/mempool/mining-pools/git/trees/master}
__MEMPOOL_AUDIT__=${MEMPOOL_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_AUDIT__=${MEMPOOL_ADVANCED_GBT_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_MEMPOOL__=${MEMPOOL_ADVANCED_GBT_MEMPOOL:=false}
__MEMPOOL_RUST_GBT__=${MEMPOOL_RUST_GBT:=false}
__MEMPOOL_CPFP_INDEXING__=${MEMPOOL_CPFP_INDEXING:=false}
__MEMPOOL_MAX_BLOCKS_BULK_QUERY__=${MEMPOOL_MAX_BLOCKS_BULK_QUERY:=0}
__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__=${MEMPOOL_DISK_CACHE_BLOCK_INTERVAL:=6}
__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__=${MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT:=4000000}
__MEMPOOL_ALLOW_UNREACHABLE__=${MEMPOOL_ALLOW_UNREACHABLE:=true}
# CORE_RPC
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
__CORE_RPC_PORT__=${CORE_RPC_PORT:=8332}
__CORE_RPC_USERNAME__=${CORE_RPC_USERNAME:=mempool}
__CORE_RPC_PASSWORD__=${CORE_RPC_PASSWORD:=mempool}
__CORE_RPC_TIMEOUT__=${CORE_RPC_TIMEOUT:=60000}
# ELECTRUM
__ELECTRUM_HOST__=${ELECTRUM_HOST:=127.0.0.1}
@@ -45,12 +50,15 @@ __ELECTRUM_TLS_ENABLED__=${ELECTRUM_TLS_ENABLED:=false}
# ESPLORA
__ESPLORA_REST_API_URL__=${ESPLORA_REST_API_URL:=http://127.0.0.1:3000}
__ESPLORA_UNIX_SOCKET_PATH__=${ESPLORA_UNIX_SOCKET_PATH:="null"}
__ESPLORA_RETRY_UNIX_SOCKET_AFTER__=${ESPLORA_RETRY_UNIX_SOCKET_AFTER:=30000}
# SECOND_CORE_RPC
__SECOND_CORE_RPC_HOST__=${SECOND_CORE_RPC_HOST:=127.0.0.1}
__SECOND_CORE_RPC_PORT__=${SECOND_CORE_RPC_PORT:=8332}
__SECOND_CORE_RPC_USERNAME__=${SECOND_CORE_RPC_USERNAME:=mempool}
__SECOND_CORE_RPC_PASSWORD__=${SECOND_CORE_RPC_PASSWORD:=mempool}
__SECOND_CORE_RPC_TIMEOUT__=${SECOND_CORE_RPC_TIMEOUT:=60000}
# DATABASE
__DATABASE_ENABLED__=${DATABASE_ENABLED:=true}
@@ -60,6 +68,7 @@ __DATABASE_PORT__=${DATABASE_PORT:=3306}
__DATABASE_DATABASE__=${DATABASE_DATABASE:=mempool}
__DATABASE_USERNAME__=${DATABASE_USERNAME:=mempool}
__DATABASE_PASSWORD__=${DATABASE_PASSWORD:=mempool}
__DATABASE_TIMEOUT__=${DATABASE_TIMEOUT:=180000}
# SYSLOG
__SYSLOG_ENABLED__=${SYSLOG_ENABLED:=false}
@@ -103,91 +112,114 @@ __LIGHTNING_TOPOLOGY_FOLDER__=${LIGHTNING_TOPOLOGY_FOLDER:=""}
__LIGHTNING_STATS_REFRESH_INTERVAL__=${LIGHTNING_STATS_REFRESH_INTERVAL:=600}
__LIGHTNING_GRAPH_REFRESH_INTERVAL__=${LIGHTNING_GRAPH_REFRESH_INTERVAL:=600}
__LIGHTNING_LOGGER_UPDATE_INTERVAL__=${LIGHTNING_LOGGER_UPDATE_INTERVAL:=30}
__LIGHTNING_FORENSICS_INTERVAL__=${LIGHTNING_FORENSICS_INTERVAL:=43200}
__LIGHTNING_FORENSICS_RATE_LIMIT__=${LIGHTNING_FORENSICS_RATE_LIMIT:=20}
# LND
__LND_TLS_CERT_PATH__=${LND_TLS_CERT_PATH:=""}
__LND_MACAROON_PATH__=${LND_MACAROON_PATH:=""}
__LND_REST_API_URL__=${LND_REST_API_URL:="https://localhost:8080"}
__LND_TIMEOUT__=${LND_TIMEOUT:=10000}
# CLN
__CLIGHTNING_SOCKET__=${CLIGHTNING_SOCKET:=""}
# MAXMIND
__MAXMIND_ENABLED__=${MAXMIND_ENABLED:=true}
__MAXMIND_GEOLITE2_CITY__=${MAXMIND_GEOLITE2_CITY:="/backend/GeoIP/GeoLite2-City.mmdb"}
__MAXMIND_GEOLITE2_ASN__=${MAXMIND_GEOLITE2_ASN:="/backend/GeoIP/GeoLite2-ASN.mmdb"}
__MAXMIND_GEOIP2_ISP__=${MAXMIND_GEOIP2_ISP:=""}
# REPLICATION
__REPLICATION_ENABLED__=${REPLICATION_ENABLED:=true}
__REPLICATION_AUDIT__=${REPLICATION_AUDIT:=true}
__REPLICATION_AUDIT_START_HEIGHT__=${REPLICATION_AUDIT_START_HEIGHT:=774000}
__REPLICATION_SERVERS__=${REPLICATION_SERVERS:=[]}
mkdir -p "${__MEMPOOL_CACHE_DIR__}"
sed -i "s/__MEMPOOL_NETWORK__/${__MEMPOOL_NETWORK__}/g" mempool-config.json
sed -i "s/__MEMPOOL_BACKEND__/${__MEMPOOL_BACKEND__}/g" mempool-config.json
sed -i "s/__MEMPOOL_ENABLED__/${__MEMPOOL_ENABLED__}/g" mempool-config.json
sed -i "s/__MEMPOOL_HTTP_PORT__/${__MEMPOOL_HTTP_PORT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_SPAWN_CLUSTER_PROCS__/${__MEMPOOL_SPAWN_CLUSTER_PROCS__}/g" mempool-config.json
sed -i "s!__MEMPOOL_NETWORK__!${__MEMPOOL_NETWORK__}!g" mempool-config.json
sed -i "s!__MEMPOOL_BACKEND__!${__MEMPOOL_BACKEND__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ENABLED__!${__MEMPOOL_ENABLED__}!g" mempool-config.json
sed -i "s!__MEMPOOL_HTTP_PORT__!${__MEMPOOL_HTTP_PORT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_SPAWN_CLUSTER_PROCS__!${__MEMPOOL_SPAWN_CLUSTER_PROCS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_API_URL_PREFIX__!${__MEMPOOL_API_URL_PREFIX__}!g" mempool-config.json
sed -i "s/__MEMPOOL_POLL_RATE_MS__/${__MEMPOOL_POLL_RATE_MS__}/g" mempool-config.json
sed -i "s!__MEMPOOL_POLL_RATE_MS__!${__MEMPOOL_POLL_RATE_MS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CACHE_DIR__!${__MEMPOOL_CACHE_DIR__}!g" mempool-config.json
sed -i "s/__MEMPOOL_CLEAR_PROTECTION_MINUTES__/${__MEMPOOL_CLEAR_PROTECTION_MINUTES__}/g" mempool-config.json
sed -i "s/__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__/${__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__}/g" mempool-config.json
sed -i "s/__MEMPOOL_BLOCK_WEIGHT_UNITS__/${__MEMPOOL_BLOCK_WEIGHT_UNITS__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INITIAL_BLOCKS_AMOUNT__/${__MEMPOOL_INITIAL_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__/${__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__/${__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__}/g" mempool-config.json
sed -i "s/__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__/${__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__}/g" mempool-config.json
sed -i "s!__MEMPOOL_CLEAR_PROTECTION_MINUTES__!${__MEMPOOL_CLEAR_PROTECTION_MINUTES__}!g" mempool-config.json
sed -i "s!__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__!${__MEMPOOL_RECOMMENDED_FEE_PERCENTILE__}!g" mempool-config.json
sed -i "s!__MEMPOOL_BLOCK_WEIGHT_UNITS__!${__MEMPOOL_BLOCK_WEIGHT_UNITS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_INITIAL_BLOCKS_AMOUNT__!${__MEMPOOL_INITIAL_BLOCKS_AMOUNT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__!${__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_INDEXING_BLOCKS_AMOUNT__!${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__!${__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__!${__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__}!g" mempool-config.json
sed -i "s!__MEMPOOL_EXTERNAL_ASSETS__!${__MEMPOOL_EXTERNAL_ASSETS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_EXTERNAL_MAX_RETRY__!${__MEMPOOL_EXTERNAL_MAX_RETRY__}!g" mempool-config.json
sed -i "s!__MEMPOOL_EXTERNAL_RETRY_INTERVAL__!${__MEMPOOL_EXTERNAL_RETRY_INTERVAL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_USER_AGENT__!${__MEMPOOL_USER_AGENT__}!g" mempool-config.json
sed -i "s/__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__/${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__/${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}/g" mempool-config.json
sed -i "s!__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__!${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}!g" mempool-config.json
sed -i "s!__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__!${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_POOLS_JSON_URL__!${__MEMPOOL_POOLS_JSON_URL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_POOLS_JSON_TREE_URL__!${__MEMPOOL_POOLS_JSON_TREE_URL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_AUDIT__!${__MEMPOOL_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_MEMPOOL__!${__MEMPOOL_ADVANCED_GBT_MEMPOOL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_RUST_GBT__!${__MEMPOOL_GBT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_AUDIT__!${__MEMPOOL_ADVANCED_GBT_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CPFP_INDEXING__!${__MEMPOOL_CPFP_INDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MAX_BLOCKS_BULK_QUERY__!${__MEMPOOL_MAX_BLOCKS_BULK_QUERY__}!g" mempool-config.json
sed -i "s!__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__!${__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__!${__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ALLOW_UNREACHABLE__!${__MEMPOOL_ALLOW_UNREACHABLE__}!g" mempool-config.json
sed -i "s/__CORE_RPC_HOST__/${__CORE_RPC_HOST__}/g" mempool-config.json
sed -i "s/__CORE_RPC_PORT__/${__CORE_RPC_PORT__}/g" mempool-config.json
sed -i "s/__CORE_RPC_USERNAME__/${__CORE_RPC_USERNAME__}/g" mempool-config.json
sed -i "s/__CORE_RPC_PASSWORD__/${__CORE_RPC_PASSWORD__}/g" mempool-config.json
sed -i "s!__CORE_RPC_HOST__!${__CORE_RPC_HOST__}!g" mempool-config.json
sed -i "s!__CORE_RPC_PORT__!${__CORE_RPC_PORT__}!g" mempool-config.json
sed -i "s!__CORE_RPC_USERNAME__!${__CORE_RPC_USERNAME__}!g" mempool-config.json
sed -i "s!__CORE_RPC_PASSWORD__!${__CORE_RPC_PASSWORD__}!g" mempool-config.json
sed -i "s!__CORE_RPC_TIMEOUT__!${__CORE_RPC_TIMEOUT__}!g" mempool-config.json
sed -i "s/__ELECTRUM_HOST__/${__ELECTRUM_HOST__}/g" mempool-config.json
sed -i "s/__ELECTRUM_PORT__/${__ELECTRUM_PORT__}/g" mempool-config.json
sed -i "s/__ELECTRUM_TLS_ENABLED__/${__ELECTRUM_TLS_ENABLED__}/g" mempool-config.json
sed -i "s!__ELECTRUM_HOST__!${__ELECTRUM_HOST__}!g" mempool-config.json
sed -i "s!__ELECTRUM_PORT__!${__ELECTRUM_PORT__}!g" mempool-config.json
sed -i "s!__ELECTRUM_TLS_ENABLED__!${__ELECTRUM_TLS_ENABLED__}!g" mempool-config.json
sed -i "s!__ESPLORA_REST_API_URL__!${__ESPLORA_REST_API_URL__}!g" mempool-config.json
sed -i "s!__ESPLORA_UNIX_SOCKET_PATH__!${__ESPLORA_UNIX_SOCKET_PATH__}!g" mempool-config.json
sed -i "s!__ESPLORA_RETRY_UNIX_SOCKET_AFTER__!${__ESPLORA_RETRY_UNIX_SOCKET_AFTER__}!g" mempool-config.json
sed -i "s/__SECOND_CORE_RPC_HOST__/${__SECOND_CORE_RPC_HOST__}/g" mempool-config.json
sed -i "s/__SECOND_CORE_RPC_PORT__/${__SECOND_CORE_RPC_PORT__}/g" mempool-config.json
sed -i "s/__SECOND_CORE_RPC_USERNAME__/${__SECOND_CORE_RPC_USERNAME__}/g" mempool-config.json
sed -i "s/__SECOND_CORE_RPC_PASSWORD__/${__SECOND_CORE_RPC_PASSWORD__}/g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_HOST__!${__SECOND_CORE_RPC_HOST__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_PORT__!${__SECOND_CORE_RPC_PORT__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_USERNAME__!${__SECOND_CORE_RPC_USERNAME__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_PASSWORD__!${__SECOND_CORE_RPC_PASSWORD__}!g" mempool-config.json
sed -i "s!__SECOND_CORE_RPC_TIMEOUT__!${__SECOND_CORE_RPC_TIMEOUT__}!g" mempool-config.json
sed -i "s/__DATABASE_ENABLED__/${__DATABASE_ENABLED__}/g" mempool-config.json
sed -i "s/__DATABASE_HOST__/${__DATABASE_HOST__}/g" mempool-config.json
sed -i "s!__DATABASE_ENABLED__!${__DATABASE_ENABLED__}!g" mempool-config.json
sed -i "s!__DATABASE_HOST__!${__DATABASE_HOST__}!g" mempool-config.json
sed -i "s!__DATABASE_SOCKET__!${__DATABASE_SOCKET__}!g" mempool-config.json
sed -i "s!__DATABASE_PORT__!${__DATABASE_PORT__}!g" mempool-config.json
sed -i "s!__DATABASE_DATABASE__!${__DATABASE_DATABASE__}!g" mempool-config.json
sed -i "s!__DATABASE_USERNAME__!${__DATABASE_USERNAME__}!g" mempool-config.json
sed -i "s!__DATABASE_PASSWORD__!${__DATABASE_PASSWORD__}!g" mempool-config.json
sed -i "s!__DATABASE_TIMEOUT__!${__DATABASE_TIMEOUT__}!g" mempool-config.json
sed -i "s/__DATABASE_PORT__/${__DATABASE_PORT__}/g" mempool-config.json
sed -i "s/__DATABASE_DATABASE__/${__DATABASE_DATABASE__}/g" mempool-config.json
sed -i "s/__DATABASE_USERNAME__/${__DATABASE_USERNAME__}/g" mempool-config.json
sed -i "s/__DATABASE_PASSWORD__/${__DATABASE_PASSWORD__}/g" mempool-config.json
sed -i "s!__SYSLOG_ENABLED__!${__SYSLOG_ENABLED__}!g" mempool-config.json
sed -i "s!__SYSLOG_HOST__!${__SYSLOG_HOST__}!g" mempool-config.json
sed -i "s!__SYSLOG_PORT__!${__SYSLOG_PORT__}!g" mempool-config.json
sed -i "s!__SYSLOG_MIN_PRIORITY__!${__SYSLOG_MIN_PRIORITY__}!g" mempool-config.json
sed -i "s!__SYSLOG_FACILITY__!${__SYSLOG_FACILITY__}!g" mempool-config.json
sed -i "s/__SYSLOG_ENABLED__/${__SYSLOG_ENABLED__}/g" mempool-config.json
sed -i "s/__SYSLOG_HOST__/${__SYSLOG_HOST__}/g" mempool-config.json
sed -i "s/__SYSLOG_PORT__/${__SYSLOG_PORT__}/g" mempool-config.json
sed -i "s/__SYSLOG_MIN_PRIORITY__/${__SYSLOG_MIN_PRIORITY__}/g" mempool-config.json
sed -i "s/__SYSLOG_FACILITY__/${__SYSLOG_FACILITY__}/g" mempool-config.json
sed -i "s!__STATISTICS_ENABLED__!${__STATISTICS_ENABLED__}!g" mempool-config.json
sed -i "s!__STATISTICS_TX_PER_SECOND_SAMPLE_PERIOD__!${__STATISTICS_TX_PER_SECOND_SAMPLE_PERIOD__}!g" mempool-config.json
sed -i "s/__STATISTICS_ENABLED__/${__STATISTICS_ENABLED__}/g" mempool-config.json
sed -i "s/__STATISTICS_TX_PER_SECOND_SAMPLE_PERIOD__/${__STATISTICS_TX_PER_SECOND_SAMPLE_PERIOD__}/g" mempool-config.json
sed -i "s/__BISQ_ENABLED__/${__BISQ_ENABLED__}/g" mempool-config.json
sed -i "s!__BISQ_ENABLED__!${__BISQ_ENABLED__}!g" mempool-config.json
sed -i "s!__BISQ_DATA_PATH__!${__BISQ_DATA_PATH__}!g" mempool-config.json
sed -i "s/__SOCKS5PROXY_ENABLED__/${__SOCKS5PROXY_ENABLED__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_USE_ONION__/${__SOCKS5PROXY_USE_ONION__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_HOST__/${__SOCKS5PROXY_HOST__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_PORT__/${__SOCKS5PROXY_PORT__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_USERNAME__/${__SOCKS5PROXY_USERNAME__}/g" mempool-config.json
sed -i "s/__SOCKS5PROXY_PASSWORD__/${__SOCKS5PROXY_PASSWORD__}/g" mempool-config.json
sed -i "s!__SOCKS5PROXY_ENABLED__!${__SOCKS5PROXY_ENABLED__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_USE_ONION__!${__SOCKS5PROXY_USE_ONION__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_HOST__!${__SOCKS5PROXY_HOST__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_PORT__!${__SOCKS5PROXY_PORT__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_USERNAME__!${__SOCKS5PROXY_USERNAME__}!g" mempool-config.json
sed -i "s!__SOCKS5PROXY_PASSWORD__!${__SOCKS5PROXY_PASSWORD__}!g" mempool-config.json
sed -i "s!__PRICE_DATA_SERVER_TOR_URL__!${__PRICE_DATA_SERVER_TOR_URL__}!g" mempool-config.json
sed -i "s!__PRICE_DATA_SERVER_CLEARNET_URL__!${__PRICE_DATA_SERVER_CLEARNET_URL__}!g" mempool-config.json
@@ -206,13 +238,28 @@ sed -i "s!__LIGHTNING_TOPOLOGY_FOLDER__!${__LIGHTNING_TOPOLOGY_FOLDER__}!g" mempool-config.json
sed -i "s!__LIGHTNING_STATS_REFRESH_INTERVAL__!${__LIGHTNING_STATS_REFRESH_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_GRAPH_REFRESH_INTERVAL__!${__LIGHTNING_GRAPH_REFRESH_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_LOGGER_UPDATE_INTERVAL__!${__LIGHTNING_LOGGER_UPDATE_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_FORENSICS_INTERVAL__!${__LIGHTNING_FORENSICS_INTERVAL__}!g" mempool-config.json
sed -i "s!__LIGHTNING_FORENSICS_RATE_LIMIT__!${__LIGHTNING_FORENSICS_RATE_LIMIT__}!g" mempool-config.json
# LND
sed -i "s!__LND_TLS_CERT_PATH__!${__LND_TLS_CERT_PATH__}!g" mempool-config.json
sed -i "s!__LND_MACAROON_PATH__!${__LND_MACAROON_PATH__}!g" mempool-config.json
sed -i "s!__LND_REST_API_URL__!${__LND_REST_API_URL__}!g" mempool-config.json
sed -i "s!__LND_TIMEOUT__!${__LND_TIMEOUT__}!g" mempool-config.json
# CLN
sed -i "s!__CLIGHTNING_SOCKET__!${__CLIGHTNING_SOCKET__}!g" mempool-config.json
# MAXMIND
sed -i "s!__MAXMIND_ENABLED__!${__MAXMIND_ENABLED__}!g" mempool-config.json
sed -i "s!__MAXMIND_GEOLITE2_CITY__!${__MAXMIND_GEOLITE2_CITY__}!g" mempool-config.json
sed -i "s!__MAXMIND_GEOLITE2_ASN__!${__MAXMIND_GEOLITE2_ASN__}!g" mempool-config.json
sed -i "s!__MAXMIND_GEOIP2_ISP__!${__MAXMIND_GEOIP2_ISP__}!g" mempool-config.json
# REPLICATION
sed -i "s!__REPLICATION_ENABLED__!${__REPLICATION_ENABLED__}!g" mempool-config.json
sed -i "s!__REPLICATION_AUDIT__!${__REPLICATION_AUDIT__}!g" mempool-config.json
sed -i "s!__REPLICATION_AUDIT_START_HEIGHT__!${__REPLICATION_AUDIT_START_HEIGHT__}!g" mempool-config.json
sed -i "s!__REPLICATION_SERVERS__!${__REPLICATION_SERVERS__}!g" mempool-config.json
node /backend/package/index.js
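
Most of this change is a mechanical swap of the sed delimiter from / to !. The motivation: several of the substituted values (REST API URLs, unix socket paths, the LND TLS cert and macaroon paths) contain slashes, which prematurely terminate a /-delimited s-expression. A minimal sketch of the failure mode and the fix; the echo pipeline is illustrative only, not part of the script:

ESPLORA_REST_API_URL="http://127.0.0.1:3000"
echo '"REST_API_URL": "__ESPLORA_REST_API_URL__"' \
  | sed "s!__ESPLORA_REST_API_URL__!${ESPLORA_REST_API_URL}!g"
# -> "REST_API_URL": "http://127.0.0.1:3000"
# The /-delimited form, sed "s/__ESPLORA_REST_API_URL__/${ESPLORA_REST_API_URL}/g",
# breaks here: sed reads the URL's slashes as extra delimiters and aborts
# with "unknown option to `s'".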

View file

@@ -10,6 +10,10 @@ cp /etc/nginx/nginx.conf /patch/nginx.conf
sed -i "s/__MEMPOOL_FRONTEND_HTTP_PORT__/${__MEMPOOL_FRONTEND_HTTP_PORT__}/g" /patch/nginx.conf
cat /patch/nginx.conf > /etc/nginx/nginx.conf
if [ "${LIGHTNING_DETECTED_PORT}" != "" ];then
export LIGHTNING=true
fi
# Runtime overrides - read env vars defined in docker compose
__TESTNET_ENABLED__=${TESTNET_ENABLED:=false}
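
All of the __X__=${X:=default} lines in these entrypoint scripts rely on the shell's := parameter expansion: the default is assigned only when the environment variable is unset or empty, so values injected via docker compose always win. An illustrative sketch using the variable above:

unset TESTNET_ENABLED
__TESTNET_ENABLED__=${TESTNET_ENABLED:=false}   # no env var  -> "false"
TESTNET_ENABLED=true
__TESTNET_ENABLED__=${TESTNET_ENABLED:=false}   # env var set -> "true"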
