Mirror of https://github.com/mempool/mempool.git, synced 2025-02-22 06:21:46 +01:00

Commit 1ca99e9967: Merge branch 'master' into hunicus/msop-r
250 changed files with 20681 additions and 9896 deletions
.github/dependabot.yml (vendored, 15 lines changed)

@@ -7,7 +7,8 @@ updates:
     open-pull-requests-limit: 10
     ignore:
       - dependency-name: "*"
-        update-types: ["version-update:semver-major"]
+        update-types:
+          ["version-update:semver-major", "version-update:semver-patch"]
     allow:
       - dependency-type: "production"

@@ -18,7 +19,8 @@ updates:
     open-pull-requests-limit: 10
     ignore:
       - dependency-name: "*"
-        update-types: ["version-update:semver-major"]
+        update-types:
+          ["version-update:semver-major", "version-update:semver-patch"]
     allow:
       - dependency-type: "production"

@@ -28,7 +30,8 @@ updates:
     interval: weekly
     ignore:
       - dependency-name: "*"
-        update-types: ["version-update:semver-major"]
+        update-types:
+          ["version-update:semver-major", "version-update:semver-patch"]

   - package-ecosystem: docker
     directory: "/docker/frontend"

@@ -36,7 +39,8 @@ updates:
     interval: weekly
     ignore:
       - dependency-name: "*"
-        update-types: ["version-update:semver-major"]
+        update-types:
+          ["version-update:semver-major", "version-update:semver-patch"]

   - package-ecosystem: "github-actions"
     directory: "/"

@@ -44,4 +48,5 @@ updates:
     interval: weekly
     ignore:
       - dependency-name: "*"
-        update-types: ["version-update:semver-major"]
+        update-types:
+          ["version-update:semver-major", "version-update:semver-patch"]
.github/workflows/ci.yml (vendored, 10 lines changed)

@@ -9,7 +9,7 @@ jobs:
     if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
     strategy:
       matrix:
-        node: ["16", "17", "18"]
+        node: ["16", "17", "18", "20"]
         flavor: ["dev", "prod"]
       fail-fast: false
     runs-on: "ubuntu-latest"

@@ -27,6 +27,9 @@ jobs:
           node-version: ${{ matrix.node }}
           registry-url: "https://registry.npmjs.org"

+      - name: Install 1.70.x Rust toolchain
+        uses: dtolnay/rust-toolchain@1.70
+
       - name: Install
         if: ${{ matrix.flavor == 'dev'}}
         run: npm ci

@@ -55,7 +58,7 @@ jobs:
     if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
     strategy:
       matrix:
-        node: ["16", "17", "18"]
+        node: ["16", "17", "18", "20"]
         flavor: ["dev", "prod"]
       fail-fast: false
     runs-on: "ubuntu-latest"

@@ -94,3 +97,6 @@ jobs:
       - name: Build
         run: npm run build
         working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.gitignore (vendored, 1 line changed)

@@ -5,3 +5,4 @@ backend/mempool-config.json
 *.swp
 frontend/src/resources/config.template.js
 frontend/src/resources/config.js
+target
.vscode/settings.json (vendored, 3 lines changed)

@@ -1,5 +1,6 @@
 {
   "editor.tabSize": 2,
   "typescript.preferences.importModuleSpecifier": "relative",
-  "typescript.tsdk": "./backend/node_modules/typescript/lib"
+  "typescript.tsdk": "./backend/node_modules/typescript/lib",
+  "rust-analyzer.procMacro.ignored": { "napi-derive": ["napi"] }
 }
Cargo.lock (generated, new file, 533 lines)

@@ -0,0 +1,533 @@
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
|
||||
|
||||
[[package]]
|
||||
name = "bytemuck"
|
||||
version = "1.13.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea"
|
||||
|
||||
[[package]]
|
||||
name = "bytes"
|
||||
version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "convert_case"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
|
||||
dependencies = [
|
||||
"unicode-segmentation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ctor"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1586fa608b1dab41f667475b4a41faec5ba680aee428bfa5de4ea520fdc6e901"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn 2.0.20",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gbt"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bytemuck",
|
||||
"bytes",
|
||||
"napi",
|
||||
"napi-build",
|
||||
"napi-derive",
|
||||
"priority-queue",
|
||||
"tracing",
|
||||
"tracing-log",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.12.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.2.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "1.9.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"hashbrown",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.146"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
|
||||
|
||||
[[package]]
|
||||
name = "matchers"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
|
||||
dependencies = [
|
||||
"regex-automata",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
|
||||
|
||||
[[package]]
|
||||
name = "napi"
|
||||
version = "2.13.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0ede2d12cd6fce44da537a4be1f5510c73be2506c2e32dfaaafd1f36968f3a0e"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"ctor",
|
||||
"napi-derive",
|
||||
"napi-sys",
|
||||
"once_cell",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "napi-build"
|
||||
version = "2.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "882a73d9ef23e8dc2ebbffb6a6ae2ef467c0f18ac10711e4cc59c5485d41df0e"
|
||||
|
||||
[[package]]
|
||||
name = "napi-derive"
|
||||
version = "2.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da1c6a8fa84d549aa8708fcd062372bf8ec6e849de39016ab921067d21bde367"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"convert_case",
|
||||
"napi-derive-backend",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "napi-derive-backend"
|
||||
version = "1.0.52"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "20bbc7c69168d06a848f925ec5f0e0997f98e8c8d4f2cc30157f0da51c009e17"
|
||||
dependencies = [
|
||||
"convert_case",
|
||||
"once_cell",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"semver",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "napi-sys"
|
||||
version = "2.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "166b5ef52a3ab5575047a9fe8d4a030cdd0f63c96f071cd6907674453b07bae3"
|
||||
dependencies = [
|
||||
"libloading",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nu-ansi-term"
|
||||
version = "0.46.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
|
||||
dependencies = [
|
||||
"overload",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num_cpus"
|
||||
version = "1.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
|
||||
dependencies = [
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.18.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
|
||||
|
||||
[[package]]
|
||||
name = "overload"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
|
||||
|
||||
[[package]]
|
||||
name = "pin-project-lite"
|
||||
version = "0.2.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
|
||||
|
||||
[[package]]
|
||||
name = "priority-queue"
|
||||
version = "1.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fff39edfcaec0d64e8d0da38564fad195d2d51b680940295fcc307366e101e61"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"indexmap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.60"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.28"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.8.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-syntax 0.7.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
|
||||
dependencies = [
|
||||
"regex-syntax 0.6.29",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.6.29"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.7.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
|
||||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "1.0.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
|
||||
|
||||
[[package]]
|
||||
name = "sharded-slab"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.109"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fcb8d4cebc40aa517dfb69618fa647a346562e67228e2236ae0042ee6ac14775"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thread_local"
|
||||
version = "1.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio"
|
||||
version = "1.28.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"num_cpus",
|
||||
"pin-project-lite",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.37"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"pin-project-lite",
|
||||
"tracing-attributes",
|
||||
"tracing-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-attributes"
|
||||
version = "0.1.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.20",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-core"
|
||||
version = "0.1.31"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"valuable",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-log"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"tracing-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-subscriber"
|
||||
version = "0.3.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
|
||||
dependencies = [
|
||||
"matchers",
|
||||
"nu-ansi-term",
|
||||
"once_cell",
|
||||
"regex",
|
||||
"sharded-slab",
|
||||
"smallvec",
|
||||
"thread_local",
|
||||
"tracing",
|
||||
"tracing-core",
|
||||
"tracing-log",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-segmentation"
|
||||
version = "1.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
|
||||
|
||||
[[package]]
|
||||
name = "valuable"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
||||
dependencies = [
|
||||
"winapi-i686-pc-windows-gnu",
|
||||
"winapi-x86_64-pc-windows-gnu",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-i686-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
|
||||
dependencies = [
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
|
Cargo.toml (new file, 8 lines)

@@ -0,0 +1,8 @@
+[workspace]
+members = [
+    "./backend/rust-gbt",
+]
+
+[profile.release]
+lto = true
+codegen-units = 1
backend/.dockerignore (new file, 1 line)

@@ -0,0 +1 @@
+Dockerfile
@@ -2,7 +2,7 @@

 These instructions are mostly intended for developers.

-If you choose to use these instructions for a production setup, be aware that you will still probably need to do additional configuration for your specific OS, environment, use-case, etc. We do our best here to provide a good starting point, but only proceed if you know what you're doing. Mempool does not provide support for custom setups.
+If you choose to use these instructions for a production setup, be aware that you will still probably need to do additional configuration for your specific OS, environment, use-case, etc. We do our best here to provide a good starting point, but only proceed if you know what you're doing. Mempool only provides support for custom setups to [enterprise sponsors](https://mempool.space/enterprise).

 See other ways to set up Mempool on [the main README](/../../#installation-methods).

@@ -79,6 +79,8 @@ Query OK, 0 rows affected (0.00 sec)

 _Make sure to use Node.js 16.10 and npm 7._

+_The build process requires [Rust](https://www.rust-lang.org/tools/install) to be installed._
+
 Install dependencies with `npm` and build the backend:

 ```
@@ -27,8 +27,11 @@
     "AUDIT": false,
     "ADVANCED_GBT_AUDIT": false,
     "ADVANCED_GBT_MEMPOOL": false,
+    "RUST_GBT": false,
     "CPFP_INDEXING": false,
-    "DISK_CACHE_BLOCK_INTERVAL": 6
+    "DISK_CACHE_BLOCK_INTERVAL": 6,
+    "MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
+    "ALLOW_UNREACHABLE": true
   },
   "CORE_RPC": {
     "HOST": "127.0.0.1",

@@ -122,5 +125,16 @@
     "LIQUID_ONION": "http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1",
     "BISQ_URL": "https://bisq.markets/api",
     "BISQ_ONION": "http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api"
   },
+  "REPLICATION": {
+    "ENABLED": false,
+    "AUDIT": false,
+    "AUDIT_START_HEIGHT": 774000,
+    "SERVERS": [
+      "list",
+      "of",
+      "trusted",
+      "servers"
+    ]
+  }
 }
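For orientation, the sketch below shows how a backend might type and default the new settings introduced above (`RUST_GBT`, `MAX_PUSH_TX_SIZE_WEIGHT`, `ALLOW_UNREACHABLE`, and the `REPLICATION` block). It is a minimal illustration built only from the field names and sample values shown in the diff; the interface and function names are assumptions, not the repository's actual config loader.

```typescript
import * as fs from 'fs';

// Hypothetical types mirroring the new fields in the sample config above.
interface ReplicationConfig {
  ENABLED: boolean;
  AUDIT: boolean;
  AUDIT_START_HEIGHT: number;
  SERVERS: string[];
}

interface MempoolSettings {
  RUST_GBT: boolean;               // opt into the Rust getBlockTemplate implementation
  MAX_PUSH_TX_SIZE_WEIGHT: number; // upper bound on pushed transaction weight
  ALLOW_UNREACHABLE: boolean;
  DISK_CACHE_BLOCK_INTERVAL: number;
}

// Defaults taken from the sample values shown in the diff.
const defaults: { MEMPOOL: MempoolSettings; REPLICATION: ReplicationConfig } = {
  MEMPOOL: {
    RUST_GBT: false,
    MAX_PUSH_TX_SIZE_WEIGHT: 4000000,
    ALLOW_UNREACHABLE: true,
    DISK_CACHE_BLOCK_INTERVAL: 6,
  },
  REPLICATION: {
    ENABLED: false,
    AUDIT: false,
    AUDIT_START_HEIGHT: 774000,
    SERVERS: [],
  },
};

// Hypothetical loader: shallow-merge a user config file over the defaults.
function loadConfig(path: string) {
  const raw = fs.existsSync(path) ? JSON.parse(fs.readFileSync(path, 'utf8')) : {};
  return {
    MEMPOOL: { ...defaults.MEMPOOL, ...raw.MEMPOOL },
    REPLICATION: { ...defaults.REPLICATION, ...raw.REPLICATION },
  };
}
```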
backend/package-lock.json (generated, 359 lines changed)
@ -1,26 +1,26 @@
|
|||
{
|
||||
"name": "mempool-backend",
|
||||
"version": "2.6.0-dev",
|
||||
"version": "3.0.0-dev",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "mempool-backend",
|
||||
"version": "2.6.0-dev",
|
||||
"version": "3.0.0-dev",
|
||||
"license": "GNU Affero General Public License v3.0",
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.21.3",
|
||||
"@mempool/electrum-client": "1.1.9",
|
||||
"@types/node": "^18.15.3",
|
||||
"axios": "~0.27.2",
|
||||
"bitcoinjs-lib": "~6.1.0",
|
||||
"axios": "~1.4.0",
|
||||
"bitcoinjs-lib": "~6.1.3",
|
||||
"crypto-js": "~4.1.1",
|
||||
"express": "~4.18.2",
|
||||
"maxmind": "~4.3.8",
|
||||
"mysql2": "~3.2.0",
|
||||
"node-worker-threads-pool": "~1.5.1",
|
||||
"maxmind": "~4.3.11",
|
||||
"mysql2": "~3.5.2",
|
||||
"rust-gbt": "file:./rust-gbt",
|
||||
"socks-proxy-agent": "~7.0.0",
|
||||
"typescript": "~4.7.4",
|
||||
"typescript": "~4.9.3",
|
||||
"ws": "~8.13.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
@ -28,19 +28,28 @@
|
|||
"@babel/core": "^7.21.3",
|
||||
"@types/compression": "^1.7.2",
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
"@types/express": "^4.17.15",
|
||||
"@types/express": "^4.17.17",
|
||||
"@types/jest": "^29.5.0",
|
||||
"@types/ws": "~8.5.4",
|
||||
"@types/ws": "~8.5.5",
|
||||
"@typescript-eslint/eslint-plugin": "^5.55.0",
|
||||
"@typescript-eslint/parser": "^5.55.0",
|
||||
"eslint": "^8.36.0",
|
||||
"eslint-config-prettier": "^8.7.0",
|
||||
"eslint-config-prettier": "^8.8.0",
|
||||
"jest": "^29.5.0",
|
||||
"prettier": "^2.8.4",
|
||||
"ts-jest": "^29.0.5",
|
||||
"prettier": "^3.0.0",
|
||||
"ts-jest": "^29.1.1",
|
||||
"ts-node": "^10.9.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@aashutoshrathi/word-wrap": {
|
||||
"version": "1.2.6",
|
||||
"resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz",
|
||||
"integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@ampproject/remapping": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz",
|
||||
|
@ -1485,6 +1494,21 @@
|
|||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/@napi-rs/cli": {
|
||||
"version": "2.16.1",
|
||||
"resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.16.1.tgz",
|
||||
"integrity": "sha512-L0Gr5iEQIDEbvWdDr1HUaBOxBSHL1VZhWSk1oryawoT8qJIY+KGfLFelU+Qma64ivCPbxYpkfPoKYVG3rcoGIA==",
|
||||
"bin": {
|
||||
"napi": "scripts/index.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
},
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/Brooooooklyn"
|
||||
}
|
||||
},
|
||||
"node_modules/@noble/hashes": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.0.tgz",
|
||||
|
@ -1778,9 +1802,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/@types/ws": {
|
||||
"version": "8.5.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.4.tgz",
|
||||
"integrity": "sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==",
|
||||
"version": "8.5.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.5.tgz",
|
||||
"integrity": "sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
|
@ -1848,9 +1872,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -1992,9 +2016,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree/node_modules/semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -2051,9 +2075,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/utils/node_modules/semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -2238,12 +2262,13 @@
|
|||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "0.27.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
|
||||
"integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
|
||||
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.14.9",
|
||||
"form-data": "^4.0.0"
|
||||
"follow-redirects": "^1.15.0",
|
||||
"form-data": "^4.0.0",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest": {
|
||||
|
@ -2432,9 +2457,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/bitcoinjs-lib": {
|
||||
"version": "6.1.1",
|
||||
"resolved": "https://registry.npmjs.org/bitcoinjs-lib/-/bitcoinjs-lib-6.1.1.tgz",
|
||||
"integrity": "sha512-FYihfgTk29lt1eK2y48OtuarEDUnTprNBW3ctT8yHiOhvmeS3DzAVG6gI0VCvMkydz6UdlXlYNWIPqGD0SUYRQ==",
|
||||
"version": "6.1.3",
|
||||
"resolved": "https://registry.npmjs.org/bitcoinjs-lib/-/bitcoinjs-lib-6.1.3.tgz",
|
||||
"integrity": "sha512-TYXs/Qf+GNk2nnsB9HrXWqzFuEgCg0Gx+v3UW3B8VuceFHXVvhT+7hRnTSvwkX0i8rz2rtujeU6gFaDcFqYFDw==",
|
||||
"dependencies": {
|
||||
"@noble/hashes": "^1.2.0",
|
||||
"bech32": "^2.0.0",
|
||||
|
@ -5350,9 +5375,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/jest-snapshot/node_modules/semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -5875,12 +5900,12 @@
|
|||
}
|
||||
},
|
||||
"node_modules/maxmind": {
|
||||
"version": "4.3.9",
|
||||
"resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.9.tgz",
|
||||
"integrity": "sha512-rEfIxZ9M2P7CWQQzN5/LapCawpf2DLh+LWD/cA7lNfCbFL6dNJOKgtynp8QbRsxExutn7Ofz1P1tXEdL3gnukw==",
|
||||
"version": "4.3.11",
|
||||
"resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.11.tgz",
|
||||
"integrity": "sha512-tJDrKbUzN6PSA88tWgg0L2R4Ln00XwecYQJPFI+RvlF2k1sx6VQYtuQ1SVxm8+bw5tF7GWV4xyb+3/KyzEpPUw==",
|
||||
"dependencies": {
|
||||
"mmdb-lib": "2.0.2",
|
||||
"tiny-lru": "10.3.0"
|
||||
"tiny-lru": "11.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12",
|
||||
|
@ -6002,15 +6027,15 @@
|
|||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/mysql2": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.2.0.tgz",
|
||||
"integrity": "sha512-0Vn6a9WSrq6fWwvPgrvIwnOCldiEcgbzapVRDAtDZ4cMTxN7pnGqCTx8EG32S/NYXl6AXkdO+9hV1tSIi/LigA==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.5.2.tgz",
|
||||
"integrity": "sha512-cptobmhYkYeTBIFp2c0piw2+gElpioga1rUw5UidHvo8yaHijMZoo8A3zyBVoo/K71f7ZFvrShA9iMIy9dCzCA==",
|
||||
"dependencies": {
|
||||
"denque": "^2.1.0",
|
||||
"generate-function": "^2.3.1",
|
||||
"iconv-lite": "^0.6.3",
|
||||
"long": "^5.2.1",
|
||||
"lru-cache": "^7.14.1",
|
||||
"lru-cache": "^8.0.0",
|
||||
"named-placeholders": "^1.1.3",
|
||||
"seq-queue": "^0.0.5",
|
||||
"sqlstring": "^2.3.2"
|
||||
|
@ -6031,11 +6056,11 @@
|
|||
}
|
||||
},
|
||||
"node_modules/mysql2/node_modules/lru-cache": {
|
||||
"version": "7.18.3",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
|
||||
"integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
|
||||
"version": "8.0.5",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-8.0.5.tgz",
|
||||
"integrity": "sha512-MhWWlVnuab1RG5/zMRRcVGXZLCXrZTgfwMikgzCegsPnG62yDQo5JnqKkrK4jO5iKqDAZGItAqN5CtKBCBWRUA==",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
"node": ">=16.14"
|
||||
}
|
||||
},
|
||||
"node_modules/named-placeholders": {
|
||||
|
@ -6089,11 +6114,6 @@
|
|||
"integrity": "sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/node-worker-threads-pool": {
|
||||
"version": "1.5.1",
|
||||
"resolved": "https://registry.npmjs.org/node-worker-threads-pool/-/node-worker-threads-pool-1.5.1.tgz",
|
||||
"integrity": "sha512-7TXAhpMm+jO4MfESxYLtMGSnJWv+itdNHMdaFmeZuPXxwFGU90mtEB42BciUULXOUAxYBfXILAuvrSG3rQZ7mw=="
|
||||
},
|
||||
"node_modules/normalize-path": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
|
||||
|
@ -6159,17 +6179,17 @@
|
|||
}
|
||||
},
|
||||
"node_modules/optionator": {
|
||||
"version": "0.9.1",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
|
||||
"integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==",
|
||||
"version": "0.9.3",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz",
|
||||
"integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@aashutoshrathi/word-wrap": "^1.2.3",
|
||||
"deep-is": "^0.1.3",
|
||||
"fast-levenshtein": "^2.0.6",
|
||||
"levn": "^0.4.1",
|
||||
"prelude-ls": "^1.2.1",
|
||||
"type-check": "^0.4.0",
|
||||
"word-wrap": "^1.2.3"
|
||||
"type-check": "^0.4.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
|
@ -6400,15 +6420,15 @@
|
|||
}
|
||||
},
|
||||
"node_modules/prettier": {
|
||||
"version": "2.8.7",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.7.tgz",
|
||||
"integrity": "sha512-yPngTo3aXUUmyuTjeTUT75txrf+aMh9FiD7q9ZE/i6r0bPb22g4FsE6Y338PQX1bmfy08i9QQCB7/rcUAVntfw==",
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.0.tgz",
|
||||
"integrity": "sha512-zBf5eHpwHOGPC47h0zrPyNn+eAEIdEzfywMoYn2XPi0P44Zp0tSq64rq0xAREh4auw2cJZHo9QUob+NqCQky4g==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"prettier": "bin-prettier.js"
|
||||
"prettier": "bin/prettier.cjs"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10.13.0"
|
||||
"node": ">=14"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/prettier/prettier?sponsor=1"
|
||||
|
@ -6465,6 +6485,11 @@
|
|||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/proxy-from-env": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
|
||||
},
|
||||
"node_modules/punycode": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
||||
|
@ -6665,6 +6690,10 @@
|
|||
"queue-microtask": "^1.2.2"
|
||||
}
|
||||
},
|
||||
"node_modules/rust-gbt": {
|
||||
"resolved": "rust-gbt",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
|
@ -6690,9 +6719,9 @@
|
|||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
|
||||
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
|
||||
"version": "6.3.1",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
|
||||
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
|
@ -7029,9 +7058,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/tiny-lru": {
|
||||
"version": "10.3.0",
|
||||
"resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-10.3.0.tgz",
|
||||
"integrity": "sha512-vTKRT2AEO1sViFDWAIzZVpV8KURCaMtnHa4RZB3XqtYLbrTO/fLDXKPEX9kVWq9u+nZREkwakbcmzGgvJm8QKA==",
|
||||
"version": "11.0.1",
|
||||
"resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-11.0.1.tgz",
|
||||
"integrity": "sha512-iNgFugVuQgBKrqeO/mpiTTgmBsTP0WL6yeuLfLs/Ctf0pI/ixGqIRm8sDCwMcXGe9WWvt2sGXI5mNqZbValmJg==",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
|
@ -7072,9 +7101,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/ts-jest": {
|
||||
"version": "29.0.5",
|
||||
"resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.0.5.tgz",
|
||||
"integrity": "sha512-PL3UciSgIpQ7f6XjVOmbi96vmDHUqAyqDr8YxzopDqX3kfgYtX1cuNeBjP+L9sFXi6nzsGGA6R3fP3DDDJyrxA==",
|
||||
"version": "29.1.1",
|
||||
"resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.1.tgz",
|
||||
"integrity": "sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"bs-logger": "0.x",
|
||||
|
@ -7083,7 +7112,7 @@
|
|||
"json5": "^2.2.3",
|
||||
"lodash.memoize": "4.x",
|
||||
"make-error": "1.x",
|
||||
"semver": "7.x",
|
||||
"semver": "^7.5.3",
|
||||
"yargs-parser": "^21.0.1"
|
||||
},
|
||||
"bin": {
|
||||
|
@ -7097,7 +7126,7 @@
|
|||
"@jest/types": "^29.0.0",
|
||||
"babel-jest": "^29.0.0",
|
||||
"jest": "^29.0.0",
|
||||
"typescript": ">=4.3"
|
||||
"typescript": ">=4.3 <6"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@babel/core": {
|
||||
|
@ -7127,9 +7156,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/ts-jest/node_modules/semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -7262,9 +7291,9 @@
|
|||
"integrity": "sha512-7uc1O8h1M1g0rArakJdf0uLRSSgFcYexrVoKo+bzJd32gd4gDy2L/Z+8/FjPnU9ydY3pEnVPtr9FyscYY60K1g=="
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "4.7.4",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz",
|
||||
"integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==",
|
||||
"version": "4.9.5",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz",
|
||||
"integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
|
@ -7384,15 +7413,6 @@
|
|||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/word-wrap": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
|
||||
"integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
|
||||
|
@ -7544,9 +7564,26 @@
|
|||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"rust-gbt": {
|
||||
"name": "gbt",
|
||||
"version": "3.0.0-dev",
|
||||
"hasInstallScript": true,
|
||||
"dependencies": {
|
||||
"@napi-rs/cli": "^2.16.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 12"
|
||||
}
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@aashutoshrathi/word-wrap": {
|
||||
"version": "1.2.6",
|
||||
"resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz",
|
||||
"integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==",
|
||||
"dev": true
|
||||
},
|
||||
"@ampproject/remapping": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz",
|
||||
|
@ -8631,6 +8668,11 @@
|
|||
"resolved": "https://registry.npmjs.org/@mempool/electrum-client/-/electrum-client-1.1.9.tgz",
|
||||
"integrity": "sha512-mlvPiCzUlaETpYW3i6V87A24jjMYgsebaXtUo3WQyyLnYUuxs0KiXQ2mnKh3h15j8Xg/hfxeGIi+5OC9u0nftQ=="
|
||||
},
|
||||
"@napi-rs/cli": {
|
||||
"version": "2.16.1",
|
||||
"resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.16.1.tgz",
|
||||
"integrity": "sha512-L0Gr5iEQIDEbvWdDr1HUaBOxBSHL1VZhWSk1oryawoT8qJIY+KGfLFelU+Qma64ivCPbxYpkfPoKYVG3rcoGIA=="
|
||||
},
|
||||
"@noble/hashes": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.0.tgz",
|
||||
|
@ -8909,9 +8951,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"@types/ws": {
|
||||
"version": "8.5.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.4.tgz",
|
||||
"integrity": "sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==",
|
||||
"version": "8.5.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.5.tgz",
|
||||
"integrity": "sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/node": "*"
|
||||
|
@ -8960,9 +9002,9 @@
|
|||
}
|
||||
},
|
||||
"semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -9041,9 +9083,9 @@
|
|||
}
|
||||
},
|
||||
"semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -9083,9 +9125,9 @@
|
|||
}
|
||||
},
|
||||
"semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -9220,12 +9262,13 @@
|
|||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||
},
|
||||
"axios": {
|
||||
"version": "0.27.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
|
||||
"integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
|
||||
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
|
||||
"requires": {
|
||||
"follow-redirects": "^1.14.9",
|
||||
"form-data": "^4.0.0"
|
||||
"follow-redirects": "^1.15.0",
|
||||
"form-data": "^4.0.0",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"babel-jest": {
|
||||
|
@ -9371,9 +9414,9 @@
|
|||
"integrity": "sha512-lkc0XyiX9E9KiVAS1ZiOqK1xfiwvf4FXDDdkDq5crcDzOq+xGytY+14qCsqz7kCiy8rpN1CRNfacRhf9G3JNSA=="
|
||||
},
|
||||
"bitcoinjs-lib": {
|
||||
"version": "6.1.1",
|
||||
"resolved": "https://registry.npmjs.org/bitcoinjs-lib/-/bitcoinjs-lib-6.1.1.tgz",
|
||||
"integrity": "sha512-FYihfgTk29lt1eK2y48OtuarEDUnTprNBW3ctT8yHiOhvmeS3DzAVG6gI0VCvMkydz6UdlXlYNWIPqGD0SUYRQ==",
|
||||
"version": "6.1.3",
|
||||
"resolved": "https://registry.npmjs.org/bitcoinjs-lib/-/bitcoinjs-lib-6.1.3.tgz",
|
||||
"integrity": "sha512-TYXs/Qf+GNk2nnsB9HrXWqzFuEgCg0Gx+v3UW3B8VuceFHXVvhT+7hRnTSvwkX0i8rz2rtujeU6gFaDcFqYFDw==",
|
||||
"requires": {
|
||||
"@noble/hashes": "^1.2.0",
|
||||
"bech32": "^2.0.0",
|
||||
|
@ -11539,9 +11582,9 @@
|
|||
}
|
||||
},
|
||||
"semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -11935,12 +11978,12 @@
|
|||
}
|
||||
},
|
||||
"maxmind": {
|
||||
"version": "4.3.9",
|
||||
"resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.9.tgz",
|
||||
"integrity": "sha512-rEfIxZ9M2P7CWQQzN5/LapCawpf2DLh+LWD/cA7lNfCbFL6dNJOKgtynp8QbRsxExutn7Ofz1P1tXEdL3gnukw==",
|
||||
"version": "4.3.11",
|
||||
"resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.11.tgz",
|
||||
"integrity": "sha512-tJDrKbUzN6PSA88tWgg0L2R4Ln00XwecYQJPFI+RvlF2k1sx6VQYtuQ1SVxm8+bw5tF7GWV4xyb+3/KyzEpPUw==",
|
||||
"requires": {
|
||||
"mmdb-lib": "2.0.2",
|
||||
"tiny-lru": "10.3.0"
|
||||
"tiny-lru": "11.0.1"
|
||||
}
|
||||
},
|
||||
"media-typer": {
|
||||
|
@ -12024,15 +12067,15 @@
|
|||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"mysql2": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.2.0.tgz",
|
||||
"integrity": "sha512-0Vn6a9WSrq6fWwvPgrvIwnOCldiEcgbzapVRDAtDZ4cMTxN7pnGqCTx8EG32S/NYXl6AXkdO+9hV1tSIi/LigA==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.5.2.tgz",
|
||||
"integrity": "sha512-cptobmhYkYeTBIFp2c0piw2+gElpioga1rUw5UidHvo8yaHijMZoo8A3zyBVoo/K71f7ZFvrShA9iMIy9dCzCA==",
|
||||
"requires": {
|
||||
"denque": "^2.1.0",
|
||||
"generate-function": "^2.3.1",
|
||||
"iconv-lite": "^0.6.3",
|
||||
"long": "^5.2.1",
|
||||
"lru-cache": "^7.14.1",
|
||||
"lru-cache": "^8.0.0",
|
||||
"named-placeholders": "^1.1.3",
|
||||
"seq-queue": "^0.0.5",
|
||||
"sqlstring": "^2.3.2"
|
||||
|
@ -12047,9 +12090,9 @@
|
|||
}
|
||||
},
|
||||
"lru-cache": {
|
||||
"version": "7.18.3",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
|
||||
"integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA=="
|
||||
"version": "8.0.5",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-8.0.5.tgz",
|
||||
"integrity": "sha512-MhWWlVnuab1RG5/zMRRcVGXZLCXrZTgfwMikgzCegsPnG62yDQo5JnqKkrK4jO5iKqDAZGItAqN5CtKBCBWRUA=="
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -12097,11 +12140,6 @@
|
|||
"integrity": "sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==",
|
||||
"dev": true
|
||||
},
|
||||
"node-worker-threads-pool": {
|
||||
"version": "1.5.1",
|
||||
"resolved": "https://registry.npmjs.org/node-worker-threads-pool/-/node-worker-threads-pool-1.5.1.tgz",
|
||||
"integrity": "sha512-7TXAhpMm+jO4MfESxYLtMGSnJWv+itdNHMdaFmeZuPXxwFGU90mtEB42BciUULXOUAxYBfXILAuvrSG3rQZ7mw=="
|
||||
},
|
||||
"normalize-path": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
|
||||
|
@ -12149,17 +12187,17 @@
|
|||
}
|
||||
},
|
||||
"optionator": {
|
||||
"version": "0.9.1",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
|
||||
"integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==",
|
||||
"version": "0.9.3",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz",
|
||||
"integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@aashutoshrathi/word-wrap": "^1.2.3",
|
||||
"deep-is": "^0.1.3",
|
||||
"fast-levenshtein": "^2.0.6",
|
||||
"levn": "^0.4.1",
|
||||
"prelude-ls": "^1.2.1",
|
||||
"type-check": "^0.4.0",
|
||||
"word-wrap": "^1.2.3"
|
||||
"type-check": "^0.4.0"
|
||||
}
|
||||
},
|
||||
"p-limit": {
|
||||
|
@ -12320,9 +12358,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"prettier": {
|
||||
"version": "2.8.7",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.7.tgz",
|
||||
"integrity": "sha512-yPngTo3aXUUmyuTjeTUT75txrf+aMh9FiD7q9ZE/i6r0bPb22g4FsE6Y338PQX1bmfy08i9QQCB7/rcUAVntfw==",
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.0.tgz",
|
||||
"integrity": "sha512-zBf5eHpwHOGPC47h0zrPyNn+eAEIdEzfywMoYn2XPi0P44Zp0tSq64rq0xAREh4auw2cJZHo9QUob+NqCQky4g==",
|
||||
"dev": true
|
||||
},
|
||||
"pretty-format": {
|
||||
|
@ -12363,6 +12401,11 @@
|
|||
"ipaddr.js": "1.9.1"
|
||||
}
|
||||
},
|
||||
"proxy-from-env": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
|
||||
},
|
||||
"punycode": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
||||
|
@ -12481,6 +12524,12 @@
|
|||
"queue-microtask": "^1.2.2"
|
||||
}
|
||||
},
|
||||
"rust-gbt": {
|
||||
"version": "file:rust-gbt",
|
||||
"requires": {
|
||||
"@napi-rs/cli": "^2.16.1"
|
||||
}
|
||||
},
|
||||
"safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
|
@ -12492,9 +12541,9 @@
|
|||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
|
||||
},
|
||||
"semver": {
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
|
||||
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
|
||||
"version": "6.3.1",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
|
||||
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
|
||||
"dev": true
|
||||
},
|
||||
"send": {
|
||||
|
@ -12757,9 +12806,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"tiny-lru": {
|
||||
"version": "10.3.0",
|
||||
"resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-10.3.0.tgz",
|
||||
"integrity": "sha512-vTKRT2AEO1sViFDWAIzZVpV8KURCaMtnHa4RZB3XqtYLbrTO/fLDXKPEX9kVWq9u+nZREkwakbcmzGgvJm8QKA=="
|
||||
"version": "11.0.1",
|
||||
"resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-11.0.1.tgz",
|
||||
"integrity": "sha512-iNgFugVuQgBKrqeO/mpiTTgmBsTP0WL6yeuLfLs/Ctf0pI/ixGqIRm8sDCwMcXGe9WWvt2sGXI5mNqZbValmJg=="
|
||||
},
|
||||
"tmpl": {
|
||||
"version": "1.0.5",
|
||||
|
@ -12788,9 +12837,9 @@
|
|||
"integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="
|
||||
},
|
||||
"ts-jest": {
|
||||
"version": "29.0.5",
|
||||
"resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.0.5.tgz",
|
||||
"integrity": "sha512-PL3UciSgIpQ7f6XjVOmbi96vmDHUqAyqDr8YxzopDqX3kfgYtX1cuNeBjP+L9sFXi6nzsGGA6R3fP3DDDJyrxA==",
|
||||
"version": "29.1.1",
|
||||
"resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.1.tgz",
|
||||
"integrity": "sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"bs-logger": "0.x",
|
||||
|
@ -12799,7 +12848,7 @@
|
|||
"json5": "^2.2.3",
|
||||
"lodash.memoize": "4.x",
|
||||
"make-error": "1.x",
|
||||
"semver": "7.x",
|
||||
"semver": "^7.5.3",
|
||||
"yargs-parser": "^21.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
|
@ -12813,9 +12862,9 @@
|
|||
}
|
||||
},
|
||||
"semver": {
|
||||
"version": "7.3.8",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
|
||||
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
|
||||
"version": "7.5.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
|
||||
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"lru-cache": "^6.0.0"
|
||||
|
@ -12901,9 +12950,9 @@
|
|||
"integrity": "sha512-7uc1O8h1M1g0rArakJdf0uLRSSgFcYexrVoKo+bzJd32gd4gDy2L/Z+8/FjPnU9ydY3pEnVPtr9FyscYY60K1g=="
|
||||
},
|
||||
"typescript": {
|
||||
"version": "4.7.4",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz",
|
||||
"integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ=="
|
||||
"version": "4.9.5",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz",
|
||||
"integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g=="
|
||||
},
|
||||
"unpipe": {
|
||||
"version": "1.0.0",
|
||||
|
@ -12982,12 +13031,6 @@
|
|||
"isexe": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"word-wrap": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
|
||||
"integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
|
||||
"dev": true
|
||||
},
|
||||
"wrap-ansi": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
|
||||
|
@@ -1,6 +1,6 @@
 {
   "name": "mempool-backend",
-  "version": "2.6.0-dev",
+  "version": "3.0.0-dev",
   "description": "Bitcoin mempool visualizer and blockchain explorer backend",
   "license": "GNU Affero General Public License v3.0",
   "homepage": "https://mempool.space",

@@ -22,10 +22,10 @@
   "main": "index.ts",
   "scripts": {
     "tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json",
-    "build": "npm run tsc && npm run create-resources",
+    "build": "npm run build-rust && npm run tsc && npm run create-resources",
     "create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js",
-    "package": "npm run build && rm -rf package && mv dist package && mv node_modules package && npm run package-rm-build-deps",
-    "package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint)",
+    "package": "npm run build && rm -rf package && mv dist package && mv node_modules package && mv rust-gbt package && npm run package-rm-build-deps",
+    "package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint @napi-rs ../rust-gbt/target ../rust-gbt/node_modules ../rust-gbt/src)",
     "start": "node --max-old-space-size=2048 dist/index.js",
     "start-production": "node --max-old-space-size=16384 dist/index.js",
     "reindex-updated-pools": "npm run start-production --update-pools",

@@ -33,21 +33,22 @@
     "test": "./node_modules/.bin/jest --coverage",
     "lint": "./node_modules/.bin/eslint . --ext .ts",
     "lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
-    "prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\""
+    "prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\"",
+    "build-rust": "cd rust-gbt && npm install"
   },
   "dependencies": {
     "@babel/core": "^7.21.3",
     "@mempool/electrum-client": "1.1.9",
     "@types/node": "^18.15.3",
-    "axios": "~0.27.2",
-    "bitcoinjs-lib": "~6.1.0",
+    "axios": "~1.4.0",
+    "bitcoinjs-lib": "~6.1.3",
     "crypto-js": "~4.1.1",
     "express": "~4.18.2",
-    "maxmind": "~4.3.8",
-    "mysql2": "~3.2.0",
-    "node-worker-threads-pool": "~1.5.1",
+    "maxmind": "~4.3.11",
+    "mysql2": "~3.5.2",
+    "rust-gbt": "file:./rust-gbt",
     "socks-proxy-agent": "~7.0.0",
-    "typescript": "~4.7.4",
+    "typescript": "~4.9.3",
     "ws": "~8.13.0"
   },
   "devDependencies": {

@@ -55,16 +56,16 @@
     "@babel/code-frame": "^7.18.6",
     "@types/compression": "^1.7.2",
     "@types/crypto-js": "^4.1.1",
-    "@types/express": "^4.17.15",
+    "@types/express": "^4.17.17",
     "@types/jest": "^29.5.0",
-    "@types/ws": "~8.5.4",
+    "@types/ws": "~8.5.5",
     "@typescript-eslint/eslint-plugin": "^5.55.0",
     "@typescript-eslint/parser": "^5.55.0",
     "eslint": "^8.36.0",
-    "eslint-config-prettier": "^8.7.0",
+    "eslint-config-prettier": "^8.8.0",
     "jest": "^29.5.0",
-    "prettier": "^2.8.4",
-    "ts-jest": "^29.0.5",
+    "prettier": "^3.0.0",
+    "ts-jest": "^29.1.1",
     "ts-node": "^10.9.1"
   }
 }
4
backend/rust-gbt/.gitignore
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
*.node
|
||||
**/node_modules
|
||||
**/.DS_Store
|
||||
npm-debug.log*
|
25
backend/rust-gbt/Cargo.toml
Normal file
|
@ -0,0 +1,25 @@
|
|||
[package]
|
||||
name = "gbt"
|
||||
version = "0.1.0"
|
||||
description = "An inefficient re-implementation of the getBlockTemplate algorithm in Rust"
|
||||
authors = ["mononaut"]
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
priority-queue = "1.3.2"
|
||||
bytes = "1.4.0"
|
||||
napi = { version = "2.13.2", features = ["napi8", "tokio_rt"] }
|
||||
napi-derive = "2.13.0"
|
||||
bytemuck = "1.13.1"
|
||||
tracing = "0.1.36"
|
||||
tracing-log = "0.1.3"
|
||||
tracing-subscriber = { version = "0.3.15", features = ["env-filter"]}
|
||||
|
||||
[build-dependencies]
|
||||
napi-build = "2.0.1"
|
123
backend/rust-gbt/README.md
Normal file
|
@ -0,0 +1,123 @@
|
|||
# gbt
|
||||
|
||||
**gbt:** rust implementation of the getBlockTemplate algorithm
|
||||
|
||||
This project was bootstrapped by [napi](https://www.npmjs.com/package/@napi-rs/cli).
|
||||
|
||||
## Installing gbt
|
||||
|
||||
Installing gbt requires a [supported version of Node and Rust](https://github.com/napi-rs/napi-rs#platform-support).
|
||||
|
||||
In particular, the build step requires [Rust](https://www.rust-lang.org/tools/install) to be installed.
|
||||
|
||||
You can install the project with npm. In the project directory, run:
|
||||
|
||||
```sh
|
||||
$ npm install
|
||||
```
|
||||
|
||||
This fully installs the project, including installing any dependencies and running the build.
|
||||
|
||||
## Building gbt
|
||||
|
||||
If you have already installed the project and only want to run the build, run:
|
||||
|
||||
```sh
|
||||
$ npm run build
|
||||
```
|
||||
|
||||
This command uses the [napi build](https://www.npmjs.com/package/@napi-rs/cli) utility to run the Rust build and copy the built library into `./gbt.[TARGET_TRIPLE].node`.
|
||||
|
||||
## Exploring gbt
|
||||
|
||||
After building gbt, you can explore its exports at the Node REPL:
|
||||
|
||||
```sh
|
||||
$ npm install
|
||||
$ node
|
||||
> Object.keys(require('.'))
|
||||
[ 'GbtGenerator', 'GbtResult' ]
|
||||
```
|
||||
|
||||
## Available Scripts
|
||||
|
||||
In the project directory, you can run:
|
||||
|
||||
### `npm install`
|
||||
|
||||
Installs the project, including running `npm run build-release`.
|
||||
|
||||
### `npm build`
|
||||
|
||||
Builds the Node addon (`gbt.[TARGET_TRIPLE].node`) from source.
|
||||
|
||||
Additional [`cargo build`](https://doc.rust-lang.org/cargo/commands/cargo-build.html) arguments may be passed to `npm build` and `npm build-*` commands. For example, to enable a [cargo feature](https://doc.rust-lang.org/cargo/reference/features.html):
|
||||
|
||||
```
|
||||
npm run build -- --feature=beetle
|
||||
```
|
||||
|
||||
#### `npm build-debug`
|
||||
|
||||
Alias for `npm build`.
|
||||
|
||||
#### `npm build-release`
|
||||
|
||||
Same as [`npm build`](#npm-build), but builds the module with the [`release`](https://doc.rust-lang.org/cargo/reference/profiles.html#release) profile. Release builds compile more slowly but run faster.
|
||||
|
||||
### `npm test`
|
||||
|
||||
Runs the unit tests by calling `cargo test`. You can learn more about [adding tests to your Rust code](https://doc.rust-lang.org/book/ch11-01-writing-tests.html) from the [Rust book](https://doc.rust-lang.org/book/).
|
||||
|
||||
## Project Layout
|
||||
|
||||
The directory structure of this project is:
|
||||
|
||||
```
|
||||
gbt/
|
||||
├── Cargo.toml
|
||||
├── README.md
|
||||
├── gbt.[TARGET_TRIPLE].node
|
||||
├── package.json
|
||||
├── src/
|
||||
| └── lib.rs
|
||||
└── target/
|
||||
```
|
||||
|
||||
### Cargo.toml
|
||||
|
||||
The Cargo [manifest file](https://doc.rust-lang.org/cargo/reference/manifest.html), which informs the `cargo` command.
|
||||
|
||||
### README.md
|
||||
|
||||
This file.
|
||||
|
||||
### gbt.\[TARGET_TRIPLE\].node
|
||||
|
||||
The Node addon—i.e., a binary Node module—generated by building the project. This is the main module for this package, as dictated by the `"main"` key in `package.json`.
|
||||
|
||||
Under the hood, a [Node addon](https://nodejs.org/api/addons.html) is a [dynamically-linked shared object](https://en.wikipedia.org/wiki/Library_(computing)#Shared_libraries). The `"build"` script produces this file by copying it from within the `target/` directory, which is where the Rust build produces the shared object.
|
||||
|
||||
### package.json
|
||||
|
||||
The npm [manifest file](https://docs.npmjs.com/cli/v7/configuring-npm/package-json), which informs the `npm` command.
|
||||
|
||||
### src/
|
||||
|
||||
The directory tree containing the Rust source code for the project.
|
||||
|
||||
### src/lib.rs
|
||||
|
||||
The Rust library's main module.
|
||||
|
||||
### target/
|
||||
|
||||
Binary artifacts generated by the Rust build.
|
||||
|
||||
## Learn More
|
||||
|
||||
To learn more about NAPI-RS, see the [Napi-RS documentation](https://napi.rs/docs/introduction/getting-started).
|
||||
|
||||
To learn more about Rust, see the [Rust documentation](https://www.rust-lang.org).
|
||||
|
||||
To learn more about Node, see the [Node documentation](https://nodejs.org).
|
3
backend/rust-gbt/build.rs
Normal file
|
@ -0,0 +1,3 @@
|
|||
fn main() {
|
||||
napi_build::setup();
|
||||
}
|
45
backend/rust-gbt/index.d.ts
vendored
Normal file
|
@ -0,0 +1,45 @@
|
|||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
|
||||
/* auto-generated by NAPI-RS */
|
||||
|
||||
export interface ThreadTransaction {
|
||||
uid: number
|
||||
order: number
|
||||
fee: number
|
||||
weight: number
|
||||
sigops: number
|
||||
effectiveFeePerVsize: number
|
||||
inputs: Array<number>
|
||||
}
|
||||
export class GbtGenerator {
|
||||
constructor()
|
||||
/**
|
||||
* # Errors
|
||||
*
|
||||
* Rejects if the thread panics or if the Mutex is poisoned.
|
||||
*/
|
||||
make(mempool: Array<ThreadTransaction>, maxUid: number): Promise<GbtResult>
|
||||
/**
|
||||
* # Errors
|
||||
*
|
||||
* Rejects if the thread panics or if the Mutex is poisoned.
|
||||
*/
|
||||
update(newTxs: Array<ThreadTransaction>, removeTxs: Array<number>, maxUid: number): Promise<GbtResult>
|
||||
}
|
||||
/**
|
||||
* The result from calling the gbt function.
|
||||
*
|
||||
* This tuple contains the following:
|
||||
* blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
|
||||
* block_weights: A Vector of total weights per block.
|
||||
* clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
|
||||
* rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
|
||||
*/
|
||||
export class GbtResult {
|
||||
blocks: Array<Array<number>>
|
||||
blockWeights: Array<number>
|
||||
clusters: Array<Array<number>>
|
||||
rates: Array<Array<number>>
|
||||
constructor(blocks: Array<Array<number>>, blockWeights: Array<number>, clusters: Array<Array<number>>, rates: Array<Array<number>>)
|
||||
}
|
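As a rough illustration of the interface declared above, a backend caller might drive `GbtGenerator` from TypeScript roughly like this. The import path and all sample transaction values are assumptions for illustration, not taken from the repository:

```ts
import { GbtGenerator, ThreadTransaction } from '../rust-gbt'; // path is an assumption

async function projectBlocks(): Promise<void> {
  const generator = new GbtGenerator();

  // Two made-up transactions; uid 2 spends an output of uid 1.
  const mempool: ThreadTransaction[] = [
    { uid: 1, order: 0x01020304, fee: 1500, weight: 800, sigops: 4, effectiveFeePerVsize: 7.5, inputs: [] },
    { uid: 2, order: 0x0a0b0c0d, fee: 3000, weight: 600, sigops: 1, effectiveFeePerVsize: 20, inputs: [1] },
  ];

  // Build the initial projected block templates.
  const result = await generator.make(mempool, 2);
  console.log(result.blocks, result.blockWeights, result.rates);

  // Apply a mempool delta: no new transactions, evict uid 1.
  const updated = await generator.update([], [1], 2);
  console.log(updated.blocks);
}

projectBlocks().catch(console.error);
```

The `order` values here are arbitrary; in the real backend they are derived from the txid, as the test file later in this diff demonstrates.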
258
backend/rust-gbt/index.js
Normal file
|
@ -0,0 +1,258 @@
|
|||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
/* prettier-ignore */
|
||||
|
||||
/* auto-generated by NAPI-RS */
|
||||
|
||||
const { existsSync, readFileSync } = require('fs')
|
||||
const { join } = require('path')
|
||||
|
||||
const { platform, arch } = process
|
||||
|
||||
let nativeBinding = null
|
||||
let localFileExisted = false
|
||||
let loadError = null
|
||||
|
||||
function isMusl() {
|
||||
// For Node 10
|
||||
if (!process.report || typeof process.report.getReport !== 'function') {
|
||||
try {
|
||||
const lddPath = require('child_process').execSync('which ldd').toString().trim()
|
||||
return readFileSync(lddPath, 'utf8').includes('musl')
|
||||
} catch (e) {
|
||||
return true
|
||||
}
|
||||
} else {
|
||||
const { glibcVersionRuntime } = process.report.getReport().header
|
||||
return !glibcVersionRuntime
|
||||
}
|
||||
}
|
||||
|
||||
switch (platform) {
|
||||
case 'android':
|
||||
switch (arch) {
|
||||
case 'arm64':
|
||||
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm64.node'))
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.android-arm64.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-android-arm64')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
break
|
||||
case 'arm':
|
||||
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm-eabi.node'))
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.android-arm-eabi.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-android-arm-eabi')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unsupported architecture on Android ${arch}`)
|
||||
}
|
||||
break
|
||||
case 'win32':
|
||||
switch (arch) {
|
||||
case 'x64':
|
||||
localFileExisted = existsSync(
|
||||
join(__dirname, 'gbt.win32-x64-msvc.node')
|
||||
)
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.win32-x64-msvc.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-win32-x64-msvc')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
break
|
||||
case 'ia32':
|
||||
localFileExisted = existsSync(
|
||||
join(__dirname, 'gbt.win32-ia32-msvc.node')
|
||||
)
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.win32-ia32-msvc.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-win32-ia32-msvc')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
break
|
||||
case 'arm64':
|
||||
localFileExisted = existsSync(
|
||||
join(__dirname, 'gbt.win32-arm64-msvc.node')
|
||||
)
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.win32-arm64-msvc.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-win32-arm64-msvc')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unsupported architecture on Windows: ${arch}`)
|
||||
}
|
||||
break
|
||||
case 'darwin':
|
||||
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-universal.node'))
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.darwin-universal.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-darwin-universal')
|
||||
}
|
||||
break
|
||||
} catch {}
|
||||
switch (arch) {
|
||||
case 'x64':
|
||||
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-x64.node'))
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.darwin-x64.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-darwin-x64')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
break
|
||||
case 'arm64':
|
||||
localFileExisted = existsSync(
|
||||
join(__dirname, 'gbt.darwin-arm64.node')
|
||||
)
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.darwin-arm64.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-darwin-arm64')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unsupported architecture on macOS: ${arch}`)
|
||||
}
|
||||
break
|
||||
case 'freebsd':
|
||||
if (arch !== 'x64') {
|
||||
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
|
||||
}
|
||||
localFileExisted = existsSync(join(__dirname, 'gbt.freebsd-x64.node'))
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.freebsd-x64.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-freebsd-x64')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
break
|
||||
case 'linux':
|
||||
switch (arch) {
|
||||
case 'x64':
|
||||
if (isMusl()) {
|
||||
localFileExisted = existsSync(
|
||||
join(__dirname, 'gbt.linux-x64-musl.node')
|
||||
)
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.linux-x64-musl.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-linux-x64-musl')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
} else {
|
||||
localFileExisted = existsSync(
|
||||
join(__dirname, 'gbt.linux-x64-gnu.node')
|
||||
)
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.linux-x64-gnu.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-linux-x64-gnu')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
}
|
||||
break
|
||||
case 'arm64':
|
||||
if (isMusl()) {
|
||||
localFileExisted = existsSync(
|
||||
join(__dirname, 'gbt.linux-arm64-musl.node')
|
||||
)
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.linux-arm64-musl.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-linux-arm64-musl')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
} else {
|
||||
localFileExisted = existsSync(
|
||||
join(__dirname, 'gbt.linux-arm64-gnu.node')
|
||||
)
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.linux-arm64-gnu.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-linux-arm64-gnu')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
}
|
||||
break
|
||||
case 'arm':
|
||||
localFileExisted = existsSync(
|
||||
join(__dirname, 'gbt.linux-arm-gnueabihf.node')
|
||||
)
|
||||
try {
|
||||
if (localFileExisted) {
|
||||
nativeBinding = require('./gbt.linux-arm-gnueabihf.node')
|
||||
} else {
|
||||
nativeBinding = require('gbt-linux-arm-gnueabihf')
|
||||
}
|
||||
} catch (e) {
|
||||
loadError = e
|
||||
}
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unsupported architecture on Linux: ${arch}`)
|
||||
}
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
|
||||
}
|
||||
|
||||
if (!nativeBinding) {
|
||||
if (loadError) {
|
||||
throw loadError
|
||||
}
|
||||
throw new Error(`Failed to load native binding`)
|
||||
}
|
||||
|
||||
const { GbtGenerator, GbtResult } = nativeBinding
|
||||
|
||||
module.exports.GbtGenerator = GbtGenerator
|
||||
module.exports.GbtResult = GbtResult
|
34
backend/rust-gbt/package-lock.json
generated
Normal file
|
@ -0,0 +1,34 @@
|
|||
{
|
||||
"name": "gbt",
|
||||
"version": "3.0.0-dev",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "gbt",
|
||||
"version": "3.0.0-dev",
|
||||
"hasInstallScript": true,
|
||||
"dependencies": {
|
||||
"@napi-rs/cli": "^2.16.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 12"
|
||||
}
|
||||
},
|
||||
"node_modules/@napi-rs/cli": {
|
||||
"version": "2.16.1",
|
||||
"resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.16.1.tgz",
|
||||
"integrity": "sha512-L0Gr5iEQIDEbvWdDr1HUaBOxBSHL1VZhWSk1oryawoT8qJIY+KGfLFelU+Qma64ivCPbxYpkfPoKYVG3rcoGIA==",
|
||||
"bin": {
|
||||
"napi": "scripts/index.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
},
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/Brooooooklyn"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
33
backend/rust-gbt/package.json
Normal file
|
@ -0,0 +1,33 @@
|
|||
{
|
||||
"name": "gbt",
|
||||
"version": "3.0.0-dev",
|
||||
"description": "An inefficient re-implementation of the getBlockTemplate algorithm in Rust",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"scripts": {
|
||||
"artifacts": "napi artifacts",
|
||||
"build": "napi build --platform",
|
||||
"build-debug": "npm run build",
|
||||
"build-release": "npm run build -- --release --strip",
|
||||
"install": "npm run build-release",
|
||||
"prepublishOnly": "napi prepublish -t npm",
|
||||
"test": "cargo test"
|
||||
},
|
||||
"author": "mononaut",
|
||||
"napi": {
|
||||
"name": "gbt",
|
||||
"triples": {
|
||||
"defaults": false,
|
||||
"additional": [
|
||||
"x86_64-unknown-linux-gnu",
|
||||
"x86_64-unknown-freebsd"
|
||||
]
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@napi-rs/cli": "^2.16.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 12"
|
||||
}
|
||||
}
|
220
backend/rust-gbt/src/audit_transaction.rs
Normal file
|
@ -0,0 +1,220 @@
|
|||
use crate::{
|
||||
u32_hasher_types::{u32hashset_new, U32HasherState},
|
||||
ThreadTransaction,
|
||||
};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
collections::HashSet,
|
||||
hash::{Hash, Hasher},
|
||||
};
|
||||
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AuditTransaction {
|
||||
pub uid: u32,
|
||||
order: u32,
|
||||
pub fee: u64,
|
||||
pub weight: u32,
|
||||
// exact sigop-adjusted weight
|
||||
pub sigop_adjusted_weight: u32,
|
||||
// sigop-adjusted vsize rounded up to the next integer
|
||||
pub sigop_adjusted_vsize: u32,
|
||||
pub sigops: u32,
|
||||
adjusted_fee_per_vsize: f64,
|
||||
pub effective_fee_per_vsize: f64,
|
||||
pub dependency_rate: f64,
|
||||
pub inputs: Vec<u32>,
|
||||
pub relatives_set_flag: bool,
|
||||
pub ancestors: HashSet<u32, U32HasherState>,
|
||||
pub children: HashSet<u32, U32HasherState>,
|
||||
ancestor_fee: u64,
|
||||
ancestor_sigop_adjusted_weight: u32,
|
||||
ancestor_sigop_adjusted_vsize: u32,
|
||||
ancestor_sigops: u32,
|
||||
// Safety: Must be private to prevent NaN breaking Ord impl.
|
||||
score: f64,
|
||||
pub used: bool,
|
||||
/// whether this transaction has been moved to the "modified" priority queue
|
||||
pub modified: bool,
|
||||
pub dirty: bool,
|
||||
}
|
||||
|
||||
impl Hash for AuditTransaction {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.uid.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for AuditTransaction {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.uid == other.uid
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for AuditTransaction {}
|
||||
|
||||
#[inline]
|
||||
pub fn partial_cmp_uid_score(a: (u32, u32, f64), b: (u32, u32, f64)) -> Option<Ordering> {
|
||||
// If either score is NaN, this is false,
|
||||
// and partial_cmp will return None
|
||||
if a.2 != b.2 {
|
||||
// compare by score (sorts by ascending score)
|
||||
a.2.partial_cmp(&b.2)
|
||||
} else if a.1 != b.1 {
|
||||
// tie-break by comparing partial txids (sorts by descending txid)
|
||||
Some(b.1.cmp(&a.1))
|
||||
} else {
|
||||
// tie-break partial txid collisions by comparing uids (sorts by descending uid)
|
||||
Some(b.0.cmp(&a.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for AuditTransaction {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
partial_cmp_uid_score(
|
||||
(self.uid, self.order, self.score),
|
||||
(other.uid, other.order, other.score),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for AuditTransaction {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
// Safety: The only possible values for score are f64
|
||||
// that are not NaN. This is because outside code can not
|
||||
// freely assign score. Also, calc_new_score guarantees no NaN.
|
||||
self.partial_cmp(other).expect("score will never be NaN")
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn calc_fee_rate(fee: f64, vsize: f64) -> f64 {
|
||||
fee / (if vsize == 0.0 { 1.0 } else { vsize })
|
||||
}
|
||||
|
||||
impl AuditTransaction {
|
||||
pub fn from_thread_transaction(tx: &ThreadTransaction) -> Self {
|
||||
// rounded up to the nearest integer
|
||||
let is_adjusted = tx.weight < (tx.sigops * 20);
|
||||
let sigop_adjusted_vsize = ((tx.weight + 3) / 4).max(tx.sigops * 5);
|
||||
let sigop_adjusted_weight = tx.weight.max(tx.sigops * 20);
|
||||
let effective_fee_per_vsize = if is_adjusted {
|
||||
calc_fee_rate(tx.fee, f64::from(sigop_adjusted_weight) / 4.0)
|
||||
} else {
|
||||
tx.effective_fee_per_vsize
|
||||
};
|
||||
Self {
|
||||
uid: tx.uid,
|
||||
order: tx.order,
|
||||
fee: tx.fee as u64,
|
||||
weight: tx.weight,
|
||||
sigop_adjusted_weight,
|
||||
sigop_adjusted_vsize,
|
||||
sigops: tx.sigops,
|
||||
adjusted_fee_per_vsize: calc_fee_rate(tx.fee, f64::from(sigop_adjusted_vsize)),
|
||||
effective_fee_per_vsize,
|
||||
dependency_rate: f64::INFINITY,
|
||||
inputs: tx.inputs.clone(),
|
||||
relatives_set_flag: false,
|
||||
ancestors: u32hashset_new(),
|
||||
children: u32hashset_new(),
|
||||
ancestor_fee: tx.fee as u64,
|
||||
ancestor_sigop_adjusted_weight: sigop_adjusted_weight,
|
||||
ancestor_sigop_adjusted_vsize: sigop_adjusted_vsize,
|
||||
ancestor_sigops: tx.sigops,
|
||||
score: 0.0,
|
||||
used: false,
|
||||
modified: false,
|
||||
dirty: effective_fee_per_vsize != tx.effective_fee_per_vsize,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub const fn score(&self) -> f64 {
|
||||
self.score
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub const fn order(&self) -> u32 {
|
||||
self.order
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub const fn ancestor_sigop_adjusted_vsize(&self) -> u32 {
|
||||
self.ancestor_sigop_adjusted_vsize
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub const fn ancestor_sigops(&self) -> u32 {
|
||||
self.ancestor_sigops
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn cluster_rate(&self) -> f64 {
|
||||
// Safety: self.ancestor_weight can never be 0.
|
||||
// Even if it could, as it approaches 0, the value inside the min() call
|
||||
// grows, so if we think of 0 as "grew infinitely" then dependency_rate would be
|
||||
// the smaller of the two. If either side is NaN, the other side is returned.
|
||||
self.dependency_rate.min(calc_fee_rate(
|
||||
self.ancestor_fee as f64,
|
||||
f64::from(self.ancestor_sigop_adjusted_weight) / 4.0,
|
||||
))
|
||||
}
|
||||
|
||||
pub fn set_dirty_if_different(&mut self, cluster_rate: f64) {
|
||||
if self.effective_fee_per_vsize != cluster_rate {
|
||||
self.effective_fee_per_vsize = cluster_rate;
|
||||
self.dirty = true;
|
||||
}
|
||||
}
|
||||
|
||||
/// Safety: This function must NEVER set score to NaN.
|
||||
#[inline]
|
||||
fn calc_new_score(&mut self) {
|
||||
self.score = self.adjusted_fee_per_vsize.min(calc_fee_rate(
|
||||
self.ancestor_fee as f64,
|
||||
f64::from(self.ancestor_sigop_adjusted_vsize),
|
||||
));
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn set_ancestors(
|
||||
&mut self,
|
||||
ancestors: HashSet<u32, U32HasherState>,
|
||||
total_fee: u64,
|
||||
total_sigop_adjusted_weight: u32,
|
||||
total_sigop_adjusted_vsize: u32,
|
||||
total_sigops: u32,
|
||||
) {
|
||||
self.ancestors = ancestors;
|
||||
self.ancestor_fee = self.fee + total_fee;
|
||||
self.ancestor_sigop_adjusted_weight =
|
||||
self.sigop_adjusted_weight + total_sigop_adjusted_weight;
|
||||
self.ancestor_sigop_adjusted_vsize = self.sigop_adjusted_vsize + total_sigop_adjusted_vsize;
|
||||
self.ancestor_sigops = self.sigops + total_sigops;
|
||||
self.calc_new_score();
|
||||
self.relatives_set_flag = true;
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn remove_root(
|
||||
&mut self,
|
||||
root_txid: u32,
|
||||
root_fee: u64,
|
||||
root_sigop_adjusted_weight: u32,
|
||||
root_sigop_adjusted_vsize: u32,
|
||||
root_sigops: u32,
|
||||
cluster_rate: f64,
|
||||
) -> f64 {
|
||||
let old_score = self.score();
|
||||
self.dependency_rate = self.dependency_rate.min(cluster_rate);
|
||||
if self.ancestors.remove(&root_txid) {
|
||||
self.ancestor_fee -= root_fee;
|
||||
self.ancestor_sigop_adjusted_weight -= root_sigop_adjusted_weight;
|
||||
self.ancestor_sigop_adjusted_vsize -= root_sigop_adjusted_vsize;
|
||||
self.ancestor_sigops -= root_sigops;
|
||||
self.calc_new_score();
|
||||
}
|
||||
old_score
|
||||
}
|
||||
}
|
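For intuition about `from_thread_transaction` above: when a transaction carries many sigops relative to its weight, its vsize is floored at 5 vbytes (20 weight units) per sigop before fee rates are computed, mirroring Bitcoin Core's default of 20 weight units per sigop. A worked example with made-up numbers:

```ts
// Illustrative numbers only, not taken from the repository.
// weight = 400 WU, sigops = 30
// plain vsize           = ceil(400 / 4)     = 100 vB
// sigop floor           = 30 * 5            = 150 vB
// sigop_adjusted_vsize  = max(100, 150)     = 150 vB  -> this transaction is "adjusted"
// sigop_adjusted_weight = max(400, 30 * 20) = 600 WU
const fee = 900;
const effectiveFeePerVsize = fee / (600 / 4); // 900 / 150 = 6 sat/vB, recomputed from the adjusted weight
console.log(effectiveFeePerVsize);
```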
421
backend/rust-gbt/src/gbt.rs
Normal file
|
@ -0,0 +1,421 @@
|
|||
use priority_queue::PriorityQueue;
|
||||
use std::{cmp::Ordering, collections::HashSet, mem::ManuallyDrop};
|
||||
use tracing::{info, trace};
|
||||
|
||||
use crate::{
|
||||
audit_transaction::{partial_cmp_uid_score, AuditTransaction},
|
||||
u32_hasher_types::{u32hashset_new, u32priority_queue_with_capacity, U32HasherState},
|
||||
GbtResult, ThreadTransactionsMap,
|
||||
};
|
||||
|
||||
const MAX_BLOCK_WEIGHT_UNITS: u32 = 4_000_000 - 4_000;
|
||||
const BLOCK_SIGOPS: u32 = 80_000;
|
||||
const BLOCK_RESERVED_WEIGHT: u32 = 4_000;
|
||||
const BLOCK_RESERVED_SIGOPS: u32 = 400;
|
||||
const MAX_BLOCKS: usize = 8;
|
||||
|
||||
type AuditPool = Vec<Option<ManuallyDrop<AuditTransaction>>>;
|
||||
type ModifiedQueue = PriorityQueue<u32, TxPriority, U32HasherState>;
|
||||
|
||||
#[derive(Debug)]
|
||||
struct TxPriority {
|
||||
uid: u32,
|
||||
order: u32,
|
||||
score: f64,
|
||||
}
|
||||
impl PartialEq for TxPriority {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.uid == other.uid
|
||||
}
|
||||
}
|
||||
impl Eq for TxPriority {}
|
||||
impl PartialOrd for TxPriority {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
partial_cmp_uid_score(
|
||||
(self.uid, self.order, self.score),
|
||||
(other.uid, other.order, other.score),
|
||||
)
|
||||
}
|
||||
}
|
||||
impl Ord for TxPriority {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.partial_cmp(other).expect("score will never be NaN")
|
||||
}
|
||||
}
|
||||
|
||||
/// Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core.
|
||||
///
|
||||
/// See `BlockAssembler` in Bitcoin Core's
|
||||
/// [miner.cpp](https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp).
|
||||
/// Ported from mempool backend's
|
||||
/// [tx-selection-worker.ts](https://github.com/mempool/mempool/blob/master/backend/src/api/tx-selection-worker.ts).
|
||||
//
|
||||
// TODO: Make gbt smaller to fix these lints.
|
||||
#[allow(clippy::too_many_lines)]
|
||||
#[allow(clippy::cognitive_complexity)]
|
||||
pub fn gbt(mempool: &mut ThreadTransactionsMap, max_uid: usize) -> GbtResult {
|
||||
let mempool_len = mempool.len();
|
||||
let mut audit_pool: AuditPool = Vec::with_capacity(max_uid + 1);
|
||||
audit_pool.resize(max_uid + 1, None);
|
||||
let mut mempool_stack: Vec<u32> = Vec::with_capacity(mempool_len);
|
||||
let mut clusters: Vec<Vec<u32>> = Vec::new();
|
||||
let mut block_weights: Vec<u32> = Vec::new();
|
||||
|
||||
info!("Initializing working structs");
|
||||
for (uid, tx) in &mut *mempool {
|
||||
let audit_tx = AuditTransaction::from_thread_transaction(tx);
|
||||
// Safety: audit_pool and mempool_stack must always contain the same transactions
|
||||
audit_pool[*uid as usize] = Some(ManuallyDrop::new(audit_tx));
|
||||
mempool_stack.push(*uid);
|
||||
}
|
||||
|
||||
info!("Building relatives graph & calculating ancestor scores");
|
||||
for txid in &mempool_stack {
|
||||
set_relatives(*txid, &mut audit_pool);
|
||||
}
|
||||
trace!("Post relative graph Audit Pool: {:#?}", audit_pool);
|
||||
|
||||
info!("Sorting by descending ancestor score");
|
||||
let mut mempool_stack: Vec<(u32, u32, f64)> = mempool_stack
|
||||
.into_iter()
|
||||
.map(|txid| {
|
||||
let atx = audit_pool
|
||||
.get(txid as usize)
|
||||
.and_then(Option::as_ref)
|
||||
.expect("All txids are from audit_pool");
|
||||
(txid, atx.order(), atx.score())
|
||||
})
|
||||
.collect();
|
||||
mempool_stack.sort_unstable_by(|a, b| partial_cmp_uid_score(*a, *b).expect("Not NaN"));
|
||||
let mut mempool_stack: Vec<u32> = mempool_stack.into_iter().map(|(txid, _, _)| txid).collect();
|
||||
|
||||
info!("Building blocks by greedily choosing the highest feerate package");
|
||||
info!("(i.e. the package rooted in the transaction with the best ancestor score)");
|
||||
let mut blocks: Vec<Vec<u32>> = Vec::new();
|
||||
let mut block_weight: u32 = BLOCK_RESERVED_WEIGHT;
|
||||
let mut block_sigops: u32 = BLOCK_RESERVED_SIGOPS;
|
||||
// No need to be bigger than 4096 transactions for the per-block transaction Vec.
|
||||
let initial_txes_per_block: usize = 4096.min(mempool_len);
|
||||
let mut transactions: Vec<u32> = Vec::with_capacity(initial_txes_per_block);
|
||||
let mut modified: ModifiedQueue = u32priority_queue_with_capacity(mempool_len);
|
||||
let mut overflow: Vec<u32> = Vec::new();
|
||||
let mut failures = 0;
|
||||
while !mempool_stack.is_empty() || !modified.is_empty() {
|
||||
// This trace log storm is big, so to make scrolling through
|
||||
// each iteration easier, we leave a bunch of empty rows
|
||||
// and a header of ======
|
||||
trace!("\n\n\n\n\n\n\n\n\n\n==================================");
|
||||
trace!("mempool_array: {:#?}", mempool_stack);
|
||||
trace!("clusters: {:#?}", clusters);
|
||||
trace!("modified: {:#?}", modified);
|
||||
trace!("audit_pool: {:#?}", audit_pool);
|
||||
trace!("blocks: {:#?}", blocks);
|
||||
trace!("block_weight: {:#?}", block_weight);
|
||||
trace!("block_sigops: {:#?}", block_sigops);
|
||||
trace!("transactions: {:#?}", transactions);
|
||||
trace!("overflow: {:#?}", overflow);
|
||||
trace!("failures: {:#?}", failures);
|
||||
trace!("\n==================================");
|
||||
|
||||
let next_from_stack = next_valid_from_stack(&mut mempool_stack, &audit_pool);
|
||||
let next_from_queue = next_valid_from_queue(&mut modified, &audit_pool);
|
||||
if next_from_stack.is_none() && next_from_queue.is_none() {
|
||||
continue;
|
||||
}
|
||||
let (next_tx, from_stack) = match (next_from_stack, next_from_queue) {
|
||||
(Some(stack_tx), Some(queue_tx)) => match queue_tx.cmp(stack_tx) {
|
||||
std::cmp::Ordering::Less => (stack_tx, true),
|
||||
_ => (queue_tx, false),
|
||||
},
|
||||
(Some(stack_tx), None) => (stack_tx, true),
|
||||
(None, Some(queue_tx)) => (queue_tx, false),
|
||||
(None, None) => unreachable!(),
|
||||
};
|
||||
|
||||
if from_stack {
|
||||
mempool_stack.pop();
|
||||
} else {
|
||||
modified.pop();
|
||||
}
|
||||
|
||||
if blocks.len() < (MAX_BLOCKS - 1)
|
||||
&& ((block_weight + (4 * next_tx.ancestor_sigop_adjusted_vsize())
|
||||
>= MAX_BLOCK_WEIGHT_UNITS)
|
||||
|| (block_sigops + next_tx.ancestor_sigops() > BLOCK_SIGOPS))
|
||||
{
|
||||
// hold this package in an overflow list while we check for smaller options
|
||||
overflow.push(next_tx.uid);
|
||||
failures += 1;
|
||||
} else {
|
||||
let mut package: Vec<(u32, u32, usize)> = Vec::new();
|
||||
let mut cluster: Vec<u32> = Vec::new();
|
||||
let is_cluster: bool = !next_tx.ancestors.is_empty();
|
||||
for ancestor_id in &next_tx.ancestors {
|
||||
if let Some(Some(ancestor)) = audit_pool.get(*ancestor_id as usize) {
|
||||
package.push((*ancestor_id, ancestor.order(), ancestor.ancestors.len()));
|
||||
}
|
||||
}
|
||||
package.sort_unstable_by(|a, b| -> Ordering {
|
||||
if a.2 != b.2 {
|
||||
// order by ascending ancestor count
|
||||
a.2.cmp(&b.2)
|
||||
} else if a.1 != b.1 {
|
||||
// tie-break by ascending partial txid
|
||||
a.1.cmp(&b.1)
|
||||
} else {
|
||||
// tie-break partial txid collisions by ascending uid
|
||||
a.0.cmp(&b.0)
|
||||
}
|
||||
});
|
||||
package.push((next_tx.uid, next_tx.order(), next_tx.ancestors.len()));
|
||||
|
||||
let cluster_rate = next_tx.cluster_rate();
|
||||
|
||||
for (txid, _, _) in &package {
|
||||
cluster.push(*txid);
|
||||
if let Some(Some(tx)) = audit_pool.get_mut(*txid as usize) {
|
||||
tx.used = true;
|
||||
tx.set_dirty_if_different(cluster_rate);
|
||||
transactions.push(tx.uid);
|
||||
block_weight += tx.weight;
|
||||
block_sigops += tx.sigops;
|
||||
}
|
||||
update_descendants(*txid, &mut audit_pool, &mut modified, cluster_rate);
|
||||
}
|
||||
|
||||
if is_cluster {
|
||||
clusters.push(cluster);
|
||||
}
|
||||
|
||||
failures = 0;
|
||||
}
|
||||
|
||||
// this block is full
|
||||
let exceeded_package_tries =
|
||||
failures > 1000 && block_weight > (MAX_BLOCK_WEIGHT_UNITS - BLOCK_RESERVED_WEIGHT);
|
||||
let queue_is_empty = mempool_stack.is_empty() && modified.is_empty();
|
||||
if (exceeded_package_tries || queue_is_empty) && blocks.len() < (MAX_BLOCKS - 1) {
|
||||
// finalize this block
|
||||
if !transactions.is_empty() {
|
||||
blocks.push(transactions);
|
||||
block_weights.push(block_weight);
|
||||
}
|
||||
// reset for the next block
|
||||
transactions = Vec::with_capacity(initial_txes_per_block);
|
||||
block_weight = BLOCK_RESERVED_WEIGHT;
|
||||
block_sigops = BLOCK_RESERVED_SIGOPS;
|
||||
failures = 0;
|
||||
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
|
||||
overflow.reverse();
|
||||
for overflowed in &overflow {
|
||||
if let Some(Some(overflowed_tx)) = audit_pool.get(*overflowed as usize) {
|
||||
if overflowed_tx.modified {
|
||||
modified.push(
|
||||
*overflowed,
|
||||
TxPriority {
|
||||
uid: *overflowed,
|
||||
order: overflowed_tx.order(),
|
||||
score: overflowed_tx.score(),
|
||||
},
|
||||
);
|
||||
} else {
|
||||
mempool_stack.push(*overflowed);
|
||||
}
|
||||
}
|
||||
}
|
||||
overflow = Vec::new();
|
||||
}
|
||||
}
|
||||
info!("add the final unbounded block if it contains any transactions");
|
||||
if !transactions.is_empty() {
|
||||
blocks.push(transactions);
|
||||
block_weights.push(block_weight);
|
||||
}
|
||||
|
||||
info!("make a list of dirty transactions and their new rates");
|
||||
let mut rates: Vec<Vec<f64>> = Vec::new();
|
||||
for (uid, thread_tx) in mempool {
|
||||
// Takes ownership of the audit_tx and replaces with None
|
||||
if let Some(Some(audit_tx)) = audit_pool.get_mut(*uid as usize).map(Option::take) {
|
||||
trace!("txid: {}, is_dirty: {}", uid, audit_tx.dirty);
|
||||
if audit_tx.dirty {
|
||||
rates.push(vec![f64::from(*uid), audit_tx.effective_fee_per_vsize]);
|
||||
thread_tx.effective_fee_per_vsize = audit_tx.effective_fee_per_vsize;
|
||||
}
|
||||
// Drops the AuditTransaction manually
|
||||
// There are no audit_txs that are not in the mempool HashMap
|
||||
// So there is guaranteed to be no memory leaks.
|
||||
ManuallyDrop::into_inner(audit_tx);
|
||||
}
|
||||
}
|
||||
trace!("\n\n\n\n\n====================");
|
||||
trace!("blocks: {:#?}", blocks);
|
||||
trace!("clusters: {:#?}", clusters);
|
||||
trace!("rates: {:#?}\n====================\n\n\n\n\n", rates);
|
||||
|
||||
GbtResult {
|
||||
blocks,
|
||||
block_weights,
|
||||
clusters,
|
||||
rates,
|
||||
}
|
||||
}
|
||||
|
||||
fn next_valid_from_stack<'a>(
|
||||
mempool_stack: &mut Vec<u32>,
|
||||
audit_pool: &'a AuditPool,
|
||||
) -> Option<&'a AuditTransaction> {
|
||||
while let Some(next_txid) = mempool_stack.last() {
|
||||
match audit_pool.get(*next_txid as usize) {
|
||||
Some(Some(tx)) if !tx.used && !tx.modified => {
|
||||
return Some(tx);
|
||||
}
|
||||
_ => {
|
||||
mempool_stack.pop();
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn next_valid_from_queue<'a>(
|
||||
queue: &mut ModifiedQueue,
|
||||
audit_pool: &'a AuditPool,
|
||||
) -> Option<&'a AuditTransaction> {
|
||||
while let Some((next_txid, _)) = queue.peek() {
|
||||
match audit_pool.get(*next_txid as usize) {
|
||||
Some(Some(tx)) if !tx.used => {
|
||||
return Some(tx);
|
||||
}
|
||||
_ => {
|
||||
queue.pop();
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn set_relatives(txid: u32, audit_pool: &mut AuditPool) {
|
||||
let mut parents: HashSet<u32, U32HasherState> = u32hashset_new();
|
||||
if let Some(Some(tx)) = audit_pool.get(txid as usize) {
|
||||
if tx.relatives_set_flag {
|
||||
return;
|
||||
}
|
||||
for input in &tx.inputs {
|
||||
parents.insert(*input);
|
||||
}
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut ancestors: HashSet<u32, U32HasherState> = u32hashset_new();
|
||||
for parent_id in &parents {
|
||||
set_relatives(*parent_id, audit_pool);
|
||||
|
||||
if let Some(Some(parent)) = audit_pool.get_mut(*parent_id as usize) {
|
||||
// Safety: ancestors must always contain only txes in audit_pool
|
||||
ancestors.insert(*parent_id);
|
||||
parent.children.insert(txid);
|
||||
for ancestor in &parent.ancestors {
|
||||
ancestors.insert(*ancestor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut total_fee: u64 = 0;
|
||||
let mut total_sigop_adjusted_weight: u32 = 0;
|
||||
let mut total_sigop_adjusted_vsize: u32 = 0;
|
||||
let mut total_sigops: u32 = 0;
|
||||
|
||||
for ancestor_id in &ancestors {
|
||||
let Some(ancestor) = audit_pool
|
||||
.get(*ancestor_id as usize)
|
||||
.expect("audit_pool contains all ancestors") else { todo!() };
|
||||
total_fee += ancestor.fee;
|
||||
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
|
||||
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
|
||||
total_sigops += ancestor.sigops;
|
||||
}
|
||||
|
||||
if let Some(Some(tx)) = audit_pool.get_mut(txid as usize) {
|
||||
tx.set_ancestors(
|
||||
ancestors,
|
||||
total_fee,
|
||||
total_sigop_adjusted_weight,
|
||||
total_sigop_adjusted_vsize,
|
||||
total_sigops,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
|
||||
fn update_descendants(
|
||||
root_txid: u32,
|
||||
audit_pool: &mut AuditPool,
|
||||
modified: &mut ModifiedQueue,
|
||||
cluster_rate: f64,
|
||||
) {
|
||||
let mut visited: HashSet<u32, U32HasherState> = u32hashset_new();
|
||||
let mut descendant_stack: Vec<u32> = Vec::new();
|
||||
let root_fee: u64;
|
||||
let root_sigop_adjusted_weight: u32;
|
||||
let root_sigop_adjusted_vsize: u32;
|
||||
let root_sigops: u32;
|
||||
if let Some(Some(root_tx)) = audit_pool.get(root_txid as usize) {
|
||||
for descendant_id in &root_tx.children {
|
||||
if !visited.contains(descendant_id) {
|
||||
descendant_stack.push(*descendant_id);
|
||||
visited.insert(*descendant_id);
|
||||
}
|
||||
}
|
||||
root_fee = root_tx.fee;
|
||||
root_sigop_adjusted_weight = root_tx.sigop_adjusted_weight;
|
||||
root_sigop_adjusted_vsize = root_tx.sigop_adjusted_vsize;
|
||||
root_sigops = root_tx.sigops;
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
while let Some(next_txid) = descendant_stack.pop() {
|
||||
if let Some(Some(descendant)) = audit_pool.get_mut(next_txid as usize) {
|
||||
// remove root tx as ancestor
|
||||
let old_score = descendant.remove_root(
|
||||
root_txid,
|
||||
root_fee,
|
||||
root_sigop_adjusted_weight,
|
||||
root_sigop_adjusted_vsize,
|
||||
root_sigops,
|
||||
cluster_rate,
|
||||
);
|
||||
// add to priority queue or update priority if score has changed
|
||||
if descendant.score() < old_score {
|
||||
descendant.modified = true;
|
||||
modified.push_decrease(
|
||||
descendant.uid,
|
||||
TxPriority {
|
||||
uid: descendant.uid,
|
||||
order: descendant.order(),
|
||||
score: descendant.score(),
|
||||
},
|
||||
);
|
||||
} else if descendant.score() > old_score {
|
||||
descendant.modified = true;
|
||||
modified.push_increase(
|
||||
descendant.uid,
|
||||
TxPriority {
|
||||
uid: descendant.uid,
|
||||
order: descendant.order(),
|
||||
score: descendant.score(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
// add this node's children to the stack
|
||||
for child_id in &descendant.children {
|
||||
if !visited.contains(child_id) {
|
||||
descendant_stack.push(*child_id);
|
||||
visited.insert(*child_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
177
backend/rust-gbt/src/lib.rs
Normal file
|
@ -0,0 +1,177 @@
|
|||
#![warn(clippy::all)]
|
||||
#![warn(clippy::pedantic)]
|
||||
#![warn(clippy::nursery)]
|
||||
#![allow(clippy::cast_precision_loss)]
|
||||
#![allow(clippy::cast_possible_truncation)]
|
||||
#![allow(clippy::cast_sign_loss)]
|
||||
#![allow(clippy::float_cmp)]
|
||||
|
||||
use napi::bindgen_prelude::Result;
|
||||
use napi_derive::napi;
|
||||
use thread_transaction::ThreadTransaction;
|
||||
use tracing::{debug, info, trace};
|
||||
use tracing_log::LogTracer;
|
||||
use tracing_subscriber::{EnvFilter, FmtSubscriber};
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
mod audit_transaction;
|
||||
mod gbt;
|
||||
mod thread_transaction;
|
||||
mod u32_hasher_types;
|
||||
|
||||
use u32_hasher_types::{u32hashmap_with_capacity, U32HasherState};
|
||||
|
||||
/// This is the initial capacity of the `GbtGenerator` struct's inner `HashMap`.
|
||||
///
|
||||
/// Note: This doesn't *have* to be a power of 2. (uwu)
|
||||
const STARTING_CAPACITY: usize = 1_048_576;
|
||||
|
||||
type ThreadTransactionsMap = HashMap<u32, ThreadTransaction, U32HasherState>;
|
||||
|
||||
#[napi]
|
||||
pub struct GbtGenerator {
|
||||
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
|
||||
}
|
||||
|
||||
#[napi::module_init]
|
||||
fn init() {
|
||||
// Set all `tracing` logs to print to STDOUT
|
||||
// Note: Passing RUST_LOG env variable to the node process
|
||||
// will change the log level for the rust module.
|
||||
tracing::subscriber::set_global_default(
|
||||
FmtSubscriber::builder()
|
||||
.with_env_filter(EnvFilter::from_default_env())
|
||||
.with_ansi(
|
||||
// Default to no-color logs.
|
||||
// Setting RUST_LOG_COLOR to 1 or true|TRUE|True etc.
|
||||
// will enable color
|
||||
std::env::var("RUST_LOG_COLOR")
|
||||
.map(|s| ["1", "true"].contains(&&*s.to_lowercase()))
|
||||
.unwrap_or(false),
|
||||
)
|
||||
.finish(),
|
||||
)
|
||||
.expect("Logging subscriber failed");
|
||||
// Convert all `log` logs into `tracing` events
|
||||
LogTracer::init().expect("Legacy log subscriber failed");
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl GbtGenerator {
|
||||
#[napi(constructor)]
|
||||
#[allow(clippy::new_without_default)]
|
||||
#[must_use]
|
||||
pub fn new() -> Self {
|
||||
debug!("Created new GbtGenerator");
|
||||
Self {
|
||||
thread_transactions: Arc::new(Mutex::new(u32hashmap_with_capacity(STARTING_CAPACITY))),
|
||||
}
|
||||
}
|
||||
|
||||
/// # Errors
|
||||
///
|
||||
/// Rejects if the thread panics or if the Mutex is poisoned.
|
||||
#[napi]
|
||||
pub async fn make(&self, mempool: Vec<ThreadTransaction>, max_uid: u32) -> Result<GbtResult> {
|
||||
trace!("make: Current State {:#?}", self.thread_transactions);
|
||||
run_task(
|
||||
Arc::clone(&self.thread_transactions),
|
||||
max_uid as usize,
|
||||
move |map| {
|
||||
for tx in mempool {
|
||||
map.insert(tx.uid, tx);
|
||||
}
|
||||
},
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
/// # Errors
|
||||
///
|
||||
/// Rejects if the thread panics or if the Mutex is poisoned.
|
||||
#[napi]
|
||||
pub async fn update(
|
||||
&self,
|
||||
new_txs: Vec<ThreadTransaction>,
|
||||
remove_txs: Vec<u32>,
|
||||
max_uid: u32,
|
||||
) -> Result<GbtResult> {
|
||||
trace!("update: Current State {:#?}", self.thread_transactions);
|
||||
run_task(
|
||||
Arc::clone(&self.thread_transactions),
|
||||
max_uid as usize,
|
||||
move |map| {
|
||||
for tx in new_txs {
|
||||
map.insert(tx.uid, tx);
|
||||
}
|
||||
for txid in &remove_txs {
|
||||
map.remove(txid);
|
||||
}
|
||||
},
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
/// The result from calling the gbt function.
|
||||
///
|
||||
/// This tuple contains the following:
|
||||
/// blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
|
||||
/// block_weights: A Vector of total weights per block.
|
||||
/// clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
|
||||
/// rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
|
||||
#[napi(constructor)]
|
||||
pub struct GbtResult {
|
||||
pub blocks: Vec<Vec<u32>>,
|
||||
pub block_weights: Vec<u32>,
|
||||
pub clusters: Vec<Vec<u32>>,
|
||||
pub rates: Vec<Vec<f64>>, // Tuples not supported. u32 fits inside f64
|
||||
}
|
||||
|
||||
/// All on another thread, this runs an arbitrary task in between
|
||||
/// taking the lock and running gbt.
|
||||
///
|
||||
/// Rather than filling / updating the `HashMap` on the main thread,
|
||||
/// this allows for `HashMap` modifying tasks to be run before running and returning gbt results.
|
||||
///
|
||||
/// `thread_transactions` is a cloned `Arc` of the `Mutex` for the `HashMap` state.
|
||||
/// `callback` is a `'static + Send` `FnOnce` closure/function that takes a mutable reference
|
||||
/// to the `HashMap` as the only argument. (A move closure is recommended to meet the bounds)
|
||||
async fn run_task<F>(
|
||||
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
|
||||
max_uid: usize,
|
||||
callback: F,
|
||||
) -> Result<GbtResult>
|
||||
where
|
||||
F: FnOnce(&mut ThreadTransactionsMap) + Send + 'static,
|
||||
{
|
||||
debug!("Spawning thread...");
|
||||
let handle = napi::tokio::task::spawn_blocking(move || {
|
||||
debug!(
|
||||
"Getting lock for thread_transactions from thread {:?}...",
|
||||
std::thread::current().id()
|
||||
);
|
||||
let mut map = thread_transactions
|
||||
.lock()
|
||||
.map_err(|_| napi::Error::from_reason("THREAD_TRANSACTIONS Mutex poisoned"))?;
|
||||
callback(&mut map);
|
||||
|
||||
info!("Starting gbt algorithm for {} elements...", map.len());
|
||||
let result = gbt::gbt(&mut map, max_uid);
|
||||
info!("Finished gbt algorithm for {} elements...", map.len());
|
||||
|
||||
debug!(
|
||||
"Releasing lock for thread_transactions from thread {:?}...",
|
||||
std::thread::current().id()
|
||||
);
|
||||
drop(map);
|
||||
|
||||
Ok(result)
|
||||
});
|
||||
|
||||
handle
|
||||
.await
|
||||
.map_err(|_| napi::Error::from_reason("thread panicked"))?
|
||||
}
|
13
backend/rust-gbt/src/thread_transaction.rs
Normal file
|
@ -0,0 +1,13 @@
|
|||
use napi_derive::napi;
|
||||
|
||||
#[derive(Debug)]
|
||||
#[napi(object)]
|
||||
pub struct ThreadTransaction {
|
||||
pub uid: u32,
|
||||
pub order: u32,
|
||||
pub fee: f64,
|
||||
pub weight: u32,
|
||||
pub sigops: u32,
|
||||
pub effective_fee_per_vsize: f64,
|
||||
pub inputs: Vec<u32>,
|
||||
}
|
132
backend/rust-gbt/src/u32_hasher_types.rs
Normal file
|
@ -0,0 +1,132 @@
|
|||
use priority_queue::PriorityQueue;
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fmt::Debug,
|
||||
hash::{BuildHasher, Hasher},
|
||||
};
|
||||
|
||||
/// This is the only way to create a `HashMap` with the `U32HasherState` and capacity
|
||||
pub fn u32hashmap_with_capacity<V>(capacity: usize) -> HashMap<u32, V, U32HasherState> {
|
||||
HashMap::with_capacity_and_hasher(capacity, U32HasherState(()))
|
||||
}
|
||||
|
||||
/// This is the only way to create a `PriorityQueue` with the `U32HasherState` and capacity
|
||||
pub fn u32priority_queue_with_capacity<V: Ord>(
|
||||
capacity: usize,
|
||||
) -> PriorityQueue<u32, V, U32HasherState> {
|
||||
PriorityQueue::with_capacity_and_hasher(capacity, U32HasherState(()))
|
||||
}
|
||||
|
||||
/// This is the only way to create a `HashSet` with the `U32HasherState`
|
||||
pub fn u32hashset_new() -> HashSet<u32, U32HasherState> {
|
||||
HashSet::with_hasher(U32HasherState(()))
|
||||
}
|
||||
|
||||
/// A private unit type is contained so no one can make an instance of it.
|
||||
#[derive(Clone)]
|
||||
pub struct U32HasherState(());
|
||||
|
||||
impl Debug for U32HasherState {
|
||||
fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl BuildHasher for U32HasherState {
|
||||
type Hasher = U32Hasher;
|
||||
|
||||
fn build_hasher(&self) -> Self::Hasher {
|
||||
U32Hasher(0)
|
||||
}
|
||||
}
|
||||
|
||||
/// This also can't be created outside this module due to private field.
|
||||
pub struct U32Hasher(u32);
|
||||
|
||||
impl Hasher for U32Hasher {
|
||||
fn finish(&self) -> u64 {
|
||||
// Safety: Two u32s next to each other will make a u64
|
||||
bytemuck::cast([self.0, 0])
|
||||
}
|
||||
|
||||
fn write(&mut self, bytes: &[u8]) {
|
||||
// Assert in debug builds (testing too) that only 4 byte keys (u32, i32, f32, etc.) run
|
||||
debug_assert!(bytes.len() == 4);
|
||||
// Safety: We know that the size of the key is 4 bytes
|
||||
// We also know that the only way to get an instance of HashMap using this "hasher"
|
||||
// is through the public functions in this module which set the key type to u32.
|
||||
self.0 = *bytemuck::from_bytes(bytes);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::U32HasherState;
|
||||
use priority_queue::PriorityQueue;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[test]
|
||||
fn test_hashmap() {
|
||||
let mut hm: HashMap<u32, String, U32HasherState> = HashMap::with_hasher(U32HasherState(()));
|
||||
|
||||
// Testing basic operations with the custom hasher
|
||||
hm.insert(0, String::from("0"));
|
||||
hm.insert(42, String::from("42"));
|
||||
hm.insert(256, String::from("256"));
|
||||
hm.insert(u32::MAX, String::from("MAX"));
|
||||
hm.insert(u32::MAX >> 2, String::from("MAX >> 2"));
|
||||
|
||||
assert_eq!(hm.get(&0), Some(&String::from("0")));
|
||||
assert_eq!(hm.get(&42), Some(&String::from("42")));
|
||||
assert_eq!(hm.get(&256), Some(&String::from("256")));
|
||||
assert_eq!(hm.get(&u32::MAX), Some(&String::from("MAX")));
|
||||
assert_eq!(hm.get(&(u32::MAX >> 2)), Some(&String::from("MAX >> 2")));
|
||||
assert_eq!(hm.get(&(u32::MAX >> 4)), None);
|
||||
assert_eq!(hm.get(&3), None);
|
||||
assert_eq!(hm.get(&43), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_priority_queue() {
|
||||
let mut pq: PriorityQueue<u32, i32, U32HasherState> =
|
||||
PriorityQueue::with_hasher(U32HasherState(()));
|
||||
|
||||
// Testing basic operations with the custom hasher
|
||||
assert_eq!(pq.push(1, 5), None);
|
||||
assert_eq!(pq.push(2, -10), None);
|
||||
assert_eq!(pq.push(3, 7), None);
|
||||
assert_eq!(pq.push(4, 20), None);
|
||||
assert_eq!(pq.push(u32::MAX, -42), None);
|
||||
|
||||
assert_eq!(pq.push_increase(1, 4), Some(4));
|
||||
assert_eq!(pq.push_increase(2, -8), Some(-10));
|
||||
assert_eq!(pq.push_increase(3, 5), Some(5));
|
||||
assert_eq!(pq.push_increase(4, 21), Some(20));
|
||||
assert_eq!(pq.push_increase(u32::MAX, -99), Some(-99));
|
||||
assert_eq!(pq.push_increase(42, 1337), None);
|
||||
|
||||
assert_eq!(pq.push_decrease(1, 4), Some(5));
|
||||
assert_eq!(pq.push_decrease(2, -10), Some(-8));
|
||||
assert_eq!(pq.push_decrease(3, 5), Some(7));
|
||||
assert_eq!(pq.push_decrease(4, 20), Some(21));
|
||||
assert_eq!(pq.push_decrease(u32::MAX, 100), Some(100));
|
||||
assert_eq!(pq.push_decrease(69, 420), None);
|
||||
|
||||
assert_eq!(pq.peek(), Some((&42, &1337)));
|
||||
assert_eq!(pq.pop(), Some((42, 1337)));
|
||||
assert_eq!(pq.peek(), Some((&69, &420)));
|
||||
assert_eq!(pq.pop(), Some((69, 420)));
|
||||
assert_eq!(pq.peek(), Some((&4, &20)));
|
||||
assert_eq!(pq.pop(), Some((4, 20)));
|
||||
assert_eq!(pq.peek(), Some((&3, &5)));
|
||||
assert_eq!(pq.pop(), Some((3, 5)));
|
||||
assert_eq!(pq.peek(), Some((&1, &4)));
|
||||
assert_eq!(pq.pop(), Some((1, 4)));
|
||||
assert_eq!(pq.peek(), Some((&2, &-10)));
|
||||
assert_eq!(pq.pop(), Some((2, -10)));
|
||||
assert_eq!(pq.peek(), Some((&u32::MAX, &-42)));
|
||||
assert_eq!(pq.pop(), Some((u32::MAX, -42)));
|
||||
assert_eq!(pq.peek(), None);
|
||||
assert_eq!(pq.pop(), None);
|
||||
}
|
||||
}
|
|
@ -27,9 +27,12 @@
|
|||
"AUDIT": true,
|
||||
"ADVANCED_GBT_AUDIT": true,
|
||||
"ADVANCED_GBT_MEMPOOL": true,
|
||||
"RUST_GBT": false,
|
||||
"CPFP_INDEXING": true,
|
||||
"MAX_BLOCKS_BULK_QUERY": 999,
|
||||
"DISK_CACHE_BLOCK_INTERVAL": 999
|
||||
"DISK_CACHE_BLOCK_INTERVAL": 999,
|
||||
"MAX_PUSH_TX_SIZE_WEIGHT": 4000000,
|
||||
"ALLOW_UNREACHABLE": true
|
||||
},
|
||||
"CORE_RPC": {
|
||||
"HOST": "__CORE_RPC_HOST__",
|
||||
|
@ -118,5 +121,11 @@
|
|||
},
|
||||
"CLIGHTNING": {
|
||||
"SOCKET": "__CLIGHTNING_SOCKET__"
|
||||
},
|
||||
"REPLICATION": {
|
||||
"ENABLED": false,
|
||||
"AUDIT": false,
|
||||
"AUDIT_START_HEIGHT": 774000,
|
||||
"SERVERS": []
|
||||
}
|
||||
}
|
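Taken together, the template additions above introduce the `RUST_GBT` switch, a `MAX_PUSH_TX_SIZE_WEIGHT` limit, an `ALLOW_UNREACHABLE` flag, and a new `REPLICATION` block. A hedged sketch, written as a TypeScript literal, of how an operator might set these keys in the backend config; the values are illustrative and the comments are assumptions about their meaning:

```ts
// Illustrative overrides only; key names come from the template above,
// the interpretations in the comments are assumptions.
const mempoolConfigOverrides = {
  MEMPOOL: {
    RUST_GBT: true,                   // presumably: use the rust-gbt addon for block templates
    MAX_PUSH_TX_SIZE_WEIGHT: 4000000, // presumably: reject pushed transactions above this weight
    ALLOW_UNREACHABLE: true,
  },
  REPLICATION: {
    ENABLED: false,
    AUDIT: false,
    AUDIT_START_HEIGHT: 774000,
    SERVERS: [] as string[],          // presumably: peer instances to replicate audit data from
  },
};
```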
|
@ -40,9 +40,12 @@ describe('Mempool Backend Config', () => {
|
|||
AUDIT: false,
|
||||
ADVANCED_GBT_AUDIT: false,
|
||||
ADVANCED_GBT_MEMPOOL: false,
|
||||
RUST_GBT: false,
|
||||
CPFP_INDEXING: false,
|
||||
MAX_BLOCKS_BULK_QUERY: 0,
|
||||
DISK_CACHE_BLOCK_INTERVAL: 6,
|
||||
MAX_PUSH_TX_SIZE_WEIGHT: 400000,
|
||||
ALLOW_UNREACHABLE: true,
|
||||
});
|
||||
|
||||
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
|
||||
|
@ -117,6 +120,13 @@ describe('Mempool Backend Config', () => {
|
|||
GEOLITE2_ASN: '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
|
||||
GEOIP2_ISP: '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
|
||||
});
|
||||
|
||||
expect(config.REPLICATION).toStrictEqual({
|
||||
ENABLED: false,
|
||||
AUDIT: false,
|
||||
AUDIT_START_HEIGHT: 774000,
|
||||
SERVERS: []
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
68
backend/src/__tests__/gbt/gbt-tests.ts
Normal file
@@ -0,0 +1,68 @@
import fs from 'fs';
import { GbtGenerator, ThreadTransaction } from '../../../rust-gbt';
import path from 'path';

const baseline = require('./test-data/target-template.json');
const testVector = require('./test-data/test-data-ids.json');
const vectorUidMap: Map<number, string> = new Map(testVector.map(x => [x[0], x[1]]));
const vectorTxidMap: Map<string, number> = new Map(testVector.map(x => [x[1], x[0]]));
// Note that this test buffer is specially constructed
// such that uids are assigned in numerical txid order
// so that ties break the same way as in Core's implementation
const vectorBuffer: Buffer = fs.readFileSync(path.join(__dirname, './', './test-data/test-buffer.bin'));

describe('Rust GBT', () => {
  test('should produce the same template as getBlockTemplate from Bitcoin Core', async () => {
    const rustGbt = new GbtGenerator();
    const { mempool, maxUid } = mempoolFromArrayBuffer(vectorBuffer.buffer);
    const result = await rustGbt.make(mempool, maxUid);

    const blocks: [string, number][][] = result.blocks.map(block => {
      return block.map(uid => [vectorUidMap.get(uid) || 'missing', uid]);
    });
    const template = baseline.map(tx => [tx.txid, vectorTxidMap.get(tx.txid)]);

    expect(blocks[0].length).toEqual(baseline.length);
    expect(blocks[0]).toEqual(template);
  });
});

function mempoolFromArrayBuffer(buf: ArrayBuffer): { mempool: ThreadTransaction[], maxUid: number } {
  let maxUid = 0;
  const view = new DataView(buf);
  const count = view.getUint32(0, false);
  const txs: ThreadTransaction[] = [];
  let offset = 4;
  for (let i = 0; i < count; i++) {
    const uid = view.getUint32(offset, false);
    maxUid = Math.max(maxUid, uid);
    const tx: ThreadTransaction = {
      uid,
      order: txidToOrdering(vectorUidMap.get(uid) as string),
      fee: view.getFloat64(offset + 4, false),
      weight: view.getUint32(offset + 12, false),
      sigops: view.getUint32(offset + 16, false),
      // feePerVsize: view.getFloat64(offset + 20, false),
      effectiveFeePerVsize: view.getFloat64(offset + 28, false),
      inputs: [],
    };
    const numInputs = view.getUint32(offset + 36, false);
    offset += 40;
    for (let j = 0; j < numInputs; j++) {
      tx.inputs.push(view.getUint32(offset, false));
      offset += 4;
    }
    txs.push(tx);
  }
  return { mempool: txs, maxUid };
}

function txidToOrdering(txid: string): number {
  return parseInt(
    txid.substr(62, 2) +
    txid.substr(60, 2) +
    txid.substr(58, 2) +
    txid.substr(56, 2),
    16
  );
}
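For reference, a minimal sketch of the inverse operation: producing a buffer in the layout that mempoolFromArrayBuffer above expects (4-byte big-endian count, then a 40-byte fixed header per transaction followed by its input uids). serializeMempool is a hypothetical helper used only for illustration; it is not part of this change.

import { ThreadTransaction } from '../../../rust-gbt'; // same import path as the test above

function serializeMempool(txs: ThreadTransaction[]): ArrayBuffer {
  // 4-byte count header + 40-byte fixed header per tx + 4 bytes per input uid
  const size = 4 + txs.reduce((acc, tx) => acc + 40 + tx.inputs.length * 4, 0);
  const buf = new ArrayBuffer(size);
  const view = new DataView(buf);
  view.setUint32(0, txs.length, false); // big-endian, matching the reader
  let offset = 4;
  for (const tx of txs) {
    view.setUint32(offset, tx.uid, false);
    view.setFloat64(offset + 4, tx.fee, false);
    view.setUint32(offset + 12, tx.weight, false);
    view.setUint32(offset + 16, tx.sigops, false);
    // offset + 20 would hold feePerVsize; left at zero here since the reader skips it
    view.setFloat64(offset + 28, tx.effectiveFeePerVsize, false);
    view.setUint32(offset + 36, tx.inputs.length, false);
    offset += 40;
    for (const inputUid of tx.inputs) {
      view.setUint32(offset, inputUid, false);
      offset += 4;
    }
  }
  return buf;
}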
7070
backend/src/__tests__/gbt/test-data/target-template.json
Normal file
7070
backend/src/__tests__/gbt/test-data/target-template.json
Normal file
File diff suppressed because it is too large
BIN
backend/src/__tests__/gbt/test-data/test-buffer.bin
Normal file
BIN
backend/src/__tests__/gbt/test-data/test-buffer.bin
Normal file
Binary file not shown.
1
backend/src/__tests__/gbt/test-data/test-data-ids.json
Normal file
1
backend/src/__tests__/gbt/test-data/test-data-ids.json
Normal file
File diff suppressed because one or more lines are too long
@@ -1,19 +1,21 @@
import config from '../config';
import logger from '../logger';
import { TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
import { MempoolTransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
import rbfCache from './rbf-cache';

const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners

class Audit {
auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
: { censored: string[], added: string[], fresh: string[], sigop: string[], score: number, similarity: number } {
auditBlock(transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended })
: { censored: string[], added: string[], fresh: string[], sigop: string[], fullrbf: string[], score: number, similarity: number } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { censored: [], added: [], fresh: [], sigop: [], score: 0, similarity: 1 };
return { censored: [], added: [], fresh: [], sigop: [], fullrbf: [], score: 0, similarity: 1 };
}

const matches: string[] = []; // present in both mined block and template
const added: string[] = []; // present in mined block, not in template
const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
const fresh: string[] = []; // missing, but firstSeen or lastBoosted within PROPAGATION_MARGIN
const rbf: string[] = []; // either missing or present, and either part of a full-rbf replacement, or a conflict with the mined block
const isCensored = {}; // missing, without excuse
const isDisplaced = {};
let displacedWeight = 0;
@@ -34,8 +36,14 @@ class Audit {
// look for transactions that were expected in the template, but missing from the mined block
for (const txid of projectedBlocks[0].transactionIds) {
if (!inBlock[txid]) {
// tx is recent, may have reached the miner too late for inclusion
if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
// allow missing transactions which either belong to a full rbf tree, or conflict with any transaction in the mined block
if (rbfCache.has(txid) && (rbfCache.isFullRbf(txid) || rbfCache.anyInSameTree(txid, (tx) => inBlock[tx.txid]))) {
rbf.push(txid);
} else if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
// tx is recent, may have reached the miner too late for inclusion
fresh.push(txid);
} else if (mempool[txid]?.lastBoosted != null && (now - (mempool[txid]?.lastBoosted || 0)) <= PROPAGATION_MARGIN) {
// tx was recently cpfp'd, miner may not have the latest effective rate
fresh.push(txid);
} else {
isCensored[txid] = true;
@@ -91,7 +99,9 @@ class Audit {
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
} else {
if (!isDisplaced[tx.txid]) {
if (rbfCache.has(tx.txid)) {
rbf.push(tx.txid);
} else if (!isDisplaced[tx.txid]) {
added.push(tx.txid);
}
overflowWeight += tx.weight;
@@ -138,6 +148,7 @@ class Audit {
added,
fresh,
sigop: [],
fullrbf: rbf,
score,
similarity,
};
@@ -7,7 +7,6 @@ import { SocksProxyAgent } from 'socks-proxy-agent';
import { BisqBlocks, BisqBlock, BisqTransaction, BisqStats, BisqTrade } from './interfaces';
import { Common } from '../common';
import { BlockExtended } from '../../mempool.interfaces';
import { StaticPool } from 'node-worker-threads-pool';
import backendInfo from '../backend-info';
import logger from '../../logger';

@@ -31,10 +30,6 @@ class Bisq {
private priceUpdateCallbackFunction: ((price: number) => void) | undefined;
private topDirectoryWatcher: fs.FSWatcher | undefined;
private subdirectoryWatcher: fs.FSWatcher | undefined;
private jsonParsePool = new StaticPool({
size: 4,
task: (blob: string) => JSON.parse(blob),
});

constructor() {}
@@ -3,10 +3,12 @@ import { IEsploraApi } from './esplora-api.interface';
export interface AbstractBitcoinApi {
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]>;
$getRawTransaction(txId: string, skipConversion?: boolean, addPrevout?: boolean, lazyPrevouts?: boolean): Promise<IEsploraApi.Transaction>;
$getMempoolTransactions(lastTxid: string);
$getTransactionHex(txId: string): Promise<string>;
$getBlockHeightTip(): Promise<number>;
$getBlockHashTip(): Promise<string>;
$getTxIdsForBlock(hash: string): Promise<string[]>;
$getTxsForBlock(hash: string): Promise<IEsploraApi.Transaction[]>;
$getBlockHash(height: number): Promise<string>;
$getBlockHeader(hash: string): Promise<string>;
$getBlock(hash: string): Promise<IEsploraApi.Block>;
@@ -14,6 +16,8 @@ export interface AbstractBitcoinApi {
$getAddress(address: string): Promise<IEsploraApi.Address>;
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$getAddressPrefix(prefix: string): string[];
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash>;
$getScriptHashTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$sendRawTransaction(rawTransaction: string): Promise<string>;
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend>;
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]>;
@@ -29,6 +29,7 @@ class BitcoinApi implements AbstractBitcoinApi {
weight: block.weight,
previousblockhash: block.previousblockhash,
mediantime: block.mediantime,
stale: block.confirmations === -1,
};
}

@@ -58,23 +59,21 @@ class BitcoinApi implements AbstractBitcoinApi {
});
}

$getMempoolTransactions(lastTxid: string): Promise<IEsploraApi.Transaction[]> {
return Promise.resolve([]);
}

$getTransactionHex(txId: string): Promise<string> {
return this.$getRawTransaction(txId, true)
.then((tx) => tx.hex || '');
}

$getBlockHeightTip(): Promise<number> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
return result.find(tip => tip.status === 'active')!.height;
});
return this.bitcoindClient.getBlockCount();
}

$getBlockHashTip(): Promise<string> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
return result.find(tip => tip.status === 'active')!.hash;
});
return this.bitcoindClient.getBestBlockHash();
}

$getTxIdsForBlock(hash: string): Promise<string[]> {
@@ -82,6 +81,10 @@ class BitcoinApi implements AbstractBitcoinApi {
.then((rpcBlock: IBitcoinApi.Block) => rpcBlock.tx);
}

$getTxsForBlock(hash: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getTxsForBlock not supported by the Bitcoin RPC API.');
}

$getRawBlock(hash: string): Promise<Buffer> {
return this.bitcoindClient.getBlock(hash, 0)
.then((raw: string) => Buffer.from(raw, "hex"));
@@ -113,6 +116,14 @@ class BitcoinApi implements AbstractBitcoinApi {
throw new Error('Method getAddressTransactions not supported by the Bitcoin RPC API.');
}

$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
throw new Error('Method getScriptHash not supported by the Bitcoin RPC API.');
}

$getScriptHashTransactions(scripthash: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]> {
throw new Error('Method getScriptHashTransactions not supported by the Bitcoin RPC API.');
}

$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return this.bitcoindClient.getRawMemPool();
}
@@ -121,7 +121,8 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', this.getBlockHeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', this.getAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs/chain/:txId', this.getAddressTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'scripthash/:scripthash', this.getScriptHash)
.get(config.MEMPOOL.API_URL_PREFIX + 'scripthash/:scripthash/txs', this.getScriptHashTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', this.getAddressPrefix)
;
}
@@ -399,9 +400,13 @@

private async getBlockAuditSummary(req: Request, res: Response) {
try {
const transactions = await blocks.$getBlockAuditSummary(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
const auditSummary = await blocks.$getBlockAuditSummary(req.params.hash);
if (auditSummary) {
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(auditSummary);
} else {
return res.status(404).send(`audit not available`);
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
@@ -542,15 +547,37 @@
}
}

private async getAddressTransactions(req: Request, res: Response) {
private async getAddressTransactions(req: Request, res: Response): Promise<void> {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}

try {
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, req.params.txId);
let lastTxId: string = '';
if (req.query.after_txid && typeof req.query.after_txid === 'string') {
lastTxId = req.query.after_txid;
}
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, lastTxId);
res.json(transactions);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
res.status(413).send(e instanceof Error ? e.message : e);
return;
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}

private async getScriptHash(req: Request, res: Response) {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}

try {
const addressData = await bitcoinApi.$getScriptHash(req.params.address);
res.json(addressData);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
return res.status(413).send(e instanceof Error ? e.message : e);
@@ -559,8 +586,26 @@
}
}

private async getAdressTxChain(req: Request, res: Response) {
res.status(501).send('Not implemented');
private async getScriptHashTransactions(req: Request, res: Response): Promise<void> {
if (config.MEMPOOL.BACKEND === 'none') {
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
return;
}

try {
let lastTxId: string = '';
if (req.query.after_txid && typeof req.query.after_txid === 'string') {
lastTxId = req.query.after_txid;
}
const transactions = await bitcoinApi.$getScriptHashTransactions(req.params.address, lastTxId);
res.json(transactions);
} catch (e) {
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
res.status(413).send(e instanceof Error ? e.message : e);
return;
}
res.status(500).send(e instanceof Error ? e.message : e);
}
}

private async getAddressPrefix(req: Request, res: Response) {
@@ -719,12 +764,7 @@
private async $postTransaction(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
try {
let rawTx;
if (typeof req.body === 'object') {
rawTx = Object.keys(req.body)[0];
} else {
rawTx = req.body;
}
const rawTx = Common.getTransactionFromRequest(req, false);
const txIdResult = await bitcoinApi.$sendRawTransaction(rawTx);
res.send(txIdResult);
} catch (e: any) {
@@ -735,12 +775,8 @@

private async $postTransactionForm(req: Request, res: Response) {
res.setHeader('content-type', 'text/plain');
const matches = /tx=([a-z0-9]+)/.exec(req.body);
let txHex = '';
if (matches && matches[1]) {
txHex = matches[1];
}
try {
const txHex = Common.getTransactionFromRequest(req, true);
const txIdResult = await bitcoinClient.sendRawTransaction(txHex);
res.send(txIdResult);
} catch (e: any) {
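The new after_txid query parameter on the address and scripthash transaction endpoints lets clients page through history. A minimal usage sketch, assuming a backend reachable at http://localhost:8999 with the default /api/v1 prefix (host, port, and prefix are illustrative, not part of this change), and Node 18+ for the global fetch:

// Page through an address's transaction history, passing the last txid
// of the previous page as after_txid until an empty page is returned.
async function fetchAllAddressTransactions(address: string): Promise<any[]> {
  const base = 'http://localhost:8999/api/v1'; // illustrative host and prefix
  const all: any[] = [];
  let afterTxid = '';
  while (true) {
    const url = `${base}/address/${address}/txs` + (afterTxid ? `?after_txid=${afterTxid}` : '');
    const page: any[] = await fetch(url).then(r => r.json());
    if (!page.length) {
      return all;
    }
    all.push(...page);
    afterTxid = page[page.length - 1].txid;
  }
}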
@ -126,6 +126,77 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
|
|||
}
|
||||
}
|
||||
|
||||
async $getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
|
||||
try {
|
||||
const balance = await this.electrumClient.blockchainScripthash_getBalance(scripthash);
|
||||
let history = memoryCache.get<IElectrumApi.ScriptHashHistory[]>('Scripthash_getHistory', scripthash);
|
||||
if (!history) {
|
||||
history = await this.electrumClient.blockchainScripthash_getHistory(scripthash);
|
||||
memoryCache.set('Scripthash_getHistory', scripthash, history, 2);
|
||||
}
|
||||
|
||||
const unconfirmed = history ? history.filter((h) => h.fee).length : 0;
|
||||
|
||||
return {
|
||||
'scripthash': scripthash,
|
||||
'chain_stats': {
|
||||
'funded_txo_count': 0,
|
||||
'funded_txo_sum': balance.confirmed ? balance.confirmed : 0,
|
||||
'spent_txo_count': 0,
|
||||
'spent_txo_sum': balance.confirmed < 0 ? balance.confirmed : 0,
|
||||
'tx_count': (history?.length || 0) - unconfirmed,
|
||||
},
|
||||
'mempool_stats': {
|
||||
'funded_txo_count': 0,
|
||||
'funded_txo_sum': balance.unconfirmed > 0 ? balance.unconfirmed : 0,
|
||||
'spent_txo_count': 0,
|
||||
'spent_txo_sum': balance.unconfirmed < 0 ? -balance.unconfirmed : 0,
|
||||
'tx_count': unconfirmed,
|
||||
},
|
||||
'electrum': true,
|
||||
};
|
||||
} catch (e: any) {
|
||||
throw new Error(typeof e === 'string' ? e : e && e.message || e);
|
||||
}
|
||||
}
|
||||
|
||||
async $getScriptHashTransactions(scripthash: string, lastSeenTxId?: string): Promise<IEsploraApi.Transaction[]> {
|
||||
try {
|
||||
loadingIndicators.setProgress('address-' + scripthash, 0);
|
||||
|
||||
const transactions: IEsploraApi.Transaction[] = [];
|
||||
let history = memoryCache.get<IElectrumApi.ScriptHashHistory[]>('Scripthash_getHistory', scripthash);
|
||||
if (!history) {
|
||||
history = await this.electrumClient.blockchainScripthash_getHistory(scripthash);
|
||||
memoryCache.set('Scripthash_getHistory', scripthash, history, 2);
|
||||
}
|
||||
if (!history) {
|
||||
throw new Error('failed to get scripthash history');
|
||||
}
|
||||
history.sort((a, b) => (b.height || 9999999) - (a.height || 9999999));
|
||||
|
||||
let startingIndex = 0;
|
||||
if (lastSeenTxId) {
|
||||
const pos = history.findIndex((historicalTx) => historicalTx.tx_hash === lastSeenTxId);
|
||||
if (pos) {
|
||||
startingIndex = pos + 1;
|
||||
}
|
||||
}
|
||||
const endIndex = Math.min(startingIndex + 10, history.length);
|
||||
|
||||
for (let i = startingIndex; i < endIndex; i++) {
|
||||
const tx = await this.$getRawTransaction(history[i].tx_hash, false, true);
|
||||
transactions.push(tx);
|
||||
loadingIndicators.setProgress('address-' + scripthash, (i + 1) / endIndex * 100);
|
||||
}
|
||||
|
||||
return transactions;
|
||||
} catch (e: any) {
|
||||
loadingIndicators.setProgress('address-' + scripthash, 100);
|
||||
throw new Error(typeof e === 'string' ? e : e && e.message || e);
|
||||
}
|
||||
}
|
||||
|
||||
private $getScriptHashBalance(scriptHash: string): Promise<IElectrumApi.ScriptHashBalance> {
|
||||
return this.electrumClient.blockchainScripthash_getBalance(this.encodeScriptHash(scriptHash));
|
||||
}
|
||||
|
|
|
@ -89,6 +89,7 @@ export namespace IEsploraApi {
|
|||
weight: number;
|
||||
previousblockhash: string;
|
||||
mediantime: number;
|
||||
stale: boolean;
|
||||
}
|
||||
|
||||
export interface Address {
|
||||
|
@ -98,6 +99,13 @@ export namespace IEsploraApi {
|
|||
electrum?: boolean;
|
||||
}
|
||||
|
||||
export interface ScriptHash {
|
||||
scripthash: string;
|
||||
chain_stats: ChainStats;
|
||||
mempool_stats: MempoolStats;
|
||||
electrum?: boolean;
|
||||
}
|
||||
|
||||
export interface ChainStats {
|
||||
funded_txo_count: number;
|
||||
funded_txo_sum: number;
|
||||
|
|
|
@ -69,6 +69,10 @@ class ElectrsApi implements AbstractBitcoinApi {
|
|||
return this.$queryWrapper<IEsploraApi.Transaction>(config.ESPLORA.REST_API_URL + '/tx/' + txId);
|
||||
}
|
||||
|
||||
async $getMempoolTransactions(lastSeenTxid?: string): Promise<IEsploraApi.Transaction[]> {
|
||||
return this.$queryWrapper<IEsploraApi.Transaction[]>(config.ESPLORA.REST_API_URL + '/mempool/txs' + (lastSeenTxid ? '/' + lastSeenTxid : ''));
|
||||
}
|
||||
|
||||
$getTransactionHex(txId: string): Promise<string> {
|
||||
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex');
|
||||
}
|
||||
|
@ -85,6 +89,10 @@ class ElectrsApi implements AbstractBitcoinApi {
|
|||
return this.$queryWrapper<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids');
|
||||
}
|
||||
|
||||
$getTxsForBlock(hash: string): Promise<IEsploraApi.Transaction[]> {
|
||||
return this.$queryWrapper<IEsploraApi.Transaction[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txs');
|
||||
}
|
||||
|
||||
$getBlockHash(height: number): Promise<string> {
|
||||
return this.$queryWrapper<string>(config.ESPLORA.REST_API_URL + '/block-height/' + height);
|
||||
}
|
||||
|
@ -110,6 +118,14 @@ class ElectrsApi implements AbstractBitcoinApi {
|
|||
throw new Error('Method getAddressTransactions not implemented.');
|
||||
}
|
||||
|
||||
$getScriptHash(scripthash: string): Promise<IEsploraApi.ScriptHash> {
|
||||
throw new Error('Method getScriptHash not implemented.');
|
||||
}
|
||||
|
||||
$getScriptHashTransactions(scripthash: string, txId?: string): Promise<IEsploraApi.Transaction[]> {
|
||||
throw new Error('Method getScriptHashTransactions not implemented.');
|
||||
}
|
||||
|
||||
$getAddressPrefix(prefix: string): string[] {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
|
|
|
@ -25,6 +25,7 @@ import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmen
|
|||
import PricesRepository from '../repositories/PricesRepository';
|
||||
import priceUpdater from '../tasks/price-updater';
|
||||
import chainTips from './chain-tips';
|
||||
import websocketHandler from './websocket-handler';
|
||||
|
||||
class Blocks {
|
||||
private blocks: BlockExtended[] = [];
|
||||
|
@ -34,7 +35,7 @@ class Blocks {
|
|||
private lastDifficultyAdjustmentTime = 0;
|
||||
private previousDifficultyRetarget = 0;
|
||||
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
|
||||
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>)[] = [];
|
||||
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]) => Promise<void>)[] = [];
|
||||
|
||||
private mainLoopTimeout: number = 120000;
|
||||
|
||||
|
@ -60,7 +61,7 @@ class Blocks {
|
|||
this.newBlockCallbacks.push(fn);
|
||||
}
|
||||
|
||||
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>) {
|
||||
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]) => Promise<void>) {
|
||||
this.newAsyncBlockCallbacks.push(fn);
|
||||
}
|
||||
|
||||
|
@ -69,69 +70,90 @@ class Blocks {
|
|||
* @param blockHash
|
||||
* @param blockHeight
|
||||
* @param onlyCoinbase - Set to true if you only need the coinbase transaction
|
||||
* @param txIds - optional ordered list of transaction ids if already known
|
||||
* @param quiet - don't print non-essential logs
|
||||
* @param addMempoolData - calculate sigops etc
|
||||
* @returns Promise<TransactionExtended[]>
|
||||
*/
|
||||
private async $getTransactionsExtended(
|
||||
blockHash: string,
|
||||
blockHeight: number,
|
||||
onlyCoinbase: boolean,
|
||||
txIds: string[] | null = null,
|
||||
quiet: boolean = false,
|
||||
addMempoolData: boolean = false,
|
||||
): Promise<TransactionExtended[]> {
|
||||
const transactions: TransactionExtended[] = [];
|
||||
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
|
||||
const isEsplora = config.MEMPOOL.BACKEND === 'esplora';
|
||||
const transactionMap: { [txid: string]: TransactionExtended } = {};
|
||||
|
||||
if (!txIds) {
|
||||
txIds = await bitcoinApi.$getTxIdsForBlock(blockHash);
|
||||
}
|
||||
|
||||
const mempool = memPool.getMempool();
|
||||
let transactionsFound = 0;
|
||||
let transactionsFetched = 0;
|
||||
let foundInMempool = 0;
|
||||
let totalFound = 0;
|
||||
|
||||
for (let i = 0; i < txIds.length; i++) {
|
||||
if (mempool[txIds[i]]) {
|
||||
// We update blocks before the mempool (index.ts), therefore we can
|
||||
// optimize here by directly fetching txs in the "outdated" mempool
|
||||
transactions.push(mempool[txIds[i]]);
|
||||
transactionsFound++;
|
||||
} else if (config.MEMPOOL.BACKEND === 'esplora' || !memPool.hasPriority() || i === 0) {
|
||||
// Otherwise we fetch the tx data through backend services (esplora, electrum, core rpc...)
|
||||
if (!quiet && (i % (Math.round((txIds.length) / 10)) === 0 || i + 1 === txIds.length)) { // Avoid log spam
|
||||
logger.debug(`Indexing tx ${i + 1} of ${txIds.length} in block #${blockHeight}`);
|
||||
}
|
||||
try {
|
||||
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, false, addMempoolData);
|
||||
transactions.push(tx);
|
||||
transactionsFetched++;
|
||||
} catch (e) {
|
||||
try {
|
||||
if (config.MEMPOOL.BACKEND === 'esplora') {
|
||||
// Try again with core
|
||||
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, true, addMempoolData);
|
||||
transactions.push(tx);
|
||||
transactionsFetched++;
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
} catch (e) {
|
||||
if (i === 0) {
|
||||
const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e);
|
||||
logger.err(msg);
|
||||
throw new Error(msg);
|
||||
} else {
|
||||
logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
// Copy existing transactions from the mempool
|
||||
if (!onlyCoinbase) {
|
||||
for (const txid of txIds) {
|
||||
if (mempool[txid]) {
|
||||
transactionMap[txid] = mempool[txid];
|
||||
foundInMempool++;
|
||||
totalFound++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (onlyCoinbase === true) {
|
||||
break; // Fetch the first transaction and exit
|
||||
// Skip expensive lookups while mempool has priority
|
||||
if (onlyCoinbase) {
|
||||
try {
|
||||
const coinbase = await transactionUtils.$getTransactionExtended(txIds[0], false, false, false, addMempoolData);
|
||||
return [coinbase];
|
||||
} catch (e) {
|
||||
const msg = `Cannot fetch coinbase tx ${txIds[0]}. Reason: ` + (e instanceof Error ? e.message : e);
|
||||
logger.err(msg);
|
||||
throw new Error(msg);
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch remaining txs in bulk
|
||||
if (isEsplora && (txIds.length - totalFound > 500)) {
|
||||
try {
|
||||
const rawTransactions = await bitcoinApi.$getTxsForBlock(blockHash);
|
||||
for (const tx of rawTransactions) {
|
||||
if (!transactionMap[tx.txid]) {
|
||||
transactionMap[tx.txid] = addMempoolData ? transactionUtils.extendMempoolTransaction(tx) : transactionUtils.extendTransaction(tx);
|
||||
totalFound++;
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err(`Cannot fetch bulk txs for block ${blockHash}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch remaining txs individually
|
||||
for (const txid of txIds.filter(txid => !transactionMap[txid])) {
|
||||
if (!transactionMap[txid]) {
|
||||
if (!quiet && (totalFound % (Math.round((txIds.length) / 10)) === 0 || totalFound + 1 === txIds.length)) { // Avoid log spam
|
||||
logger.debug(`Indexing tx ${totalFound + 1} of ${txIds.length} in block #${blockHeight}`);
|
||||
}
|
||||
try {
|
||||
const tx = await transactionUtils.$getTransactionExtended(txid, false, false, false, addMempoolData);
|
||||
transactionMap[txid] = tx;
|
||||
totalFound++;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot fetch tx ${txid}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!quiet) {
|
||||
logger.debug(`${transactionsFound} of ${txIds.length} found in mempool. ${transactionsFetched} fetched through backend service.`);
|
||||
logger.debug(`${foundInMempool} of ${txIds.length} found in mempool. ${totalFound - foundInMempool} fetched through backend service.`);
|
||||
}
|
||||
|
||||
return transactions;
|
||||
// Return list of transactions, preserving block order
|
||||
return txIds.map(txid => transactionMap[txid]).filter(tx => tx != null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -158,9 +180,18 @@ class Blocks {
|
|||
};
|
||||
}
|
||||
|
||||
public summarizeBlockTransactions(hash: string, transactions: TransactionExtended[]): BlockSummary {
|
||||
return {
|
||||
id: hash,
|
||||
transactions: Common.stripTransactions(transactions),
|
||||
};
|
||||
}
|
||||
|
||||
private convertLiquidFees(block: IBitcoinApi.VerboseBlock): IBitcoinApi.VerboseBlock {
|
||||
block.tx.forEach(tx => {
|
||||
tx.fee = Object.values(tx.fee || {}).reduce((total, output) => total + output, 0);
|
||||
if (!isFinite(Number(tx.fee))) {
|
||||
tx.fee = Object.values(tx.fee || {}).reduce((total, output) => total + output, 0);
|
||||
}
|
||||
});
|
||||
return block;
|
||||
}
|
||||
|
@ -546,7 +577,7 @@ class Blocks {
|
|||
}
|
||||
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
|
||||
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
|
||||
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
|
||||
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, null, true);
|
||||
const blockExtended = await this.$getBlockExtended(block, transactions);
|
||||
|
||||
newlyIndexed++;
|
||||
|
@ -578,7 +609,7 @@ class Blocks {
|
|||
|
||||
let fastForwarded = false;
|
||||
let handledBlocks = 0;
|
||||
const blockHeightTip = await bitcoinApi.$getBlockHeightTip();
|
||||
const blockHeightTip = await bitcoinCoreApi.$getBlockHeightTip();
|
||||
this.updateTimerProgress(timer, 'got block height tip');
|
||||
|
||||
if (this.blocks.length === 0) {
|
||||
|
@ -631,11 +662,11 @@ class Blocks {
|
|||
}
|
||||
|
||||
this.updateTimerProgress(timer, `getting block data for ${this.currentBlockHeight}`);
|
||||
const blockHash = await bitcoinApi.$getBlockHash(this.currentBlockHeight);
|
||||
const blockHash = await bitcoinCoreApi.$getBlockHash(this.currentBlockHeight);
|
||||
const verboseBlock = await bitcoinClient.getBlock(blockHash, 2);
|
||||
const block = BitcoinApi.convertBlock(verboseBlock);
|
||||
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
|
||||
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false, false, true);
|
||||
const txIds: string[] = verboseBlock.tx.map(tx => tx.txid);
|
||||
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false, txIds, false, true) as MempoolTransactionExtended[];
|
||||
if (config.MEMPOOL.BACKEND !== 'esplora') {
|
||||
// fill in missing transaction fee data from verboseBlock
|
||||
for (let i = 0; i < transactions.length; i++) {
|
||||
|
@ -646,13 +677,9 @@ class Blocks {
|
|||
}
|
||||
const cpfpSummary: CpfpSummary = Common.calculateCpfp(block.height, transactions);
|
||||
const blockExtended: BlockExtended = await this.$getBlockExtended(block, cpfpSummary.transactions);
|
||||
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);
|
||||
const blockSummary: BlockSummary = this.summarizeBlockTransactions(block.id, cpfpSummary.transactions);
|
||||
this.updateTimerProgress(timer, `got block data for ${this.currentBlockHeight}`);
|
||||
|
||||
// start async callbacks
|
||||
this.updateTimerProgress(timer, `starting async callbacks for ${this.currentBlockHeight}`);
|
||||
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
|
||||
|
||||
if (Common.indexingEnabled()) {
|
||||
if (!fastForwarded) {
|
||||
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
|
||||
|
@ -664,22 +691,27 @@ class Blocks {
|
|||
await BlocksRepository.$deleteBlocksFrom(lastBlock.height - 10);
|
||||
await HashratesRepository.$deleteLastEntries();
|
||||
await cpfpRepository.$deleteClustersFrom(lastBlock.height - 10);
|
||||
this.blocks = this.blocks.slice(0, -10);
|
||||
this.updateTimerProgress(timer, `rolled back chain divergence from ${this.currentBlockHeight}`);
|
||||
for (let i = 10; i >= 0; --i) {
|
||||
const newBlock = await this.$indexBlock(lastBlock.height - i);
|
||||
this.blocks.push(newBlock);
|
||||
this.updateTimerProgress(timer, `reindexed block`);
|
||||
await this.$getStrippedBlockTransactions(newBlock.id, true, true);
|
||||
this.updateTimerProgress(timer, `reindexed block summary`);
|
||||
let cpfpSummary;
|
||||
if (config.MEMPOOL.CPFP_INDEXING) {
|
||||
await this.$indexCPFP(newBlock.id, lastBlock.height - i);
|
||||
cpfpSummary = await this.$indexCPFP(newBlock.id, lastBlock.height - i);
|
||||
this.updateTimerProgress(timer, `reindexed block cpfp`);
|
||||
}
|
||||
await this.$getStrippedBlockTransactions(newBlock.id, true, true, cpfpSummary, newBlock.height);
|
||||
this.updateTimerProgress(timer, `reindexed block summary`);
|
||||
}
|
||||
await mining.$indexDifficultyAdjustments();
|
||||
await DifficultyAdjustmentsRepository.$deleteLastAdjustment();
|
||||
this.updateTimerProgress(timer, `reindexed difficulty adjustments`);
|
||||
logger.info(`Re-indexed 10 blocks and summaries. Also re-indexed the last difficulty adjustments. Will re-index latest hashrates in a few seconds.`, logger.tags.mining);
|
||||
indexer.reindex();
|
||||
|
||||
websocketHandler.handleReorg();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -704,7 +736,7 @@ class Blocks {
|
|||
|
||||
// Save blocks summary for visualization if it's enabled
|
||||
if (Common.blocksSummariesIndexingEnabled() === true) {
|
||||
await this.$getStrippedBlockTransactions(blockExtended.id, true);
|
||||
await this.$getStrippedBlockTransactions(blockExtended.id, true, false, cpfpSummary, blockExtended.height);
|
||||
this.updateTimerProgress(timer, `saved block summary for ${this.currentBlockHeight}`);
|
||||
}
|
||||
if (config.MEMPOOL.CPFP_INDEXING) {
|
||||
|
@ -714,6 +746,10 @@ class Blocks {
|
|||
}
|
||||
}
|
||||
|
||||
// start async callbacks
|
||||
this.updateTimerProgress(timer, `starting async callbacks for ${this.currentBlockHeight}`);
|
||||
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
|
||||
|
||||
if (block.height % 2016 === 0) {
|
||||
if (Common.indexingEnabled()) {
|
||||
await DifficultyAdjustmentsRepository.$saveAdjustments({
|
||||
|
@ -730,6 +766,11 @@ class Blocks {
|
|||
this.currentDifficulty = block.difficulty;
|
||||
}
|
||||
|
||||
// wait for pending async callbacks to finish
|
||||
this.updateTimerProgress(timer, `waiting for async callbacks to complete for ${this.currentBlockHeight}`);
|
||||
await Promise.all(callbackPromises);
|
||||
this.updateTimerProgress(timer, `async callbacks completed for ${this.currentBlockHeight}`);
|
||||
|
||||
this.blocks.push(blockExtended);
|
||||
if (this.blocks.length > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4) {
|
||||
this.blocks = this.blocks.slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4);
|
||||
|
@ -746,11 +787,6 @@ class Blocks {
|
|||
diskCache.$saveCacheToDisk();
|
||||
}
|
||||
|
||||
// wait for pending async callbacks to finish
|
||||
this.updateTimerProgress(timer, `waiting for async callbacks to complete for ${this.currentBlockHeight}`);
|
||||
await Promise.all(callbackPromises);
|
||||
this.updateTimerProgress(timer, `async callbacks completed for ${this.currentBlockHeight}`);
|
||||
|
||||
handledBlocks++;
|
||||
}
|
||||
|
||||
|
@ -806,6 +842,16 @@ class Blocks {
|
|||
return blockExtended;
|
||||
}
|
||||
|
||||
public async $indexStaleBlock(hash: string): Promise<BlockExtended> {
|
||||
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(hash);
|
||||
const transactions = await this.$getTransactionsExtended(hash, block.height, true);
|
||||
const blockExtended = await this.$getBlockExtended(block, transactions);
|
||||
|
||||
blockExtended.canonical = await bitcoinApi.$getBlockHash(block.height);
|
||||
|
||||
return blockExtended;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get one block by its hash
|
||||
*/
|
||||
|
@ -823,11 +869,15 @@ class Blocks {
|
|||
|
||||
// Bitcoin network, add our custom data on top
|
||||
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(hash);
|
||||
return await this.$indexBlock(block.height);
|
||||
if (block.stale) {
|
||||
return await this.$indexStaleBlock(hash);
|
||||
} else {
|
||||
return await this.$indexBlock(block.height);
|
||||
}
|
||||
}
|
||||
|
||||
public async $getStrippedBlockTransactions(hash: string, skipMemoryCache = false,
|
||||
skipDBLookup = false): Promise<TransactionStripped[]>
|
||||
skipDBLookup = false, cpfpSummary?: CpfpSummary, blockHeight?: number): Promise<TransactionStripped[]>
|
||||
{
|
||||
if (skipMemoryCache === false) {
|
||||
// Check the memory cache
|
||||
|
@ -845,13 +895,35 @@ class Blocks {
|
|||
}
|
||||
}
|
||||
|
||||
// Call Core RPC
|
||||
const block = await bitcoinClient.getBlock(hash, 2);
|
||||
const summary = this.summarizeBlock(block);
|
||||
let height = blockHeight;
|
||||
let summary: BlockSummary;
|
||||
if (cpfpSummary && !Common.isLiquid()) {
|
||||
summary = {
|
||||
id: hash,
|
||||
transactions: cpfpSummary.transactions.map(tx => {
|
||||
return {
|
||||
txid: tx.txid,
|
||||
fee: tx.fee,
|
||||
vsize: tx.vsize,
|
||||
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0)),
|
||||
rate: tx.effectiveFeePerVsize
|
||||
};
|
||||
}),
|
||||
};
|
||||
} else {
|
||||
// Call Core RPC
|
||||
const block = await bitcoinClient.getBlock(hash, 2);
|
||||
summary = this.summarizeBlock(block);
|
||||
height = block.height;
|
||||
}
|
||||
if (height == null) {
|
||||
const block = await bitcoinApi.$getBlock(hash);
|
||||
height = block.height;
|
||||
}
|
||||
|
||||
// Index the response if needed
|
||||
if (Common.blocksSummariesIndexingEnabled() === true) {
|
||||
await BlocksSummariesRepository.$saveTransactions(block.height, block.hash, summary.transactions);
|
||||
await BlocksSummariesRepository.$saveTransactions(height, hash, summary.transactions);
|
||||
}
|
||||
|
||||
return summary.transactions;
|
||||
|
@ -1007,19 +1079,11 @@ class Blocks {
|
|||
}
|
||||
|
||||
public async $getBlockAuditSummary(hash: string): Promise<any> {
|
||||
let summary;
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
|
||||
summary = await BlocksAuditsRepository.$getBlockAudit(hash);
|
||||
return BlocksAuditsRepository.$getBlockAudit(hash);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
// fallback to non-audited transaction summary
|
||||
if (!summary?.transactions?.length) {
|
||||
const strippedTransactions = await this.$getStrippedBlockTransactions(hash);
|
||||
summary = {
|
||||
transactions: strippedTransactions
|
||||
};
|
||||
}
|
||||
return summary;
|
||||
}
|
||||
|
||||
public getLastDifficultyAdjustmentTime(): number {
|
||||
|
@ -1050,9 +1114,13 @@ class Blocks {
|
|||
}
|
||||
|
||||
public async $saveCpfp(hash: string, height: number, cpfpSummary: CpfpSummary): Promise<void> {
|
||||
const result = await cpfpRepository.$batchSaveClusters(cpfpSummary.clusters);
|
||||
if (!result) {
|
||||
await cpfpRepository.$insertProgressMarker(height);
|
||||
try {
|
||||
const result = await cpfpRepository.$batchSaveClusters(cpfpSummary.clusters);
|
||||
if (!result) {
|
||||
await cpfpRepository.$insertProgressMarker(height);
|
||||
}
|
||||
} catch (e) {
|
||||
// not a fatal error, we'll try again next time the indexer runs
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import * as bitcoinjs from 'bitcoinjs-lib';
|
||||
import { Request } from 'express';
|
||||
import { Ancestor, CpfpInfo, CpfpSummary, CpfpCluster, EffectiveFeeStats, MempoolBlockWithTransactions, TransactionExtended, MempoolTransactionExtended, TransactionStripped, WorkingEffectiveFeeStats } from '../mempool.interfaces';
|
||||
import config from '../config';
|
||||
import { NodeSocket } from '../repositories/NodesSocketsRepository';
|
||||
|
@ -86,19 +88,19 @@ export class Common {
|
|||
const match = spendMap.get(`${vin.txid}:${vin.vout}`);
|
||||
if (match && match.txid !== tx.txid) {
|
||||
replaced.add(match);
|
||||
// remove this tx from the spendMap
|
||||
// prevents the same tx being replaced more than once
|
||||
for (const replacedVin of match.vin) {
|
||||
const key = `${replacedVin.txid}:${replacedVin.vout}`;
|
||||
spendMap.delete(key);
|
||||
}
|
||||
}
|
||||
const key = `${vin.txid}:${vin.vout}`;
|
||||
spendMap.delete(key);
|
||||
}
|
||||
if (replaced.size) {
|
||||
matches[tx.txid] = { replaced: Array.from(replaced), replacedBy: tx };
|
||||
}
|
||||
// remove this tx from the spendMap
|
||||
// prevents the same tx being replaced more than once
|
||||
for (const vin of tx.vin) {
|
||||
const key = `${vin.txid}:${vin.vout}`;
|
||||
if (spendMap.get(key)?.txid === tx.txid) {
|
||||
spendMap.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
return matches;
|
||||
}
|
||||
|
@ -113,6 +115,10 @@ export class Common {
|
|||
};
|
||||
}
|
||||
|
||||
static stripTransactions(txs: TransactionExtended[]): TransactionStripped[] {
|
||||
return txs.map(this.stripTransaction);
|
||||
}
|
||||
|
||||
static sleep$(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
setTimeout(() => {
|
||||
|
@ -507,6 +513,115 @@ export class Common {
|
|||
static getNthPercentile(n: number, sortedDistribution: any[]): any {
|
||||
return sortedDistribution[Math.floor((sortedDistribution.length - 1) * (n / 100))];
|
||||
}
|
||||
|
||||
static getTransactionFromRequest(req: Request, form: boolean): string {
|
||||
let rawTx: any = typeof req.body === 'object' && form
|
||||
? Object.values(req.body)[0] as any
|
||||
: req.body;
|
||||
if (typeof rawTx !== 'string') {
|
||||
throw Object.assign(new Error('Non-string request body'), { code: -1 });
|
||||
}
|
||||
|
||||
// Support both upper and lower case hex
|
||||
// Support both txHash= Form and direct API POST
|
||||
const reg = form ? /^txHash=((?:[a-fA-F0-9]{2})+)$/ : /^((?:[a-fA-F0-9]{2})+)$/;
|
||||
const matches = reg.exec(rawTx);
|
||||
if (!matches || !matches[1]) {
|
||||
throw Object.assign(new Error('Non-hex request body'), { code: -2 });
|
||||
}
|
||||
|
||||
// Guaranteed to be a hex string of multiple of 2
|
||||
// Guaranteed to be lower case
|
||||
// Guaranteed to pass validation (see function below)
|
||||
return this.validateTransactionHex(matches[1].toLowerCase());
|
||||
}
|
||||
|
||||
private static validateTransactionHex(txhex: string): string {
|
||||
// Do not mutate txhex
|
||||
|
||||
// We assume txhex to be valid hex (output of getTransactionFromRequest above)
|
||||
|
||||
// Check 1: Valid transaction parse
|
||||
let tx: bitcoinjs.Transaction;
|
||||
try {
|
||||
tx = bitcoinjs.Transaction.fromHex(txhex);
|
||||
} catch(e) {
|
||||
throw Object.assign(new Error('Invalid transaction (could not parse)'), { code: -4 });
|
||||
}
|
||||
|
||||
// Check 2: Simple size check
|
||||
if (tx.weight() > config.MEMPOOL.MAX_PUSH_TX_SIZE_WEIGHT) {
|
||||
throw Object.assign(new Error(`Transaction too large (max ${config.MEMPOOL.MAX_PUSH_TX_SIZE_WEIGHT} weight units)`), { code: -3 });
|
||||
}
|
||||
|
||||
// Check 3: Check unreachable script in taproot (if not allowed)
|
||||
if (!config.MEMPOOL.ALLOW_UNREACHABLE) {
|
||||
tx.ins.forEach(input => {
|
||||
const witness = input.witness;
|
||||
// See BIP 341: Script validation rules
|
||||
const hasAnnex = witness.length >= 2 &&
|
||||
witness[witness.length - 1][0] === 0x50;
|
||||
const scriptSpendMinLength = hasAnnex ? 3 : 2;
|
||||
const maybeScriptSpend = witness.length >= scriptSpendMinLength;
|
||||
|
||||
if (maybeScriptSpend) {
|
||||
const controlBlock = witness[witness.length - scriptSpendMinLength + 1];
|
||||
if (controlBlock.length === 0 || !this.isValidLeafVersion(controlBlock[0])) {
|
||||
// Skip this input, it's not taproot
|
||||
return;
|
||||
}
|
||||
// Definitely taproot. Get script
|
||||
const script = witness[witness.length - scriptSpendMinLength];
|
||||
const decompiled = bitcoinjs.script.decompile(script);
|
||||
if (!decompiled || decompiled.length < 2) {
|
||||
// Skip this input
|
||||
return;
|
||||
}
|
||||
// Iterate up to second last (will look ahead 1 item)
|
||||
for (let i = 0; i < decompiled.length - 1; i++) {
|
||||
const first = decompiled[i];
|
||||
const second = decompiled[i + 1];
|
||||
if (
|
||||
first === bitcoinjs.opcodes.OP_FALSE &&
|
||||
second === bitcoinjs.opcodes.OP_IF
|
||||
) {
|
||||
throw Object.assign(new Error('Unreachable taproot scripts not allowed'), { code: -5 });
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Pass through the input string untouched
|
||||
return txhex;
|
||||
}
|
||||
|
||||
private static isValidLeafVersion(leafVersion: number): boolean {
|
||||
// See Note 7 in BIP341
|
||||
// https://github.com/bitcoin/bips/blob/66a1a8151021913047934ebab3f8883f2f8ca75b/bip-0341.mediawiki#cite_note-7
|
||||
// "What constraints are there on the leaf version?"
|
||||
|
||||
// Must be an integer between 0 and 255
|
||||
// Since we're parsing a byte
|
||||
if (Math.floor(leafVersion) !== leafVersion || leafVersion < 0 || leafVersion > 255) {
|
||||
return false;
|
||||
}
|
||||
// "the leaf version cannot be odd"
|
||||
if ((leafVersion & 0x01) === 1) {
|
||||
return false;
|
||||
}
|
||||
// "The values that comply to this rule are
|
||||
// the 32 even values between 0xc0 and 0xfe
|
||||
if (leafVersion >= 0xc0 && leafVersion <= 0xfe) {
|
||||
return true;
|
||||
}
|
||||
// and also 0x66, 0x7e, 0x80, 0x84, 0x96, 0x98, 0xba, 0xbc, 0xbe."
|
||||
if ([0x66, 0x7e, 0x80, 0x84, 0x96, 0x98, 0xba, 0xbc, 0xbe].includes(leafVersion)) {
|
||||
return true;
|
||||
}
|
||||
// Otherwise, invalid
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
|
|||
import { RowDataPacket } from 'mysql2';
|
||||
|
||||
class DatabaseMigration {
|
||||
private static currentVersion = 62;
|
||||
private static currentVersion = 64;
|
||||
private queryTimeout = 3600_000;
|
||||
private statisticsAddedIndexed = false;
|
||||
private uniqueLogs: string[] = [];
|
||||
|
@ -539,6 +539,15 @@ class DatabaseMigration {
|
|||
await this.updateToSchemaVersion(62);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 63 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD fullrbf_txs JSON DEFAULT "[]"');
|
||||
await this.updateToSchemaVersion(63);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 64 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD features text NULL');
|
||||
await this.updateToSchemaVersion(64);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -195,6 +195,7 @@ class DiskCache {
|
|||
|
||||
if (data.mempoolArray) {
|
||||
for (const tx of data.mempoolArray) {
|
||||
delete tx.uid;
|
||||
data.mempool[tx.txid] = tx;
|
||||
}
|
||||
}
|
||||
|
@ -207,6 +208,7 @@ class DiskCache {
|
|||
const cacheData2 = JSON.parse(fs.readFileSync(fileName, 'utf8'));
|
||||
if (cacheData2.mempoolArray) {
|
||||
for (const tx of cacheData2.mempoolArray) {
|
||||
delete tx.uid;
|
||||
data.mempool[tx.txid] = tx;
|
||||
}
|
||||
} else {
|
||||
|
|
|
@ -80,7 +80,7 @@ class ChannelsApi {
|
|||
|
||||
public async $searchChannelsById(search: string): Promise<any[]> {
|
||||
try {
|
||||
const searchStripped = search.replace('%', '') + '%';
|
||||
const searchStripped = search.replace(/[^0-9x]/g, '') + '%';
|
||||
const query = `SELECT id, short_id, capacity, status FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
|
||||
return rows;
|
||||
|
@ -117,6 +117,26 @@ class ChannelsApi {
|
|||
}
|
||||
}
|
||||
|
||||
public async $getPenaltyClosedChannels(): Promise<any[]> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT n1.alias AS alias_left,
|
||||
n2.alias AS alias_right,
|
||||
channels.*
|
||||
FROM channels
|
||||
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
|
||||
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
|
||||
WHERE channels.status = 2 AND channels.closing_reason = 3
|
||||
ORDER BY closing_date DESC
|
||||
`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getPenaltyClosedChannels error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getUnresolvedClosedChannels(): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`;
|
||||
|
|
|
@ -11,6 +11,7 @@ class ChannelsRoutes {
|
|||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/search/:search', this.$searchChannelsById)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/:short_id', this.$getChannel)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels', this.$getChannelsForNode)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/penalties', this.$getPenaltyClosedChannels)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo', this.$getAllChannelsGeo)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo/:publicKey', this.$getAllChannelsGeo)
|
||||
;
|
||||
|
@ -108,6 +109,18 @@ class ChannelsRoutes {
|
|||
}
|
||||
}
|
||||
|
||||
private async $getPenaltyClosedChannels(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const channels = await channelsApi.$getPenaltyClosedChannels();
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(channels);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getAllChannelsGeo(req: Request, res: Response) {
|
||||
try {
|
||||
const style: string = typeof req.query.style === 'string' ? req.query.style : '';
|
||||
|
|
|
@ -3,6 +3,7 @@ import DB from '../../database';
|
|||
import { ResultSetHeader } from 'mysql2';
|
||||
import { ILightningApi } from '../lightning/lightning-api.interface';
|
||||
import { ITopNodesPerCapacity, ITopNodesPerChannels } from '../../mempool.interfaces';
|
||||
import { bin2hex } from '../../utils/format';
|
||||
|
||||
class NodesApi {
|
||||
public async $getWorldNodes(): Promise<any> {
|
||||
|
@ -56,7 +57,8 @@ class NodesApi {
|
|||
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
|
||||
as_number, city_id, country_id, subdivision_id, longitude, latitude,
|
||||
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
|
||||
geo_names_country.names as country, geo_names_subdivision.names as subdivision
|
||||
geo_names_country.names as country, geo_names_subdivision.names as subdivision,
|
||||
features
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_as on geo_names_as.id = as_number
|
||||
LEFT JOIN geo_names geo_names_city on geo_names_city.id = city_id
|
||||
|
@ -76,6 +78,23 @@ class NodesApi {
|
|||
node.city = JSON.parse(node.city);
|
||||
node.country = JSON.parse(node.country);
|
||||
|
||||
// Features
|
||||
node.features = JSON.parse(node.features);
|
||||
node.featuresBits = null;
|
||||
if (node.features) {
|
||||
let maxBit = 0;
|
||||
for (const feature of node.features) {
|
||||
maxBit = Math.max(maxBit, feature.bit);
|
||||
}
|
||||
maxBit = Math.ceil(maxBit / 4) * 4 - 1;
|
||||
|
||||
node.featuresBits = new Array(maxBit + 1).fill(0);
|
||||
for (const feature of node.features) {
|
||||
node.featuresBits[feature.bit] = 1;
|
||||
}
|
||||
node.featuresBits = bin2hex(node.featuresBits.reverse().join(''));
|
||||
}
|
||||
|
||||
// Active channels and capacity
|
||||
const activeChannelsStats: any = await this.$getActiveChannelsStats(public_key);
|
||||
node.active_channel_count = activeChannelsStats.active_channel_count ?? 0;
|
||||
|
@ -656,10 +675,19 @@ class NodesApi {
|
|||
alias_search,
|
||||
color,
|
||||
sockets,
|
||||
status
|
||||
status,
|
||||
features
|
||||
)
|
||||
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1)
|
||||
ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, alias_search = ?, color = ?, sockets = ?, status = 1`;
|
||||
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
updated_at = FROM_UNIXTIME(?),
|
||||
alias = ?,
|
||||
alias_search = ?,
|
||||
color = ?,
|
||||
sockets = ?,
|
||||
status = 1,
|
||||
features = ?
|
||||
`;
|
||||
|
||||
await DB.query(query, [
|
||||
node.pub_key,
|
||||
|
@ -668,11 +696,13 @@ class NodesApi {
|
|||
this.aliasToSearchText(node.alias),
|
||||
node.color,
|
||||
sockets,
|
||||
JSON.stringify(node.features),
|
||||
node.last_update,
|
||||
node.alias,
|
||||
this.aliasToSearchText(node.alias),
|
||||
node.color,
|
||||
sockets,
|
||||
JSON.stringify(node.features),
|
||||
]);
|
||||
} catch (e) {
|
||||
logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));
|
||||
|
|
|
@@ -2,8 +2,91 @@ import { ILightningApi } from '../lightning-api.interface';
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
import logger from '../../../logger';
import { Common } from '../../common';
import { hex2bin } from '../../../utils/format';
import config from '../../../config';

// https://github.com/lightningnetwork/lnd/blob/master/lnwire/features.go
export enum FeatureBits {
  DataLossProtectRequired = 0,
  DataLossProtectOptional = 1,
  InitialRoutingSync = 3,
  UpfrontShutdownScriptRequired = 4,
  UpfrontShutdownScriptOptional = 5,
  GossipQueriesRequired = 6,
  GossipQueriesOptional = 7,
  TLVOnionPayloadRequired = 8,
  TLVOnionPayloadOptional = 9,
  StaticRemoteKeyRequired = 12,
  StaticRemoteKeyOptional = 13,
  PaymentAddrRequired = 14,
  PaymentAddrOptional = 15,
  MPPRequired = 16,
  MPPOptional = 17,
  WumboChannelsRequired = 18,
  WumboChannelsOptional = 19,
  AnchorsRequired = 20,
  AnchorsOptional = 21,
  AnchorsZeroFeeHtlcTxRequired = 22,
  AnchorsZeroFeeHtlcTxOptional = 23,
  ShutdownAnySegwitRequired = 26,
  ShutdownAnySegwitOptional = 27,
  AMPRequired = 30,
  AMPOptional = 31,
  ExplicitChannelTypeRequired = 44,
  ExplicitChannelTypeOptional = 45,
  ScidAliasRequired = 46,
  ScidAliasOptional = 47,
  PaymentMetadataRequired = 48,
  PaymentMetadataOptional = 49,
  ZeroConfRequired = 50,
  ZeroConfOptional = 51,
  KeysendRequired = 54,
  KeysendOptional = 55,
  ScriptEnforcedLeaseRequired = 2022,
  ScriptEnforcedLeaseOptional = 2023,
  MaxBolt11Feature = 5114,
};

export const FeaturesMap = new Map<FeatureBits, string>([
  [FeatureBits.DataLossProtectRequired, 'data-loss-protect'],
  [FeatureBits.DataLossProtectOptional, 'data-loss-protect'],
  [FeatureBits.InitialRoutingSync, 'initial-routing-sync'],
  [FeatureBits.UpfrontShutdownScriptRequired, 'upfront-shutdown-script'],
  [FeatureBits.UpfrontShutdownScriptOptional, 'upfront-shutdown-script'],
  [FeatureBits.GossipQueriesRequired, 'gossip-queries'],
  [FeatureBits.GossipQueriesOptional, 'gossip-queries'],
  [FeatureBits.TLVOnionPayloadRequired, 'tlv-onion'],
  [FeatureBits.TLVOnionPayloadOptional, 'tlv-onion'],
  [FeatureBits.StaticRemoteKeyOptional, 'static-remote-key'],
  [FeatureBits.StaticRemoteKeyRequired, 'static-remote-key'],
  [FeatureBits.PaymentAddrOptional, 'payment-addr'],
  [FeatureBits.PaymentAddrRequired, 'payment-addr'],
  [FeatureBits.MPPOptional, 'multi-path-payments'],
  [FeatureBits.MPPRequired, 'multi-path-payments'],
  [FeatureBits.AnchorsRequired, 'anchor-commitments'],
  [FeatureBits.AnchorsOptional, 'anchor-commitments'],
  [FeatureBits.AnchorsZeroFeeHtlcTxRequired, 'anchors-zero-fee-htlc-tx'],
  [FeatureBits.AnchorsZeroFeeHtlcTxOptional, 'anchors-zero-fee-htlc-tx'],
  [FeatureBits.WumboChannelsRequired, 'wumbo-channels'],
  [FeatureBits.WumboChannelsOptional, 'wumbo-channels'],
  [FeatureBits.AMPRequired, 'amp'],
  [FeatureBits.AMPOptional, 'amp'],
  [FeatureBits.PaymentMetadataOptional, 'payment-metadata'],
  [FeatureBits.PaymentMetadataRequired, 'payment-metadata'],
  [FeatureBits.ExplicitChannelTypeOptional, 'explicit-commitment-type'],
  [FeatureBits.ExplicitChannelTypeRequired, 'explicit-commitment-type'],
  [FeatureBits.KeysendOptional, 'keysend'],
  [FeatureBits.KeysendRequired, 'keysend'],
  [FeatureBits.ScriptEnforcedLeaseRequired, 'script-enforced-lease'],
  [FeatureBits.ScriptEnforcedLeaseOptional, 'script-enforced-lease'],
  [FeatureBits.ScidAliasRequired, 'scid-alias'],
  [FeatureBits.ScidAliasOptional, 'scid-alias'],
  [FeatureBits.ZeroConfRequired, 'zero-conf'],
  [FeatureBits.ZeroConfOptional, 'zero-conf'],
  [FeatureBits.ShutdownAnySegwitRequired, 'shutdown-any-segwit'],
  [FeatureBits.ShutdownAnySegwitOptional, 'shutdown-any-segwit'],
]);

/**
 * Convert a clightning "listnode" entry to a lnd node entry
 */
@@ -17,10 +100,36 @@ export function convertNode(clNode: any): ILightningApi.Node {
      custom_records = undefined;
    }
  }

  const nodeFeatures: ILightningApi.Feature[] = [];
  const nodeFeaturesBinary = hex2bin(clNode.features).split('').reverse().join('');

  for (let i = 0; i < nodeFeaturesBinary.length; i++) {
    if (nodeFeaturesBinary[i] === '0') {
      continue;
    }
    const feature = FeaturesMap.get(i);
    if (!feature) {
      nodeFeatures.push({
        bit: i,
        name: 'unknown',
        is_required: i % 2 === 0,
        is_known: false
      });
    } else {
      nodeFeatures.push({
        bit: i,
        name: feature,
        is_required: i % 2 === 0,
        is_known: true
      });
    }
  }

  return {
    alias: clNode.alias ?? '',
    color: `#${clNode.color ?? ''}`,
    features: [], // TODO parse and return clNode.feature
    features: nodeFeatures,
    pub_key: clNode.nodeid,
    addresses: clNode.addresses?.map((addr) => {
      let address = addr.address;
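For context, a minimal sketch of the hex2bin helper assumed above (the real implementation lives in utils/format and may differ) and how the bit indexing works; the feature hex is made up for illustration.

function hex2bin(hex: string): string {
  // expand each hex nibble into 4 bits, most significant bit first
  return hex.split('').map(c => parseInt(c, 16).toString(2).padStart(4, '0')).join('');
}

const bits = hex2bin('88a0802a2a69a2').split('').reverse().join('');
// after the reverse, bits[i] === '1' means feature bit i is set;
// even bits are the "required" variants, odd bits the "optional" ones.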
@@ -108,7 +217,7 @@ async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILigh

  return {
    channel_id: Common.channelShortIdToIntegerId(clChannelA.short_channel_id),
    capacity: clChannelA.satoshis,
    capacity: (clChannelA.amount_msat / 1000).toString(),
    last_update: lastUpdate,
    node1_policy: convertPolicy(clChannelA),
    node2_policy: convertPolicy(clChannelB),

@@ -132,7 +241,7 @@ async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Cha

  return {
    channel_id: Common.channelShortIdToIntegerId(clChannel.short_channel_id),
    capacity: clChannel.satoshis,
    capacity: (clChannel.amount_msat / 1000).toString(),
    last_update: clChannel.last_update ?? 0,
    node1_policy: convertPolicy(clChannel),
    node2_policy: null,

@@ -148,8 +257,8 @@ async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Cha
function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
  return {
    time_lock_delta: clChannel.delay,
    min_htlc: clChannel.htlc_minimum_msat.slice(0, -4),
    max_htlc_msat: clChannel.htlc_maximum_msat.slice(0, -4),
    min_htlc: clChannel.htlc_minimum_msat.toString(),
    max_htlc_msat: clChannel.htlc_maximum_msat.toString(),
    fee_base_msat: clChannel.base_fee_millisatoshi,
    fee_rate_milli_msat: clChannel.fee_per_millionth,
    disabled: !clChannel.active,
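A small sketch of the unit handling this change assumes (values hypothetical): newer Core Lightning responses expose plain millisatoshi integers, so the old logic that stripped a trailing 'msat' suffix with slice(0, -4) gives way to simple arithmetic and toString().

const amountMsat = 1_234_000;                         // channel amount in msat
const capacitySats = (amountMsat / 1000).toString();  // '1234' satoshis, as a string
const minHtlcMsat = 1000;
const minHtlc = minHtlcMsat.toString();               // '1000', kept in msat, just stringified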
@@ -79,6 +79,7 @@ export namespace ILightningApi {
  }

  export interface Feature {
    bit: number;
    name: string;
    is_required: boolean;
    is_known: boolean;
@@ -41,8 +41,23 @@ class LndApi implements AbstractLightningApi {
  }

  async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
    return axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
    const graph = await axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
      .then((response) => response.data);

    for (const node of graph.nodes) {
      const nodeFeatures: ILightningApi.Feature[] = [];
      for (const bit in node.features) {
        nodeFeatures.push({
          bit: parseInt(bit, 10),
          name: node.features[bit].name,
          is_required: node.features[bit].is_required,
          is_known: node.features[bit].is_known,
        });
      }
      node.features = nodeFeatures;
    }

    return graph;
  }
}

@ -1,3 +1,4 @@
|
|||
import { GbtGenerator, GbtResult, ThreadTransaction as RustThreadTransaction } from '../../rust-gbt';
|
||||
import logger from '../logger';
|
||||
import { MempoolBlock, MempoolTransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor, CompactThreadTransaction, EffectiveFeeStats } from '../mempool.interfaces';
|
||||
import { Common, OnlineFeeStatsCalculator } from './common';
|
||||
|
@ -5,16 +6,18 @@ import config from '../config';
|
|||
import { Worker } from 'worker_threads';
|
||||
import path from 'path';
|
||||
|
||||
const MAX_UINT32 = Math.pow(2, 32) - 1;
|
||||
|
||||
class MempoolBlocks {
|
||||
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
|
||||
private mempoolBlockDeltas: MempoolBlockDelta[] = [];
|
||||
private txSelectionWorker: Worker | null = null;
|
||||
private rustInitialized: boolean = false;
|
||||
private rustGbtGenerator: GbtGenerator = new GbtGenerator();
|
||||
|
||||
private nextUid: number = 1;
|
||||
private uidMap: Map<number, string> = new Map(); // map short numerical uids to full txids
|
||||
|
||||
constructor() {}
|
||||
|
||||
public getMempoolBlocks(): MempoolBlock[] {
|
||||
return this.mempoolBlocks.map((block) => {
|
||||
return {
|
||||
|
@ -40,9 +43,7 @@ class MempoolBlocks {
|
|||
const latestMempool = memPool;
|
||||
const memPoolArray: MempoolTransactionExtended[] = [];
|
||||
for (const i in latestMempool) {
|
||||
if (latestMempool.hasOwnProperty(i)) {
|
||||
memPoolArray.push(latestMempool[i]);
|
||||
}
|
||||
memPoolArray.push(latestMempool[i]);
|
||||
}
|
||||
const start = new Date().getTime();
|
||||
|
||||
|
@ -143,7 +144,7 @@ class MempoolBlocks {
|
|||
const stackWeight = transactionsSorted.slice(index).reduce((total, tx) => total + (tx.weight || 0), 0);
|
||||
if (stackWeight > config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
|
||||
onlineStats = true;
|
||||
feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5);
|
||||
feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5, [10, 20, 30, 40, 50, 60, 70, 80, 90]);
|
||||
feeStatsCalculator.processNext(tx);
|
||||
}
|
||||
}
|
||||
|
@ -218,16 +219,17 @@ class MempoolBlocks {
|
|||
// to reduce the overhead of passing this data to the worker thread
|
||||
const strippedMempool: Map<number, CompactThreadTransaction> = new Map();
|
||||
Object.values(newMempool).forEach(entry => {
|
||||
if (entry.uid != null) {
|
||||
strippedMempool.set(entry.uid, {
|
||||
if (entry.uid !== null && entry.uid !== undefined) {
|
||||
const stripped = {
|
||||
uid: entry.uid,
|
||||
fee: entry.fee,
|
||||
weight: (entry.adjustedVsize * 4),
|
||||
sigops: entry.sigops,
|
||||
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
|
||||
effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize,
|
||||
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => uid != null) as number[],
|
||||
});
|
||||
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[],
|
||||
};
|
||||
strippedMempool.set(entry.uid, stripped);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -260,8 +262,10 @@ class MempoolBlocks {
|
|||
// clean up thread error listener
|
||||
this.txSelectionWorker?.removeListener('error', threadErrorListener);
|
||||
|
||||
const processed = this.processBlockTemplates(newMempool, blocks, rates, clusters, saveResults);
|
||||
const processed = this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
|
||||
|
||||
logger.debug(`makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
|
||||
|
||||
return processed;
|
||||
} catch (e) {
|
||||
logger.err('makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
|
||||
|
@ -279,12 +283,12 @@ class MempoolBlocks {
|
|||
const start = Date.now();
|
||||
|
||||
for (const tx of Object.values(added)) {
|
||||
this.setUid(tx);
|
||||
this.setUid(tx, true);
|
||||
}
|
||||
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => uid != null) as number[];
|
||||
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
|
||||
// prepare a stripped down version of the mempool with only the minimum necessary data
|
||||
// to reduce the overhead of passing this data to the worker thread
|
||||
const addedStripped: CompactThreadTransaction[] = added.filter(entry => entry.uid != null).map(entry => {
|
||||
const addedStripped: CompactThreadTransaction[] = added.filter(entry => (entry.uid !== null && entry.uid !== undefined)).map(entry => {
|
||||
return {
|
||||
uid: entry.uid || 0,
|
||||
fee: entry.fee,
|
||||
|
@ -292,7 +296,7 @@ class MempoolBlocks {
|
|||
sigops: entry.sigops,
|
||||
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
|
||||
effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize,
|
||||
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => uid != null) as number[],
|
||||
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[],
|
||||
};
|
||||
});
|
||||
|
||||
|
@ -314,84 +318,131 @@ class MempoolBlocks {
|
|||
// clean up thread error listener
|
||||
this.txSelectionWorker?.removeListener('error', threadErrorListener);
|
||||
|
||||
this.processBlockTemplates(newMempool, blocks, rates, clusters, saveResults);
|
||||
this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
|
||||
logger.debug(`updateBlockTemplates completed in ${(Date.now() - start) / 1000} seconds`);
|
||||
} catch (e) {
|
||||
logger.err('updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private processBlockTemplates(mempool, blocks: string[][], rates: { [root: string]: number }, clusters: { [root: string]: string[] }, saveResults): MempoolBlockWithTransactions[] {
|
||||
for (const txid of Object.keys(rates)) {
|
||||
private resetRustGbt(): void {
|
||||
this.rustInitialized = false;
|
||||
this.rustGbtGenerator = new GbtGenerator();
|
||||
}
|
||||
|
||||
private async $rustMakeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
|
||||
const start = Date.now();
|
||||
|
||||
// reset mempool short ids
|
||||
if (saveResults) {
|
||||
this.resetUids();
|
||||
}
|
||||
// set missing short ids
|
||||
for (const tx of Object.values(newMempool)) {
|
||||
this.setUid(tx, !saveResults);
|
||||
}
|
||||
// set short ids for transaction inputs
|
||||
for (const tx of Object.values(newMempool)) {
|
||||
tx.inputs = tx.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[];
|
||||
}
|
||||
|
||||
// run the block construction algorithm in a separate thread, and wait for a result
|
||||
const rustGbt = saveResults ? this.rustGbtGenerator : new GbtGenerator();
|
||||
try {
|
||||
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
|
||||
await rustGbt.make(Object.values(newMempool) as RustThreadTransaction[], this.nextUid),
|
||||
);
|
||||
if (saveResults) {
|
||||
this.rustInitialized = true;
|
||||
}
|
||||
const processed = this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, saveResults);
|
||||
logger.debug(`RUST makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
|
||||
return processed;
|
||||
} catch (e) {
|
||||
logger.err('RUST makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
|
||||
if (saveResults) {
|
||||
this.resetRustGbt();
|
||||
}
|
||||
}
|
||||
return this.mempoolBlocks;
|
||||
}
|
||||
|
||||
public async $oneOffRustBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }): Promise<MempoolBlockWithTransactions[]> {
|
||||
return this.$rustMakeBlockTemplates(newMempool, false);
|
||||
}
|
||||
|
||||
public async $rustUpdateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[]): Promise<void> {
|
||||
// GBT optimization requires that uids never get too sparse
|
||||
// as a sanity check, we should also explicitly prevent uint32 uid overflow
|
||||
if (this.nextUid + added.length >= Math.min(Math.max(262144, 2 * mempoolSize), MAX_UINT32)) {
|
||||
this.resetRustGbt();
|
||||
}
|
||||
if (!this.rustInitialized) {
|
||||
// need to reset the worker
|
||||
await this.$rustMakeBlockTemplates(newMempool, true);
|
||||
return;
|
||||
}
|
||||
|
||||
const start = Date.now();
|
||||
// set missing short ids
|
||||
for (const tx of added) {
|
||||
this.setUid(tx, true);
|
||||
}
|
||||
// set short ids for transaction inputs
|
||||
for (const tx of added) {
|
||||
tx.inputs = tx.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[];
|
||||
}
|
||||
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
|
||||
|
||||
// run the block construction algorithm in a separate thread, and wait for a result
|
||||
try {
|
||||
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
|
||||
await this.rustGbtGenerator.update(
|
||||
added as RustThreadTransaction[],
|
||||
removedUids,
|
||||
this.nextUid,
|
||||
),
|
||||
);
|
||||
const resultMempoolSize = blocks.reduce((total, block) => total + block.length, 0);
|
||||
if (mempoolSize !== resultMempoolSize) {
|
||||
throw new Error('GBT returned wrong number of transactions, cache is probably out of sync');
|
||||
} else {
|
||||
this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, true);
|
||||
}
|
||||
this.removeUids(removedUids);
|
||||
logger.debug(`RUST updateBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
|
||||
} catch (e) {
|
||||
logger.err('RUST updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
|
||||
this.resetRustGbt();
|
||||
}
|
||||
}
|
||||
|
||||
private processBlockTemplates(mempool: { [txid: string]: MempoolTransactionExtended }, blocks: string[][], blockWeights: number[] | null, rates: [string, number][], clusters: string[][], saveResults): MempoolBlockWithTransactions[] {
|
||||
for (const [txid, rate] of rates) {
|
||||
if (txid in mempool) {
|
||||
mempool[txid].effectiveFeePerVsize = rates[txid];
|
||||
mempool[txid].effectiveFeePerVsize = rate;
|
||||
mempool[txid].cpfpChecked = false;
|
||||
}
|
||||
}
|
||||
|
||||
const lastBlockIndex = blocks.length - 1;
|
||||
let hasBlockStack = blocks.length >= 8;
|
||||
let stackWeight;
|
||||
let feeStatsCalculator: OnlineFeeStatsCalculator | void;
|
||||
if (hasBlockStack) {
|
||||
stackWeight = blocks[blocks.length - 1].reduce((total, tx) => total + (mempool[tx]?.weight || 0), 0);
|
||||
hasBlockStack = stackWeight > config.MEMPOOL.BLOCK_WEIGHT_UNITS;
|
||||
feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5);
|
||||
}
|
||||
|
||||
const readyBlocks: { transactionIds, transactions, totalSize, totalWeight, totalFees, feeStats }[] = [];
|
||||
const sizeLimit = (config.MEMPOOL.BLOCK_WEIGHT_UNITS / 4) * 1.2;
|
||||
// update this thread's mempool with the results
|
||||
for (let blockIndex = 0; blockIndex < blocks.length; blockIndex++) {
|
||||
const block: string[] = blocks[blockIndex];
|
||||
let txid: string;
|
||||
let mempoolTx: MempoolTransactionExtended;
|
||||
let totalSize = 0;
|
||||
let totalVsize = 0;
|
||||
let totalWeight = 0;
|
||||
let totalFees = 0;
|
||||
const transactions: MempoolTransactionExtended[] = [];
|
||||
for (let txIndex = 0; txIndex < block.length; txIndex++) {
|
||||
txid = block[txIndex];
|
||||
if (txid) {
|
||||
mempoolTx = mempool[txid];
|
||||
// save position in projected blocks
|
||||
mempoolTx.position = {
|
||||
block: blockIndex,
|
||||
vsize: totalVsize + (mempoolTx.vsize / 2),
|
||||
};
|
||||
mempoolTx.ancestors = [];
|
||||
mempoolTx.descendants = [];
|
||||
mempoolTx.bestDescendant = null;
|
||||
mempoolTx.cpfpChecked = true;
|
||||
|
||||
// online calculation of stack-of-blocks fee stats
|
||||
if (hasBlockStack && blockIndex === blocks.length - 1 && feeStatsCalculator) {
|
||||
feeStatsCalculator.processNext(mempoolTx);
|
||||
}
|
||||
|
||||
totalSize += mempoolTx.size;
|
||||
totalVsize += mempoolTx.vsize;
|
||||
totalWeight += mempoolTx.weight;
|
||||
totalFees += mempoolTx.fee;
|
||||
|
||||
if (totalVsize <= sizeLimit) {
|
||||
transactions.push(mempoolTx);
|
||||
}
|
||||
}
|
||||
if (blockWeights && blockWeights[7] !== null) {
|
||||
stackWeight = blockWeights[7];
|
||||
} else {
|
||||
stackWeight = blocks[lastBlockIndex].reduce((total, tx) => total + (mempool[tx]?.weight || 0), 0);
|
||||
}
|
||||
readyBlocks.push({
|
||||
transactionIds: block,
|
||||
transactions,
|
||||
totalSize,
|
||||
totalWeight,
|
||||
totalFees,
|
||||
feeStats: (hasBlockStack && blockIndex === blocks.length - 1 && feeStatsCalculator) ? feeStatsCalculator.getRawFeeStats() : undefined,
|
||||
});
|
||||
hasBlockStack = stackWeight > config.MEMPOOL.BLOCK_WEIGHT_UNITS;
|
||||
feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5, [10, 20, 30, 40, 50, 60, 70, 80, 90]);
|
||||
}
|
||||
|
||||
for (const cluster of Object.values(clusters)) {
|
||||
for (const cluster of clusters) {
|
||||
for (const memberTxid of cluster) {
|
||||
if (memberTxid in mempool) {
|
||||
const mempoolTx = mempool[memberTxid];
|
||||
const mempoolTx = mempool[memberTxid];
|
||||
if (mempoolTx) {
|
||||
const ancestors: Ancestor[] = [];
|
||||
const descendants: Ancestor[] = [];
|
||||
let matched = false;
|
||||
|
@ -406,20 +457,68 @@ class MempoolBlocks {
|
|||
};
|
||||
if (matched) {
|
||||
descendants.push(relative);
|
||||
mempoolTx.lastBoosted = Math.max(mempoolTx.lastBoosted || 0, mempool[txid].firstSeen || 0);
|
||||
} else {
|
||||
ancestors.push(relative);
|
||||
}
|
||||
}
|
||||
});
|
||||
mempoolTx.ancestors = ancestors;
|
||||
mempoolTx.descendants = descendants;
|
||||
mempoolTx.bestDescendant = null;
|
||||
Object.assign(mempoolTx, {ancestors, descendants, bestDescendant: null, cpfpChecked: true});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const mempoolBlocks = readyBlocks.map((b, index) => {
|
||||
return this.dataToMempoolBlocks(b.transactionIds, b.transactions, b.totalSize, b.totalWeight, b.totalFees, b.feeStats);
|
||||
const sizeLimit = (config.MEMPOOL.BLOCK_WEIGHT_UNITS / 4) * 1.2;
|
||||
// update this thread's mempool with the results
|
||||
let mempoolTx: MempoolTransactionExtended;
|
||||
const mempoolBlocks: MempoolBlockWithTransactions[] = blocks.map((block, blockIndex) => {
|
||||
let totalSize = 0;
|
||||
let totalVsize = 0;
|
||||
let totalWeight = 0;
|
||||
let totalFees = 0;
|
||||
const transactions: MempoolTransactionExtended[] = [];
|
||||
for (const txid of block) {
|
||||
if (txid) {
|
||||
mempoolTx = mempool[txid];
|
||||
// save position in projected blocks
|
||||
mempoolTx.position = {
|
||||
block: blockIndex,
|
||||
vsize: totalVsize + (mempoolTx.vsize / 2),
|
||||
};
|
||||
if (!mempoolTx.cpfpChecked) {
|
||||
if (mempoolTx.ancestors?.length) {
|
||||
mempoolTx.ancestors = [];
|
||||
}
|
||||
if (mempoolTx.descendants?.length) {
|
||||
mempoolTx.descendants = [];
|
||||
}
|
||||
mempoolTx.bestDescendant = null;
|
||||
mempoolTx.cpfpChecked = true;
|
||||
}
|
||||
|
||||
// online calculation of stack-of-blocks fee stats
|
||||
if (hasBlockStack && blockIndex === lastBlockIndex && feeStatsCalculator) {
|
||||
feeStatsCalculator.processNext(mempoolTx);
|
||||
}
|
||||
|
||||
totalSize += mempoolTx.size;
|
||||
totalVsize += mempoolTx.vsize;
|
||||
totalWeight += mempoolTx.weight;
|
||||
totalFees += mempoolTx.fee;
|
||||
|
||||
if (totalVsize <= sizeLimit) {
|
||||
transactions.push(mempoolTx);
|
||||
}
|
||||
}
|
||||
}
|
||||
return this.dataToMempoolBlocks(
|
||||
block,
|
||||
transactions,
|
||||
totalSize,
|
||||
totalWeight,
|
||||
totalFees,
|
||||
(hasBlockStack && blockIndex === lastBlockIndex && feeStatsCalculator) ? feeStatsCalculator.getRawFeeStats() : undefined,
|
||||
);
|
||||
});
|
||||
|
||||
if (saveResults) {
|
||||
|
@@ -452,16 +551,20 @@ class MempoolBlocks {
    this.nextUid = 1;
  }

  private setUid(tx: MempoolTransactionExtended): number {
    const uid = this.nextUid;
    this.nextUid++;
    this.uidMap.set(uid, tx.txid);
    tx.uid = uid;
    return uid;
  private setUid(tx: MempoolTransactionExtended, skipSet = false): number {
    if (tx.uid === null || tx.uid === undefined || !skipSet) {
      const uid = this.nextUid;
      this.nextUid++;
      this.uidMap.set(uid, tx.txid);
      tx.uid = uid;
      return uid;
    } else {
      return tx.uid;
    }
  }

  private getUid(tx: MempoolTransactionExtended): number | void {
    if (tx?.uid != null && this.uidMap.has(tx.uid)) {
    if (tx?.uid !== null && tx?.uid !== undefined && this.uidMap.has(tx.uid)) {
      return tx.uid;
    }
  }

@@ -496,6 +599,28 @@ class MempoolBlocks {
    }
    return { blocks: convertedBlocks, rates: convertedRates, clusters: convertedClusters } as { blocks: string[][], rates: { [root: string]: number }, clusters: { [root: string]: string[] }};
  }

  private convertNapiResultTxids({ blocks, blockWeights, rates, clusters }: GbtResult)
    : { blocks: string[][], blockWeights: number[], rates: [string, number][], clusters: string[][] } {
    const convertedBlocks: string[][] = blocks.map(block => block.map(uid => {
      const txid = this.uidMap.get(uid);
      if (txid !== undefined) {
        return txid;
      } else {
        throw new Error('GBT returned a block containing a transaction with unknown uid');
      }
    }));
    const convertedRates: [string, number][] = [];
    for (const [rateUid, rate] of rates) {
      const rateTxid = this.uidMap.get(rateUid) as string;
      convertedRates.push([rateTxid, rate]);
    }
    const convertedClusters: string[][] = [];
    for (const cluster of clusters) {
      convertedClusters.push(cluster.map(uid => this.uidMap.get(uid)) as string[]);
    }
    return { blocks: convertedBlocks, blockWeights, rates: convertedRates, clusters: convertedClusters };
  }
}

export default new MempoolBlocks();
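A minimal standalone sketch (not the class above) of the short-uid scheme this file relies on: every txid is assigned a compact uint32 uid so only small integers cross the worker/NAPI boundary, and results are mapped back through uidMap.

const uidMap = new Map<number, string>();
let nextUid = 1;

function assignUid(txid: string): number {
  const uid = nextUid++;
  uidMap.set(uid, txid);   // remember the mapping so results can be translated back
  return uid;
}

const uid = assignUid('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'); // hypothetical txid
const txid = uidMap.get(uid); // round-trips back to the original txid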
@ -1,5 +1,5 @@
|
|||
import config from '../config';
|
||||
import bitcoinApi from './bitcoin/bitcoin-api-factory';
|
||||
import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
|
||||
import { MempoolTransactionExtended, TransactionExtended, VbytesPerSecond } from '../mempool.interfaces';
|
||||
import logger from '../logger';
|
||||
import { Common } from './common';
|
||||
|
@ -9,6 +9,7 @@ import loadingIndicators from './loading-indicators';
|
|||
import bitcoinClient from './bitcoin/bitcoin-client';
|
||||
import bitcoinSecondClient from './bitcoin/bitcoin-second-client';
|
||||
import rbfCache from './rbf-cache';
|
||||
import { IEsploraApi } from './bitcoin/esplora-api.interface';
|
||||
|
||||
class Mempool {
|
||||
private inSync: boolean = false;
|
||||
|
@ -19,7 +20,7 @@ class Mempool {
|
|||
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
|
||||
private mempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, newTransactions: MempoolTransactionExtended[],
|
||||
deletedTransactions: MempoolTransactionExtended[]) => void) | undefined;
|
||||
private $asyncMempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, newTransactions: MempoolTransactionExtended[],
|
||||
private $asyncMempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, mempoolSize: number, newTransactions: MempoolTransactionExtended[],
|
||||
deletedTransactions: MempoolTransactionExtended[]) => Promise<void>) | undefined;
|
||||
|
||||
private txPerSecondArray: number[] = [];
|
||||
|
@ -69,7 +70,7 @@ class Mempool {
|
|||
this.mempoolChangedCallback = fn;
|
||||
}
|
||||
|
||||
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; },
|
||||
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; }, mempoolSize: number,
|
||||
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]) => Promise<void>): void {
|
||||
this.$asyncMempoolChangedCallback = fn;
|
||||
}
|
||||
|
@@ -84,20 +85,63 @@ class Mempool {

  public async $setMempool(mempoolData: { [txId: string]: MempoolTransactionExtended }) {
    this.mempoolCache = mempoolData;
    let count = 0;
    for (const txid of Object.keys(this.mempoolCache)) {
      if (this.mempoolCache[txid].sigops == null || this.mempoolCache[txid].effectiveFeePerVsize == null) {
      if (!this.mempoolCache[txid].sigops || this.mempoolCache[txid].effectiveFeePerVsize == null) {
        this.mempoolCache[txid] = transactionUtils.extendMempoolTransaction(this.mempoolCache[txid]);
      }
      if (this.mempoolCache[txid].order == null) {
        this.mempoolCache[txid].order = transactionUtils.txidToOrdering(txid);
      }
      count++;
    }
    if (this.mempoolChangedCallback) {
      this.mempoolChangedCallback(this.mempoolCache, [], []);
    }
    if (this.$asyncMempoolChangedCallback) {
      await this.$asyncMempoolChangedCallback(this.mempoolCache, [], []);
      await this.$asyncMempoolChangedCallback(this.mempoolCache, count, [], []);
    }
    this.addToSpendMap(Object.values(this.mempoolCache));
  }

  public async $reloadMempool(expectedCount: number): Promise<MempoolTransactionExtended[]> {
    let count = 0;
    let done = false;
    let last_txid;
    const newTransactions: MempoolTransactionExtended[] = [];
    loadingIndicators.setProgress('mempool', count / expectedCount * 100);
    while (!done) {
      try {
        const result = await bitcoinApi.$getMempoolTransactions(last_txid);
        if (result) {
          for (const tx of result) {
            const extendedTransaction = transactionUtils.extendMempoolTransaction(tx);
            if (!this.mempoolCache[extendedTransaction.txid]) {
              newTransactions.push(extendedTransaction);
              this.mempoolCache[extendedTransaction.txid] = extendedTransaction;
            }
            count++;
          }
          logger.info(`Fetched ${count} of ${expectedCount} mempool transactions from esplora`);
          if (result.length > 0) {
            last_txid = result[result.length - 1].txid;
          } else {
            done = true;
          }
          if (Math.floor((count / expectedCount) * 100) < 100) {
            loadingIndicators.setProgress('mempool', count / expectedCount * 100);
          }
        } else {
          done = true;
        }
      } catch(err) {
        logger.err('failed to fetch bulk mempool transactions from esplora');
      }
    }
    return newTransactions;
    logger.info(`Done inserting loaded mempool transactions into local cache`);
  }
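A hedged sketch of the cursor-style paging $reloadMempool performs (assuming $getMempoolTransactions wraps a paginated bulk-mempool endpoint keyed by the last seen txid; the exact backend surface may differ): each call returns the page after last_txid until an empty page comes back.

async function fetchAllMempoolTxs(): Promise<void> {
  let lastTxid: string | undefined;
  while (true) {
    const page = await bitcoinApi.$getMempoolTransactions(lastTxid); // bitcoinApi as imported above
    if (!page || page.length === 0) {
      break;                                  // no more pages
    }
    lastTxid = page[page.length - 1].txid;    // advance the cursor
  }
}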
public async $updateMemPoolInfo() {
|
||||
this.mempoolInfo = await this.$getMempoolInfo();
|
||||
}
|
||||
|
@ -138,7 +182,7 @@ class Mempool {
|
|||
const currentMempoolSize = Object.keys(this.mempoolCache).length;
|
||||
this.updateTimerProgress(timer, 'got raw mempool');
|
||||
const diff = transactions.length - currentMempoolSize;
|
||||
const newTransactions: MempoolTransactionExtended[] = [];
|
||||
let newTransactions: MempoolTransactionExtended[] = [];
|
||||
|
||||
this.mempoolCacheDelta = Math.abs(diff);
|
||||
|
||||
|
@ -156,41 +200,59 @@ class Mempool {
|
|||
}
|
||||
};
|
||||
|
||||
let loggerTimer = new Date().getTime() / 1000;
|
||||
for (const txid of transactions) {
|
||||
if (!this.mempoolCache[txid]) {
|
||||
try {
|
||||
const transaction = await transactionUtils.$getMempoolTransactionExtended(txid, false, false, false);
|
||||
this.updateTimerProgress(timer, 'fetched new transaction');
|
||||
this.mempoolCache[txid] = transaction;
|
||||
if (this.inSync) {
|
||||
this.txPerSecondArray.push(new Date().getTime());
|
||||
this.vBytesPerSecondArray.push({
|
||||
unixTime: new Date().getTime(),
|
||||
vSize: transaction.vsize,
|
||||
});
|
||||
let intervalTimer = Date.now();
|
||||
|
||||
let loaded = false;
|
||||
if (config.MEMPOOL.BACKEND === 'esplora' && currentMempoolSize < transactions.length * 0.5 && transactions.length > 20_000) {
|
||||
this.inSync = false;
|
||||
logger.info(`Missing ${transactions.length - currentMempoolSize} mempool transactions, attempting to reload in bulk from esplora`);
|
||||
try {
|
||||
newTransactions = await this.$reloadMempool(transactions.length);
|
||||
loaded = true;
|
||||
} catch (e) {
|
||||
logger.err('failed to load mempool in bulk from esplora, falling back to fetching individual transactions');
|
||||
}
|
||||
}
|
||||
|
||||
if (!loaded) {
|
||||
for (const txid of transactions) {
|
||||
if (!this.mempoolCache[txid]) {
|
||||
try {
|
||||
const transaction = await transactionUtils.$getMempoolTransactionExtended(txid, false, false, false);
|
||||
this.updateTimerProgress(timer, 'fetched new transaction');
|
||||
this.mempoolCache[txid] = transaction;
|
||||
if (this.inSync) {
|
||||
this.txPerSecondArray.push(new Date().getTime());
|
||||
this.vBytesPerSecondArray.push({
|
||||
unixTime: new Date().getTime(),
|
||||
vSize: transaction.vsize,
|
||||
});
|
||||
}
|
||||
hasChange = true;
|
||||
newTransactions.push(transaction);
|
||||
} catch (e: any) {
|
||||
if (config.MEMPOOL.BACKEND === 'esplora' && e.response?.status === 404) {
|
||||
this.missingTxCount++;
|
||||
}
|
||||
logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
if (Date.now() - intervalTimer > 5_000) {
|
||||
if (this.inSync) {
|
||||
// Break and restart mempool loop if we spend too much time processing
|
||||
// new transactions that may lead to falling behind on block height
|
||||
logger.debug('Breaking mempool loop because the 5s time limit exceeded.');
|
||||
break;
|
||||
} else {
|
||||
const progress = (currentMempoolSize + newTransactions.length) / transactions.length * 100;
|
||||
logger.debug(`Mempool is synchronizing. Processed ${newTransactions.length}/${diff} txs (${Math.round(progress)}%)`);
|
||||
if (Math.floor(progress) < 100) {
|
||||
loadingIndicators.setProgress('mempool', progress);
|
||||
}
|
||||
intervalTimer = Date.now()
|
||||
}
|
||||
hasChange = true;
|
||||
newTransactions.push(transaction);
|
||||
} catch (e: any) {
|
||||
if (config.MEMPOOL.BACKEND === 'esplora' && e.response?.status === 404) {
|
||||
this.missingTxCount++;
|
||||
}
|
||||
logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 4) {
|
||||
const progress = (currentMempoolSize + newTransactions.length) / transactions.length * 100;
|
||||
logger.debug(`Mempool is synchronizing. Processed ${newTransactions.length}/${diff} txs (${Math.round(progress)}%)`);
|
||||
loadingIndicators.setProgress('mempool', progress);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
// Break and restart mempool loop if we spend too much time processing
|
||||
// new transactions that may lead to falling behind on block height
|
||||
if (this.inSync && (new Date().getTime()) - start > 10_000) {
|
||||
logger.debug('Breaking mempool loop because the 10s time limit exceeded.');
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -235,26 +297,27 @@ class Mempool {
|
|||
}
|
||||
}
|
||||
|
||||
const newMempoolSize = currentMempoolSize + newTransactions.length - deletedTransactions.length;
|
||||
const newTransactionsStripped = newTransactions.map((tx) => Common.stripTransaction(tx));
|
||||
this.latestTransactions = newTransactionsStripped.concat(this.latestTransactions).slice(0, 6);
|
||||
|
||||
if (!this.inSync && transactions.length === Object.keys(this.mempoolCache).length) {
|
||||
this.inSync = true;
|
||||
logger.notice('The mempool is now in sync!');
|
||||
loadingIndicators.setProgress('mempool', 100);
|
||||
}
|
||||
|
||||
this.mempoolCacheDelta = Math.abs(transactions.length - Object.keys(this.mempoolCache).length);
|
||||
this.mempoolCacheDelta = Math.abs(transactions.length - newMempoolSize);
|
||||
|
||||
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
|
||||
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
|
||||
}
|
||||
if (this.$asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
|
||||
this.updateTimerProgress(timer, 'running async mempool callback');
|
||||
await this.$asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
|
||||
await this.$asyncMempoolChangedCallback(this.mempoolCache, newMempoolSize, newTransactions, deletedTransactions);
|
||||
this.updateTimerProgress(timer, 'completed async mempool callback');
|
||||
}
|
||||
|
||||
if (!this.inSync && transactions.length === newMempoolSize) {
|
||||
this.inSync = true;
|
||||
logger.notice('The mempool is now in sync!');
|
||||
loadingIndicators.setProgress('mempool', 100);
|
||||
}
|
||||
|
||||
const end = new Date().getTime();
|
||||
const time = end - start;
|
||||
logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`);
|
||||
|
|
|
@ -106,6 +106,7 @@ class Mining {
|
|||
emptyBlocks: emptyBlocksCount.length > 0 ? emptyBlocksCount[0]['count'] : 0,
|
||||
slug: poolInfo.slug,
|
||||
avgMatchRate: poolInfo.avgMatchRate !== null ? Math.round(100 * poolInfo.avgMatchRate) / 100 : null,
|
||||
avgFeeDelta: poolInfo.avgFeeDelta,
|
||||
};
|
||||
poolsStats.push(poolStat);
|
||||
});
|
||||
|
|
|
@ -6,6 +6,7 @@ import { Common } from "./common";
|
|||
interface RbfTransaction extends TransactionStripped {
|
||||
rbf?: boolean;
|
||||
mined?: boolean;
|
||||
fullRbf?: boolean;
|
||||
}
|
||||
|
||||
interface RbfTree {
|
||||
|
@ -17,6 +18,16 @@ interface RbfTree {
|
|||
replaces: RbfTree[];
|
||||
}
|
||||
|
||||
export interface ReplacementInfo {
|
||||
mined: boolean;
|
||||
fullRbf: boolean;
|
||||
txid: string;
|
||||
oldFee: number;
|
||||
oldVsize: number;
|
||||
newFee: number;
|
||||
newVsize: number;
|
||||
}
|
||||
|
||||
class RbfCache {
|
||||
private replacedBy: Map<string, string> = new Map();
|
||||
private replaces: Map<string, string[]> = new Map();
|
||||
|
@ -41,11 +52,15 @@ class RbfCache {
|
|||
this.txs.set(newTx.txid, newTxExtended);
|
||||
|
||||
// maintain rbf trees
|
||||
let fullRbf = false;
|
||||
let txFullRbf = false;
|
||||
let treeFullRbf = false;
|
||||
const replacedTrees: RbfTree[] = [];
|
||||
for (const replacedTxExtended of replaced) {
|
||||
const replacedTx = Common.stripTransaction(replacedTxExtended) as RbfTransaction;
|
||||
replacedTx.rbf = replacedTxExtended.vin.some((v) => v.sequence < 0xfffffffe);
|
||||
if (!replacedTx.rbf) {
|
||||
txFullRbf = true;
|
||||
}
|
||||
this.replacedBy.set(replacedTx.txid, newTx.txid);
|
||||
if (this.treeMap.has(replacedTx.txid)) {
|
||||
const treeId = this.treeMap.get(replacedTx.txid);
|
||||
|
@ -55,7 +70,7 @@ class RbfCache {
|
|||
if (tree) {
|
||||
tree.interval = newTime - tree?.time;
|
||||
replacedTrees.push(tree);
|
||||
fullRbf = fullRbf || tree.fullRbf;
|
||||
treeFullRbf = treeFullRbf || tree.fullRbf || !tree.tx.rbf;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -67,15 +82,16 @@ class RbfCache {
|
|||
fullRbf: !replacedTx.rbf,
|
||||
replaces: [],
|
||||
});
|
||||
fullRbf = fullRbf || !replacedTx.rbf;
|
||||
treeFullRbf = treeFullRbf || !replacedTx.rbf;
|
||||
this.txs.set(replacedTx.txid, replacedTxExtended);
|
||||
}
|
||||
}
|
||||
newTx.fullRbf = txFullRbf;
|
||||
const treeId = replacedTrees[0].tx.txid;
|
||||
const newTree = {
|
||||
tx: newTx,
|
||||
time: newTime,
|
||||
fullRbf,
|
||||
fullRbf: treeFullRbf,
|
||||
replaces: replacedTrees
|
||||
};
|
||||
this.rbfTrees.set(treeId, newTree);
|
||||
|
@ -84,6 +100,24 @@ class RbfCache {
|
|||
this.dirtyTrees.add(treeId);
|
||||
}
|
||||
|
||||
public has(txId: string): boolean {
|
||||
return this.txs.has(txId);
|
||||
}
|
||||
|
||||
public anyInSameTree(txId: string, predicate: (tx: RbfTransaction) => boolean): boolean {
|
||||
const tree = this.getRbfTree(txId);
|
||||
if (!tree) {
|
||||
return false;
|
||||
}
|
||||
const txs = this.getTransactionsInTree(tree);
|
||||
for (const tx of txs) {
|
||||
if (predicate(tx)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public getReplacedBy(txId: string): string | undefined {
|
||||
return this.replacedBy.get(txId);
|
||||
}
|
||||
|
@ -169,6 +203,19 @@ class RbfCache {
|
|||
}
|
||||
}
|
||||
|
||||
// is the transaction involved in a full rbf replacement?
|
||||
public isFullRbf(txid: string): boolean {
|
||||
const treeId = this.treeMap.get(txid);
|
||||
if (!treeId) {
|
||||
return false;
|
||||
}
|
||||
const tree = this.rbfTrees.get(treeId);
|
||||
if (!tree) {
|
||||
return false;
|
||||
}
|
||||
return tree?.fullRbf;
|
||||
}
|
||||
|
||||
private cleanup(): void {
|
||||
const now = Date.now();
|
||||
for (const txid of this.expiring.keys()) {
|
||||
|
@@ -336,6 +383,27 @@ class RbfCache {
    }
    return tree;
  }

  public getLatestRbfSummary(): ReplacementInfo[] {
    const rbfList = this.getRbfTrees(false);
    return rbfList.slice(0, 6).map(rbfTree => {
      let oldFee = 0;
      let oldVsize = 0;
      for (const replaced of rbfTree.replaces) {
        oldFee += replaced.tx.fee;
        oldVsize += replaced.tx.vsize;
      }
      return {
        txid: rbfTree.tx.txid,
        mined: !!rbfTree.tx.mined,
        fullRbf: !!rbfTree.tx.fullRbf,
        oldFee,
        oldVsize,
        newFee: rbfTree.tx.fee,
        newVsize: rbfTree.tx.vsize,
      };
    });
  }
}

export default new RbfCache();
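Hedged example of consuming one ReplacementInfo entry produced above (all numbers invented): the summary lets a client compare the aggregate feerate of everything that was replaced against the replacement transaction.

const info = { txid: 'c0ffee', mined: false, fullRbf: false, oldFee: 1000, oldVsize: 200, newFee: 3000, newVsize: 150 };
const oldRate = info.oldFee / info.oldVsize; // 5 sat/vB across all replaced transactions
const newRate = info.newFee / info.newVsize; // 20 sat/vB for the replacement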
@@ -211,7 +211,7 @@ class StatisticsApi {
      CAST(avg(vsize_1800) as DOUBLE) as vsize_1800,
      CAST(avg(vsize_2000) as DOUBLE) as vsize_2000 \
      FROM statistics \
      WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW() \
      ${interval === 'all' ? '' : `WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`} \
      GROUP BY UNIX_TIMESTAMP(added) DIV ${div} \
      ORDER BY statistics.added DESC;`;
  }

@@ -259,7 +259,7 @@ class StatisticsApi {
      vsize_1800,
      vsize_2000 \
      FROM statistics \
      WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW() \
      ${interval === 'all' ? '' : `WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`} \
      GROUP BY UNIX_TIMESTAMP(added) DIV ${div} \
      ORDER BY statistics.added DESC;`;
  }

@@ -386,6 +386,17 @@ class StatisticsApi {
    }
  }

  public async $listAll(): Promise<OptimizedStatistic[]> {
    try {
      const query = this.getQueryForDays(43200, 'all'); // 12h interval
      const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
      return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
    } catch (e) {
      logger.err('$listAll() error' + (e instanceof Error ? e.message : e));
      return [];
    }
  }

  private mapStatisticToOptimizedStatistic(statistic: Statistic[]): OptimizedStatistic[] {
    return statistic.map((s) => {
      return {
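To make the interval guard above concrete, a sketch of how the WHERE clause is built for the new 'all' case (variable values hypothetical): with interval === 'all' the clause disappears and the full statistics history is aggregated into 12-hour buckets.

const interval = 'all';
const div = 43200; // seconds per bucket, i.e. 12 hours
const where = interval === 'all' ? '' : `WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
// where === '' here, so the query groups every row by UNIX_TIMESTAMP(added) DIV 43200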
@ -15,10 +15,11 @@ class StatisticsRoutes {
|
|||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', this.$getStatisticsByTime.bind(this, '2y'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', this.$getStatisticsByTime.bind(this, '3y'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/4y', this.$getStatisticsByTime.bind(this, '4y'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/all', this.$getStatisticsByTime.bind(this, 'all'))
|
||||
;
|
||||
}
|
||||
|
||||
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y' | '4y', req: Request, res: Response) {
|
||||
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y' | '4y' | 'all', req: Request, res: Response) {
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
|
@ -26,10 +27,6 @@ class StatisticsRoutes {
|
|||
try {
|
||||
let result;
|
||||
switch (time as string) {
|
||||
case '2h':
|
||||
result = await statisticsApi.$list2H();
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 30).toUTCString());
|
||||
break;
|
||||
case '24h':
|
||||
result = await statisticsApi.$list24H();
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
|
@ -58,8 +55,13 @@ class StatisticsRoutes {
|
|||
case '4y':
|
||||
result = await statisticsApi.$list4Y();
|
||||
break;
|
||||
case 'all':
|
||||
result = await statisticsApi.$listAll();
|
||||
break;
|
||||
default:
|
||||
result = await statisticsApi.$list2H();
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 30).toUTCString());
|
||||
break;
|
||||
}
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
|
|
|
@ -35,6 +35,13 @@ class TransactionUtils {
|
|||
} else {
|
||||
transaction = await bitcoinApi.$getRawTransaction(txId, false, addPrevouts, lazyPrevouts);
|
||||
}
|
||||
|
||||
if (Common.isLiquid()) {
|
||||
if (!isFinite(Number(transaction.fee))) {
|
||||
transaction.fee = Object.values(transaction.fee || {}).reduce((total, output) => total + output, 0);
|
||||
}
|
||||
}
|
||||
|
||||
if (addMempoolData || !transaction?.status?.confirmed) {
|
||||
return this.extendMempoolTransaction(transaction);
|
||||
} else {
|
||||
|
@ -46,14 +53,13 @@ class TransactionUtils {
|
|||
return (await this.$getTransactionExtended(txId, addPrevouts, lazyPrevouts, forceCore, true)) as MempoolTransactionExtended;
|
||||
}
|
||||
|
||||
private extendTransaction(transaction: IEsploraApi.Transaction): TransactionExtended {
|
||||
public extendTransaction(transaction: IEsploraApi.Transaction): TransactionExtended {
|
||||
// @ts-ignore
|
||||
if (transaction.vsize) {
|
||||
// @ts-ignore
|
||||
return transaction;
|
||||
}
|
||||
const feePerVbytes = Math.max(Common.isLiquid() ? 0.1 : 1,
|
||||
(transaction.fee || 0) / (transaction.weight / 4));
|
||||
const feePerVbytes = (transaction.fee || 0) / (transaction.weight / 4);
|
||||
const transactionExtended: TransactionExtended = Object.assign({
|
||||
vsize: Math.round(transaction.weight / 4),
|
||||
feePerVsize: feePerVbytes,
|
||||
|
@ -68,14 +74,13 @@ class TransactionUtils {
|
|||
public extendMempoolTransaction(transaction: IEsploraApi.Transaction): MempoolTransactionExtended {
|
||||
const vsize = Math.ceil(transaction.weight / 4);
|
||||
const fractionalVsize = (transaction.weight / 4);
|
||||
const sigops = this.countSigops(transaction);
|
||||
const sigops = !Common.isLiquid() ? this.countSigops(transaction) : 0;
|
||||
// https://github.com/bitcoin/bitcoin/blob/e9262ea32a6e1d364fb7974844fadc36f931f8c6/src/policy/policy.cpp#L295-L298
|
||||
const adjustedVsize = Math.max(fractionalVsize, sigops * 5); // adjusted vsize = Max(weight, sigops * bytes_per_sigop) / witness_scale_factor
|
||||
const feePerVbytes = Math.max(Common.isLiquid() ? 0.1 : 1,
|
||||
(transaction.fee || 0) / fractionalVsize);
|
||||
const adjustedFeePerVsize = Math.max(Common.isLiquid() ? 0.1 : 1,
|
||||
(transaction.fee || 0) / adjustedVsize);
|
||||
const feePerVbytes = (transaction.fee || 0) / fractionalVsize;
|
||||
const adjustedFeePerVsize = (transaction.fee || 0) / adjustedVsize;
|
||||
const transactionExtended: MempoolTransactionExtended = Object.assign(transaction, {
|
||||
order: this.txidToOrdering(transaction.txid),
|
||||
vsize: Math.round(transaction.weight / 4),
|
||||
adjustedVsize,
|
||||
sigops,
|
||||
|
@@ -154,6 +159,17 @@ class TransactionUtils {

    return sigops;
  }

  // returns the most significant 4 bytes of the txid as an integer
  public txidToOrdering(txid: string): number {
    return parseInt(
      txid.substr(62, 2) +
      txid.substr(60, 2) +
      txid.substr(58, 2) +
      txid.substr(56, 2),
      16
    );
  }
}

export default new TransactionUtils();
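A worked example of txidToOrdering (txid hypothetical): it takes the last four bytes of the displayed txid, reverses their order, and parses them as a 32-bit integer, giving a cheap deterministic ordering key per transaction.

const txid = '00'.repeat(28) + '01020304';  // 64 hex chars, ending in bytes 01 02 03 04
// substr(62,2) + substr(60,2) + substr(58,2) + substr(56,2) => '04' + '03' + '02' + '01'
const ordering = parseInt('04030201', 16);  // 67305985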
@ -12,7 +12,7 @@ import { Common } from './common';
|
|||
import loadingIndicators from './loading-indicators';
|
||||
import config from '../config';
|
||||
import transactionUtils from './transaction-utils';
|
||||
import rbfCache from './rbf-cache';
|
||||
import rbfCache, { ReplacementInfo } from './rbf-cache';
|
||||
import difficultyAdjustment from './difficulty-adjustment';
|
||||
import feeApi from './fee-api';
|
||||
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
|
||||
|
@ -22,6 +22,14 @@ import { deepClone } from '../utils/clone';
|
|||
import priceUpdater from '../tasks/price-updater';
|
||||
import { ApiPrice } from '../repositories/PricesRepository';
|
||||
|
||||
// valid 'want' subscriptions
|
||||
const wantable = [
|
||||
'blocks',
|
||||
'mempool-blocks',
|
||||
'live-2h-chart',
|
||||
'stats',
|
||||
];
|
||||
|
||||
class WebsocketHandler {
|
||||
private wss: WebSocket.Server | undefined;
|
||||
private extraInitProperties = {};
|
||||
|
@ -30,8 +38,9 @@ class WebsocketHandler {
|
|||
private numConnected = 0;
|
||||
private numDisconnected = 0;
|
||||
|
||||
private initData: { [key: string]: string } = {};
|
||||
private socketData: { [key: string]: string } = {};
|
||||
private serializedInitData: string = '{}';
|
||||
private lastRbfSummary: ReplacementInfo | null = null;
|
||||
|
||||
constructor() { }
|
||||
|
||||
|
@ -39,28 +48,28 @@ class WebsocketHandler {
|
|||
this.wss = wss;
|
||||
}
|
||||
|
||||
setExtraInitProperties(property: string, value: any) {
|
||||
setExtraInitData(property: string, value: any) {
|
||||
this.extraInitProperties[property] = value;
|
||||
this.setInitDataFields(this.extraInitProperties);
|
||||
this.updateSocketDataFields(this.extraInitProperties);
|
||||
}
|
||||
|
||||
private setInitDataFields(data: { [property: string]: any }): void {
|
||||
private updateSocketDataFields(data: { [property: string]: any }): void {
|
||||
for (const property of Object.keys(data)) {
|
||||
if (data[property] != null) {
|
||||
this.initData[property] = JSON.stringify(data[property]);
|
||||
this.socketData[property] = JSON.stringify(data[property]);
|
||||
} else {
|
||||
delete this.initData[property];
|
||||
delete this.socketData[property];
|
||||
}
|
||||
}
|
||||
this.serializedInitData = '{'
|
||||
+ Object.keys(this.initData).map(key => `"${key}": ${this.initData[key]}`).join(', ')
|
||||
+ '}';
|
||||
+ Object.keys(this.socketData).map(key => `"${key}": ${this.socketData[key]}`).join(', ')
|
||||
+ '}';
|
||||
}
|
||||
|
||||
private updateInitData(): void {
|
||||
private updateSocketData(): void {
|
||||
const _blocks = blocks.getBlocks().slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT);
|
||||
const da = difficultyAdjustment.getDifficultyAdjustment();
|
||||
this.setInitDataFields({
|
||||
this.updateSocketDataFields({
|
||||
'mempoolInfo': memPool.getMempoolInfo(),
|
||||
'vBytesPerSecond': memPool.getVBytesPerSecond(),
|
||||
'blocks': _blocks,
|
||||
|
@@ -94,11 +103,33 @@ class WebsocketHandler {
        const parsedMessage: WebsocketResponse = JSON.parse(message);
        const response = {};

        if (parsedMessage.action === 'want') {
          client['want-blocks'] = parsedMessage.data.indexOf('blocks') > -1;
          client['want-mempool-blocks'] = parsedMessage.data.indexOf('mempool-blocks') > -1;
          client['want-live-2h-chart'] = parsedMessage.data.indexOf('live-2h-chart') > -1;
          client['want-stats'] = parsedMessage.data.indexOf('stats') > -1;
        const wantNow = {};
        if (parsedMessage && parsedMessage.action === 'want' && Array.isArray(parsedMessage.data)) {
          for (const sub of wantable) {
            const key = `want-${sub}`;
            const wants = parsedMessage.data.includes(sub);
            if (wants && client['wants'] && !client[key]) {
              wantNow[key] = true;
            }
            client[key] = wants;
          }
          client['wants'] = true;
        }

        // send initial data when a client first starts a subscription
        if (wantNow['want-blocks'] || (parsedMessage && parsedMessage['refresh-blocks'])) {
          response['blocks'] = this.socketData['blocks'];
        }

        if (wantNow['want-mempool-blocks']) {
          response['mempool-blocks'] = this.socketData['mempool-blocks'];
        }

        if (wantNow['want-stats']) {
          response['mempoolInfo'] = this.socketData['mempoolInfo'];
          response['vBytesPerSecond'] = this.socketData['vBytesPerSecond'];
          response['fees'] = this.socketData['fees'];
          response['da'] = this.socketData['da'];
        }

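Hedged example of the client side of the 'want' handshake handled above (endpoint URL and topic names are illustrative and may differ per deployment): the first time a topic appears in data, the matching cached socketData fields are pushed back immediately.

import WebSocket from 'ws';

const ws = new WebSocket('wss://mempool.space/api/v1/ws'); // example endpoint for illustration
ws.on('open', () => {
  ws.send(JSON.stringify({ action: 'init' }));
  ws.send(JSON.stringify({ action: 'want', data: ['blocks', 'stats', 'mempool-blocks'] }));
});
ws.on('message', (msg) => console.log(JSON.parse(msg.toString())));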
if (parsedMessage && parsedMessage['track-tx']) {
|
||||
|
@ -109,21 +140,21 @@ class WebsocketHandler {
|
|||
if (parsedMessage['watch-mempool']) {
|
||||
const rbfCacheTxid = rbfCache.getReplacedBy(trackTxid);
|
||||
if (rbfCacheTxid) {
|
||||
response['txReplaced'] = {
|
||||
response['txReplaced'] = JSON.stringify({
|
||||
txid: rbfCacheTxid,
|
||||
};
|
||||
});
|
||||
client['track-tx'] = null;
|
||||
} else {
|
||||
// It might have appeared before we had the time to start watching for it
|
||||
const tx = memPool.getMempool()[trackTxid];
|
||||
if (tx) {
|
||||
if (config.MEMPOOL.BACKEND === 'esplora') {
|
||||
response['tx'] = tx;
|
||||
response['tx'] = JSON.stringify(tx);
|
||||
} else {
|
||||
// tx.prevout is missing from transactions when in bitcoind mode
|
||||
try {
|
||||
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
|
||||
response['tx'] = fullTx;
|
||||
response['tx'] = JSON.stringify(fullTx);
|
||||
} catch (e) {
|
||||
logger.debug('Error finding transaction: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
@ -131,7 +162,7 @@ class WebsocketHandler {
|
|||
} else {
|
||||
try {
|
||||
const fullTx = await transactionUtils.$getMempoolTransactionExtended(client['track-tx'], true);
|
||||
response['tx'] = fullTx;
|
||||
response['tx'] = JSON.stringify(fullTx);
|
||||
} catch (e) {
|
||||
logger.debug('Error finding transaction. ' + (e instanceof Error ? e.message : e));
|
||||
client['track-mempool-tx'] = parsedMessage['track-tx'];
|
||||
|
@ -141,10 +172,10 @@ class WebsocketHandler {
|
|||
}
|
||||
const tx = memPool.getMempool()[trackTxid];
|
||||
if (tx && tx.position) {
|
||||
response['txPosition'] = {
|
||||
response['txPosition'] = JSON.stringify({
|
||||
txid: trackTxid,
|
||||
position: tx.position,
|
||||
};
|
||||
});
|
||||
}
|
||||
} else {
|
||||
client['track-tx'] = null;
|
||||
|
@@ -152,15 +183,22 @@ class WebsocketHandler {
      }

      if (parsedMessage && parsedMessage['track-address']) {
        if (/^([a-km-zA-HJ-NP-Z1-9]{26,35}|[a-km-zA-HJ-NP-Z1-9]{80}|[a-z]{2,5}1[ac-hj-np-z02-9]{8,100}|[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100})$/
        if (/^([a-km-zA-HJ-NP-Z1-9]{26,35}|[a-km-zA-HJ-NP-Z1-9]{80}|[a-z]{2,5}1[ac-hj-np-z02-9]{8,100}|[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100}|[0-9a-fA-F]{130})$/
          .test(parsedMessage['track-address'])) {
          let matchedAddress = parsedMessage['track-address'];
          if (/^[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100}$/.test(parsedMessage['track-address'])) {
            matchedAddress = matchedAddress.toLowerCase();
          }
          client['track-address'] = matchedAddress;
          if (/^[0-9a-fA-F]{130}$/.test(parsedMessage['track-address'])) {
            client['track-address'] = null;
            client['track-scriptpubkey'] = '41' + matchedAddress + 'ac';
          } else {
            client['track-address'] = matchedAddress;
            client['track-scriptpubkey'] = null;
          }
        } else {
          client['track-address'] = null;
          client['track-scriptpubkey'] = null;
        }
      }
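Why the 130-hex-character branch above builds '41' + pubkey + 'ac': tracking a bare uncompressed public key really means tracking its pay-to-pubkey output script, OP_PUSHBYTES_65 (0x41) <pubkey> OP_CHECKSIG (0xac). A sketch with a made-up key:

const pubkey = '04' + 'ab'.repeat(64);      // 130 hex chars = 65-byte uncompressed key (hypothetical)
const scriptPubKey = '41' + pubkey + 'ac';  // hex of the P2PK script the client is asked to watch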
@ -177,10 +215,10 @@ class WebsocketHandler {
|
|||
const index = parsedMessage['track-mempool-block'];
|
||||
client['track-mempool-block'] = index;
|
||||
const mBlocksWithTransactions = mempoolBlocks.getMempoolBlocksWithTransactions();
|
||||
response['projected-block-transactions'] = {
|
||||
response['projected-block-transactions'] = JSON.stringify({
|
||||
index: index,
|
||||
blockTransactions: mBlocksWithTransactions[index]?.transactions || [],
|
||||
};
|
||||
});
|
||||
} else {
|
||||
client['track-mempool-block'] = null;
|
||||
}
|
||||
|
@ -189,23 +227,35 @@ class WebsocketHandler {
|
|||
if (parsedMessage && parsedMessage['track-rbf'] !== undefined) {
|
||||
if (['all', 'fullRbf'].includes(parsedMessage['track-rbf'])) {
|
||||
client['track-rbf'] = parsedMessage['track-rbf'];
|
||||
response['rbfLatest'] = JSON.stringify(rbfCache.getRbfTrees(parsedMessage['track-rbf'] === 'fullRbf'));
|
||||
} else {
|
||||
client['track-rbf'] = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (parsedMessage.action === 'init') {
|
||||
if (!this.initData['blocks']?.length || !this.initData['da']) {
|
||||
this.updateInitData();
|
||||
if (parsedMessage && parsedMessage['track-rbf-summary'] != null) {
|
||||
if (parsedMessage['track-rbf-summary']) {
|
||||
client['track-rbf-summary'] = true;
|
||||
if (this.socketData['rbfSummary'] != null) {
|
||||
response['rbfLatestSummary'] = this.socketData['rbfSummary'];
|
||||
}
|
||||
} else {
|
||||
client['track-rbf-summary'] = false;
|
||||
}
|
||||
if (!this.initData['blocks']?.length) {
|
||||
}
|
||||
|
||||
if (parsedMessage.action === 'init') {
|
||||
if (!this.socketData['blocks']?.length || !this.socketData['da'] || !this.socketData['backendInfo'] || !this.socketData['conversions']) {
|
||||
this.updateSocketData();
|
||||
}
|
||||
if (!this.socketData['blocks']?.length) {
|
||||
return;
|
||||
}
|
||||
client.send(this.serializedInitData);
|
||||
}
|
||||
|
||||
if (parsedMessage.action === 'ping') {
|
||||
response['pong'] = true;
|
||||
response['pong'] = JSON.stringify(true);
|
||||
}
|
||||
|
||||
if (parsedMessage['track-donation'] && parsedMessage['track-donation'].length === 22) {
|
||||
|
@ -221,7 +271,8 @@ class WebsocketHandler {
|
|||
}
|
||||
|
||||
if (Object.keys(response).length) {
|
||||
client.send(JSON.stringify(response));
|
||||
const serializedResponse = this.serializeResponse(response);
|
||||
client.send(serializedResponse);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.debug('Error parsing websocket message: ' + (e instanceof Error ? e.message : e));
|
||||
|
@ -250,7 +301,7 @@ class WebsocketHandler {
|
|||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
this.setInitDataFields({ 'loadingIndicators': indicators });
|
||||
this.updateSocketDataFields({ 'loadingIndicators': indicators });
|
||||
|
||||
const response = JSON.stringify({ loadingIndicators: indicators });
|
||||
this.wss.clients.forEach((client) => {
|
||||
|
@ -266,7 +317,7 @@ class WebsocketHandler {
|
|||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
this.setInitDataFields({ 'conversions': conversionRates });
|
||||
this.updateSocketDataFields({ 'conversions': conversionRates });
|
||||
|
||||
const response = JSON.stringify({ conversions: conversionRates });
|
||||
this.wss.clients.forEach((client) => {
|
||||
|
@ -301,7 +352,41 @@ class WebsocketHandler {
|
|||
});
|
||||
}
|
||||
|
||||
async $handleMempoolChange(newMempool: { [txid: string]: MempoolTransactionExtended },
|
||||
handleReorg(): void {
|
||||
if (!this.wss) {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
const da = difficultyAdjustment.getDifficultyAdjustment();
|
||||
|
||||
// update init data
|
||||
this.updateSocketDataFields({
|
||||
'blocks': blocks.getBlocks(),
|
||||
'da': da?.previousTime ? da : undefined,
|
||||
});
|
||||
|
||||
this.wss.clients.forEach((client) => {
|
||||
if (client.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
|
||||
const response = {};
|
||||
|
||||
if (client['want-blocks']) {
|
||||
response['blocks'] = this.socketData['blocks'];
|
||||
}
|
||||
if (client['want-stats']) {
|
||||
response['da'] = this.socketData['da'];
|
||||
}
|
||||
|
||||
if (Object.keys(response).length) {
|
||||
const serializedResponse = this.serializeResponse(response);
|
||||
client.send(serializedResponse);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async $handleMempoolChange(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number,
|
||||
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]): Promise<void> {
|
||||
if (!this.wss) {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
|
@ -310,7 +395,11 @@ class WebsocketHandler {
|
|||
this.printLogs();
|
||||
|
||||
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
|
||||
await mempoolBlocks.$updateBlockTemplates(newMempool, newTransactions, deletedTransactions, true);
|
||||
if (config.MEMPOOL.RUST_GBT) {
|
||||
await mempoolBlocks.$rustUpdateBlockTemplates(newMempool, mempoolSize, newTransactions, deletedTransactions);
|
||||
} else {
|
||||
await mempoolBlocks.$updateBlockTemplates(newMempool, newTransactions, deletedTransactions, true);
|
||||
}
|
||||
} else {
|
||||
mempoolBlocks.updateMempoolBlocks(newMempool, true);
|
||||
}
|
||||
|
@ -325,10 +414,13 @@ class WebsocketHandler {
|
|||
const rbfChanges = rbfCache.getRbfChanges();
|
||||
let rbfReplacements;
|
||||
let fullRbfReplacements;
|
||||
let rbfSummary;
|
||||
if (Object.keys(rbfChanges.trees).length) {
|
||||
rbfReplacements = rbfCache.getRbfTrees(false);
|
||||
fullRbfReplacements = rbfCache.getRbfTrees(true);
|
||||
rbfSummary = rbfCache.getLatestRbfSummary();
|
||||
}
|
||||
|
||||
for (const deletedTx of deletedTransactions) {
|
||||
rbfCache.evict(deletedTx.txid);
|
||||
}
|
||||
|
@ -336,11 +428,25 @@ class WebsocketHandler {
|
|||
memPool.addToSpendMap(newTransactions);
|
||||
const recommendedFees = feeApi.getRecommendedFee();
|
||||
|
||||
const latestTransactions = memPool.getLatestTransactions();
|
||||
|
||||
// update init data
|
||||
this.updateInitData();
|
||||
const socketDataFields = {
|
||||
'mempoolInfo': mempoolInfo,
|
||||
'vBytesPerSecond': vBytesPerSecond,
|
||||
'mempool-blocks': mBlocks,
|
||||
'transactions': latestTransactions,
|
||||
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
|
||||
'da': da?.previousTime ? da : undefined,
|
||||
'fees': recommendedFees,
|
||||
};
|
||||
if (rbfSummary) {
|
||||
socketDataFields['rbfSummary'] = rbfSummary;
|
||||
}
|
||||
this.updateSocketDataFields(socketDataFields);
|
||||
|
||||
// cache serialized objects to avoid stringify-ing the same thing for every client
|
||||
const responseCache = { ...this.initData };
|
||||
const responseCache = { ...this.socketData };
|
||||
function getCachedResponse(key: string, data): string {
|
||||
if (!responseCache[key]) {
|
||||
responseCache[key] = JSON.stringify(data);
|
||||
|
@ -371,8 +477,6 @@ class WebsocketHandler {
|
|||
}
|
||||
}
|
||||
|
||||
const latestTransactions = newTransactions.slice(0, 6).map((tx) => Common.stripTransaction(tx));
|
||||
|
||||
this.wss.clients.forEach(async (client) => {
|
||||
if (client.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
|
@ -449,6 +553,44 @@ class WebsocketHandler {
|
|||
}
|
||||
}
|
||||
|
||||
if (client['track-scriptpubkey']) {
|
||||
const foundTransactions: TransactionExtended[] = [];
|
||||
|
||||
for (const tx of newTransactions) {
|
||||
const someVin = tx.vin.some((vin) => !!vin.prevout && vin.prevout.scriptpubkey_type === 'p2pk' && vin.prevout.scriptpubkey === client['track-scriptpubkey']);
|
||||
if (someVin) {
|
||||
if (config.MEMPOOL.BACKEND !== 'esplora') {
|
||||
try {
|
||||
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
|
||||
foundTransactions.push(fullTx);
|
||||
} catch (e) {
|
||||
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
} else {
|
||||
foundTransactions.push(tx);
|
||||
}
|
||||
return;
|
||||
}
|
||||
const someVout = tx.vout.some((vout) => vout.scriptpubkey_type === 'p2pk' && vout.scriptpubkey === client['track-scriptpubkey']);
|
||||
if (someVout) {
|
||||
if (config.MEMPOOL.BACKEND !== 'esplora') {
|
||||
try {
|
||||
const fullTx = await transactionUtils.$getMempoolTransactionExtended(tx.txid, true);
|
||||
foundTransactions.push(fullTx);
|
||||
} catch (e) {
|
||||
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
} else {
|
||||
foundTransactions.push(tx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (foundTransactions.length) {
|
||||
response['address-transactions'] = JSON.stringify(foundTransactions);
|
||||
}
|
||||
}
|
||||
|
||||
if (client['track-asset']) {
|
||||
const foundTransactions: TransactionExtended[] = [];
|
||||
|
||||
|
@ -490,7 +632,7 @@ class WebsocketHandler {
|
|||
if (rbfReplacedBy) {
|
||||
response['rbfTransaction'] = JSON.stringify({
|
||||
txid: rbfReplacedBy,
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
const rbfChange = rbfChanges.map[client['track-tx']];
|
||||
|
@ -507,7 +649,7 @@ class WebsocketHandler {
|
|||
}
|
||||
}
|
||||
|
||||
if (client['track-mempool-block'] >= 0) {
|
||||
if (client['track-mempool-block'] >= 0 && memPool.isInSync()) {
|
||||
const index = client['track-mempool-block'];
|
||||
if (mBlockDeltas[index]) {
|
||||
response['projected-block-transactions'] = getCachedResponse(`projected-block-transactions-${index}`, {
|
||||
|
@ -523,16 +665,18 @@ class WebsocketHandler {
|
|||
response['rbfLatest'] = getCachedResponse('fullrbfLatest', fullRbfReplacements);
|
||||
}
|
||||
|
||||
if (client['track-rbf-summary'] && rbfSummary) {
|
||||
response['rbfLatestSummary'] = getCachedResponse('rbfLatestSummary', rbfSummary);
|
||||
}
|
||||
|
||||
if (Object.keys(response).length) {
|
||||
const serializedResponse = '{'
|
||||
+ Object.keys(response).map(key => `"${key}": ${response[key]}`).join(', ')
|
||||
+ '}';
|
||||
const serializedResponse = this.serializeResponse(response);
|
||||
client.send(serializedResponse);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise<void> {
|
||||
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]): Promise<void> {
|
||||
if (!this.wss) {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
@ -541,7 +685,11 @@ class WebsocketHandler {
|
|||
|
||||
const _memPool = memPool.getMempool();
|
||||
|
||||
if (config.MEMPOOL.AUDIT) {
|
||||
const rbfTransactions = Common.findMinedRbfTransactions(transactions, memPool.getSpendMap());
|
||||
memPool.handleMinedRbfTransactions(rbfTransactions);
|
||||
memPool.removeFromSpendMap(transactions);
|
||||
|
||||
if (config.MEMPOOL.AUDIT && memPool.isInSync()) {
|
||||
let projectedBlocks;
|
||||
let auditMempool = _memPool;
|
||||
// template calculation functions have mempool side effects, so calculate audits using
|
||||
|
@ -550,7 +698,11 @@ class WebsocketHandler {
|
|||
if (separateAudit) {
|
||||
auditMempool = deepClone(_memPool);
|
||||
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
|
||||
projectedBlocks = await mempoolBlocks.$makeBlockTemplates(auditMempool, false);
|
||||
if (config.MEMPOOL.RUST_GBT) {
|
||||
projectedBlocks = await mempoolBlocks.$oneOffRustBlockTemplates(auditMempool);
|
||||
} else {
|
||||
projectedBlocks = await mempoolBlocks.$makeBlockTemplates(auditMempool, false);
|
||||
}
|
||||
} else {
|
||||
projectedBlocks = mempoolBlocks.updateMempoolBlocks(auditMempool, false);
|
||||
}
|
||||
|
@ -558,18 +710,11 @@ class WebsocketHandler {
|
|||
projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
|
||||
}
|
||||
|
||||
if (Common.indexingEnabled() && memPool.isInSync()) {
|
||||
const { censored, added, fresh, sigop, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
|
||||
if (Common.indexingEnabled()) {
|
||||
const { censored, added, fresh, sigop, fullrbf, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
|
||||
const matchRate = Math.round(score * 100 * 100) / 100;
|
||||
|
||||
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
|
||||
return {
|
||||
txid: tx.txid,
|
||||
vsize: tx.vsize,
|
||||
fee: tx.fee ? Math.round(tx.fee) : 0,
|
||||
value: tx.value,
|
||||
};
|
||||
}) : [];
|
||||
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions : [];
|
||||
|
||||
let totalFees = 0;
|
||||
let totalWeight = 0;
|
||||
|
@ -594,6 +739,7 @@ class WebsocketHandler {
|
|||
missingTxs: censored,
|
||||
freshTxs: fresh,
|
||||
sigopTxs: sigop,
|
||||
fullrbfTxs: fullrbf,
|
||||
matchRate: matchRate,
|
||||
expectedFees: totalFees,
|
||||
expectedWeight: totalWeight,
|
||||
|
@ -613,10 +759,6 @@ class WebsocketHandler {
|
|||
}
|
||||
}
|
||||
|
||||
const rbfTransactions = Common.findMinedRbfTransactions(transactions, memPool.getSpendMap());
|
||||
memPool.handleMinedRbfTransactions(rbfTransactions);
|
||||
memPool.removeFromSpendMap(transactions);
|
||||
|
||||
// Update mempool to remove transactions included in the new block
|
||||
for (const txId of txIds) {
|
||||
delete _memPool[txId];
|
||||
|
@ -624,7 +766,11 @@ class WebsocketHandler {
|
|||
}
|
||||
|
||||
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
|
||||
await mempoolBlocks.$makeBlockTemplates(_memPool, true);
|
||||
if (config.MEMPOOL.RUST_GBT) {
|
||||
await mempoolBlocks.$rustUpdateBlockTemplates(_memPool, Object.keys(_memPool).length, [], transactions);
|
||||
} else {
|
||||
await mempoolBlocks.$makeBlockTemplates(_memPool, true);
|
||||
}
|
||||
} else {
|
||||
mempoolBlocks.updateMempoolBlocks(_memPool, true);
|
||||
}
|
||||
|
@ -633,11 +779,19 @@ class WebsocketHandler {
|
|||
|
||||
const da = difficultyAdjustment.getDifficultyAdjustment();
|
||||
const fees = feeApi.getRecommendedFee();
|
||||
const mempoolInfo = memPool.getMempoolInfo();
|
||||
|
||||
// update init data
|
||||
this.updateInitData();
|
||||
this.updateSocketDataFields({
|
||||
'mempoolInfo': mempoolInfo,
|
||||
'blocks': [...blocks.getBlocks(), block].slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT),
|
||||
'mempool-blocks': mBlocks,
|
||||
'loadingIndicators': loadingIndicators.getLoadingIndicators(),
|
||||
'da': da?.previousTime ? da : undefined,
|
||||
'fees': fees,
|
||||
});
|
||||
|
||||
const responseCache = { ...this.initData };
|
||||
const responseCache = { ...this.socketData };
|
||||
function getCachedResponse(key, data): string {
|
||||
if (!responseCache[key]) {
|
||||
responseCache[key] = JSON.stringify(data);
|
||||
|
@ -645,22 +799,26 @@ class WebsocketHandler {
|
|||
return responseCache[key];
|
||||
}
|
||||
|
||||
const mempoolInfo = memPool.getMempoolInfo();
|
||||
|
||||
this.wss.clients.forEach((client) => {
|
||||
if (client.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!client['want-blocks']) {
|
||||
return;
|
||||
const response = {};
|
||||
|
||||
if (client['want-blocks']) {
|
||||
response['block'] = getCachedResponse('block', block);
|
||||
}
|
||||
|
||||
const response = {};
|
||||
response['block'] = getCachedResponse('block', block);
|
||||
response['mempoolInfo'] = getCachedResponse('mempoolInfo', mempoolInfo);
|
||||
response['da'] = getCachedResponse('da', da?.previousTime ? da : undefined);
|
||||
response['fees'] = getCachedResponse('fees', fees);
|
||||
if (client['want-stats']) {
|
||||
response['mempoolInfo'] = getCachedResponse('mempoolInfo', mempoolInfo);
|
||||
response['vBytesPerSecond'] = getCachedResponse('vBytesPerSecond', memPool.getVBytesPerSecond());
|
||||
response['fees'] = getCachedResponse('fees', fees);
|
||||
|
||||
if (da?.previousTime) {
|
||||
response['da'] = getCachedResponse('da', da);
|
||||
}
|
||||
}
|
||||
|
||||
if (mBlocks && client['want-mempool-blocks']) {
|
||||
response['mempool-blocks'] = getCachedResponse('mempool-blocks', mBlocks);
|
||||
|
@ -708,6 +866,33 @@ class WebsocketHandler {
|
|||
}
|
||||
}
|
||||
|
||||
if (client['track-scriptpubkey']) {
|
||||
const foundTransactions: TransactionExtended[] = [];
|
||||
|
||||
transactions.forEach((tx) => {
|
||||
if (tx.vin && tx.vin.some((vin) => !!vin.prevout && vin.prevout.scriptpubkey_type === 'p2pk' && vin.prevout.scriptpubkey === client['track-scriptpubkey'])) {
|
||||
foundTransactions.push(tx);
|
||||
return;
|
||||
}
|
||||
if (tx.vout && tx.vout.some((vout) => vout.scriptpubkey_type === 'p2pk' && vout.scriptpubkey === client['track-scriptpubkey'])) {
|
||||
foundTransactions.push(tx);
|
||||
}
|
||||
});
|
||||
|
||||
if (foundTransactions.length) {
|
||||
foundTransactions.forEach((tx) => {
|
||||
tx.status = {
|
||||
confirmed: true,
|
||||
block_height: block.height,
|
||||
block_hash: block.id,
|
||||
block_time: block.timestamp,
|
||||
};
|
||||
});
|
||||
|
||||
response['block-transactions'] = JSON.stringify(foundTransactions);
|
||||
}
|
||||
}
|
||||
|
||||
if (client['track-asset']) {
|
||||
const foundTransactions: TransactionExtended[] = [];
|
||||
|
||||
|
@ -745,7 +930,7 @@ class WebsocketHandler {
|
|||
}
|
||||
}
|
||||
|
||||
if (client['track-mempool-block'] >= 0) {
|
||||
if (client['track-mempool-block'] >= 0 && memPool.isInSync()) {
|
||||
const index = client['track-mempool-block'];
|
||||
if (mBlockDeltas && mBlockDeltas[index]) {
|
||||
response['projected-block-transactions'] = getCachedResponse(`projected-block-transactions-${index}`, {
|
||||
|
@ -755,11 +940,19 @@ class WebsocketHandler {
|
|||
}
|
||||
}
|
||||
|
||||
const serializedResponse = '{'
|
||||
if (Object.keys(response).length) {
|
||||
const serializedResponse = this.serializeResponse(response);
|
||||
client.send(serializedResponse);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// takes a dictionary of JSON serialized values
|
||||
// and zips it together into a valid JSON object
|
||||
private serializeResponse(response): string {
|
||||
return '{'
|
||||
+ Object.keys(response).map(key => `"${key}": ${response[key]}`).join(', ')
|
||||
+ '}';
|
||||
client.send(serializedResponse);
|
||||
});
|
||||
}
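serializeResponse assumes every value in `response` is already a JSON-serialized string, so it only stitches keys and raw values into one object literal instead of re-stringifying the whole payload per client. A minimal standalone sketch of the same idea; the sample keys and values are illustrative, not part of this commit:

```typescript
// Zip pre-serialized JSON values into a single JSON object string.
// Assumes each value is already valid JSON text (e.g. produced by JSON.stringify).
function serializeResponse(response: Record<string, string>): string {
  return '{'
    + Object.keys(response).map(key => `"${key}": ${response[key]}`).join(', ')
    + '}';
}

// Cached strings are reused as-is, so nothing gets stringified twice per client.
const cached = { pong: JSON.stringify(true), fees: JSON.stringify({ fastestFee: 12 }) };
console.log(serializeResponse(cached)); // {"pong": true, "fees": {"fastestFee":12}}
```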
|
||||
|
||||
private printLogs(): void {
|
||||
|
|
|
@ -31,9 +31,12 @@ interface IConfig {
|
|||
AUDIT: boolean;
|
||||
ADVANCED_GBT_AUDIT: boolean;
|
||||
ADVANCED_GBT_MEMPOOL: boolean;
|
||||
RUST_GBT: boolean;
|
||||
CPFP_INDEXING: boolean;
|
||||
MAX_BLOCKS_BULK_QUERY: number;
|
||||
DISK_CACHE_BLOCK_INTERVAL: number;
|
||||
MAX_PUSH_TX_SIZE_WEIGHT: number;
|
||||
ALLOW_UNREACHABLE: boolean;
|
||||
};
|
||||
ESPLORA: {
|
||||
REST_API_URL: string;
|
||||
|
@ -129,6 +132,12 @@ interface IConfig {
|
|||
GEOLITE2_ASN: string;
|
||||
GEOIP2_ISP: string;
|
||||
},
|
||||
REPLICATION: {
|
||||
ENABLED: boolean;
|
||||
AUDIT: boolean;
|
||||
AUDIT_START_HEIGHT: number;
|
||||
SERVERS: string[];
|
||||
}
|
||||
}
|
||||
|
||||
const defaults: IConfig = {
|
||||
|
@ -160,9 +169,12 @@ const defaults: IConfig = {
|
|||
'AUDIT': false,
|
||||
'ADVANCED_GBT_AUDIT': false,
|
||||
'ADVANCED_GBT_MEMPOOL': false,
|
||||
'RUST_GBT': false,
|
||||
'CPFP_INDEXING': false,
|
||||
'MAX_BLOCKS_BULK_QUERY': 0,
|
||||
'DISK_CACHE_BLOCK_INTERVAL': 6,
|
||||
'MAX_PUSH_TX_SIZE_WEIGHT': 400000,
|
||||
'ALLOW_UNREACHABLE': true,
|
||||
},
|
||||
'ESPLORA': {
|
||||
'REST_API_URL': 'http://127.0.0.1:3000',
|
||||
|
@ -258,6 +270,12 @@ const defaults: IConfig = {
|
|||
'GEOLITE2_ASN': '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
|
||||
'GEOIP2_ISP': '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
|
||||
},
|
||||
'REPLICATION': {
|
||||
'ENABLED': false,
|
||||
'AUDIT': false,
|
||||
'AUDIT_START_HEIGHT': 774000,
|
||||
'SERVERS': [],
|
||||
}
|
||||
};
|
||||
|
||||
class Config implements IConfig {
|
||||
|
@ -277,6 +295,7 @@ class Config implements IConfig {
|
|||
PRICE_DATA_SERVER: IConfig['PRICE_DATA_SERVER'];
|
||||
EXTERNAL_DATA_SERVER: IConfig['EXTERNAL_DATA_SERVER'];
|
||||
MAXMIND: IConfig['MAXMIND'];
|
||||
REPLICATION: IConfig['REPLICATION'];
|
||||
|
||||
constructor() {
|
||||
const configs = this.merge(configFromFile, defaults);
|
||||
|
@ -296,6 +315,7 @@ class Config implements IConfig {
|
|||
this.PRICE_DATA_SERVER = configs.PRICE_DATA_SERVER;
|
||||
this.EXTERNAL_DATA_SERVER = configs.EXTERNAL_DATA_SERVER;
|
||||
this.MAXMIND = configs.MAXMIND;
|
||||
this.REPLICATION = configs.REPLICATION;
|
||||
}
|
||||
|
||||
merge = (...objects: object[]): IConfig => {
|
||||
|
|
|
@ -30,7 +30,7 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
|
|||
}
|
||||
|
||||
public async query<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
|
||||
OkPacket[] | ResultSetHeader>(query, params?): Promise<[T, FieldPacket[]]>
|
||||
OkPacket[] | ResultSetHeader>(query, params?, connection?: PoolConnection): Promise<[T, FieldPacket[]]>
|
||||
{
|
||||
this.checkDBFlag();
|
||||
let hardTimeout;
|
||||
|
@ -45,7 +45,9 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
|
|||
reject(new Error(`DB query failed to return, reject or time out within ${hardTimeout / 1000}s - ${query?.sql?.slice(0, 160) || (typeof(query) === 'string' || query instanceof String ? query?.slice(0, 160) : 'unknown query')}`));
|
||||
}, hardTimeout);
|
||||
|
||||
this.getPool().then(pool => {
|
||||
// Use a specific connection if provided, otherwise delegate to the pool
|
||||
const connectionPromise = connection ? Promise.resolve(connection) : this.getPool();
|
||||
connectionPromise.then((pool: PoolConnection | Pool) => {
|
||||
return pool.query(query, params) as Promise<[T, FieldPacket[]]>;
|
||||
}).then(result => {
|
||||
resolve(result);
|
||||
|
@ -61,6 +63,33 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
|
|||
}
|
||||
}
|
||||
|
||||
public async $atomicQuery<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
|
||||
OkPacket[] | ResultSetHeader>(queries: { query, params }[]): Promise<[T, FieldPacket[]][]>
|
||||
{
|
||||
const pool = await this.getPool();
|
||||
const connection = await pool.getConnection();
|
||||
try {
|
||||
await connection.beginTransaction();
|
||||
|
||||
const results: [T, FieldPacket[]][] = [];
|
||||
for (const query of queries) {
|
||||
const result = await this.query(query.query, query.params, connection) as [T, FieldPacket[]];
|
||||
results.push(result);
|
||||
}
|
||||
|
||||
await connection.commit();
|
||||
|
||||
return results;
|
||||
} catch (e) {
|
||||
logger.err('Could not complete db transaction, rolling back: ' + (e instanceof Error ? e.message : e));
|
||||
connection.rollback();
|
||||
connection.release();
|
||||
throw e;
|
||||
} finally {
|
||||
connection.release();
|
||||
}
|
||||
}
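$atomicQuery checks out one pooled connection, runs each parameterized query through the existing query() method inside a transaction, and rolls back (then rethrows) if any statement fails. A hedged usage sketch; the import path, table rows, and values are hypothetical placeholders modeled on the CPFP repositories later in this diff:

```typescript
import DB from '../database'; // path assumed

// Two dependent writes either commit together or roll back together.
async function saveClusterAtomically(): Promise<void> {
  const root = 'aa'.repeat(32); // placeholder 32-byte hex cluster root
  const txid = 'bb'.repeat(32); // placeholder txid
  const queries = [
    {
      query: 'INSERT INTO compact_cpfp_clusters(root, height, txs, fee_rate) VALUE (UNHEX(?), ?, ?, ?)',
      params: [root, 800000, Buffer.from([]), 12.5],
    },
    {
      query: 'INSERT IGNORE INTO compact_transactions (txid, cluster) VALUES (UNHEX(?), UNHEX(?))',
      params: [txid, root],
    },
  ];
  await DB.$atomicQuery(queries); // rolls back and rethrows if either statement fails
}
```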
|
||||
|
||||
public async checkDbConnection() {
|
||||
this.checkDBFlag();
|
||||
try {
|
||||
|
|
|
@ -150,7 +150,7 @@ class Server {
|
|||
|
||||
if (config.BISQ.ENABLED) {
|
||||
bisq.startBisqService();
|
||||
bisq.setPriceCallbackFunction((price) => websocketHandler.setExtraInitProperties('bsq-price', price));
|
||||
bisq.setPriceCallbackFunction((price) => websocketHandler.setExtraInitData('bsq-price', price));
|
||||
blocks.setNewBlockCallback(bisq.handleNewBitcoinBlock.bind(bisq));
|
||||
bisqMarkets.startBisqService();
|
||||
}
|
||||
|
@ -169,6 +169,7 @@ class Server {
|
|||
}
|
||||
|
||||
async runMainUpdateLoop(): Promise<void> {
|
||||
const start = Date.now();
|
||||
try {
|
||||
try {
|
||||
await memPool.$updateMemPoolInfo();
|
||||
|
@ -188,7 +189,9 @@ class Server {
|
|||
indexer.$run();
|
||||
|
||||
// rerun immediately if we skipped the mempool update, otherwise wait POLL_RATE_MS
|
||||
setTimeout(this.runMainUpdateLoop.bind(this), numHandledBlocks > 0 ? 1 : config.MEMPOOL.POLL_RATE_MS);
|
||||
const elapsed = Date.now() - start;
|
||||
const remainingTime = Math.max(0, config.MEMPOOL.POLL_RATE_MS - elapsed)
|
||||
setTimeout(this.runMainUpdateLoop.bind(this), numHandledBlocks > 0 ? 0 : remainingTime);
|
||||
this.backendRetryCount = 0;
|
||||
} catch (e: any) {
|
||||
this.backendRetryCount++;
|
||||
|
|
|
@ -6,6 +6,8 @@ import logger from './logger';
|
|||
import bitcoinClient from './api/bitcoin/bitcoin-client';
|
||||
import priceUpdater from './tasks/price-updater';
|
||||
import PricesRepository from './repositories/PricesRepository';
|
||||
import config from './config';
|
||||
import auditReplicator from './replication/AuditReplication';
|
||||
|
||||
export interface CoreIndex {
|
||||
name: string;
|
||||
|
@ -72,7 +74,7 @@ class Indexer {
|
|||
return;
|
||||
}
|
||||
|
||||
if (task === 'blocksPrices' && !this.tasksRunning.includes(task)) {
|
||||
if (task === 'blocksPrices' && !this.tasksRunning.includes(task) && !['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
|
||||
this.tasksRunning.push(task);
|
||||
const lastestPriceId = await PricesRepository.$getLatestPriceId();
|
||||
if (priceUpdater.historyInserted === false || lastestPriceId === null) {
|
||||
|
@ -135,6 +137,7 @@ class Indexer {
|
|||
await blocks.$generateBlocksSummariesDatabase();
|
||||
await blocks.$generateCPFPDatabase();
|
||||
await blocks.$generateAuditStats();
|
||||
await auditReplicator.$sync();
|
||||
} catch (e) {
|
||||
this.indexerRunning = false;
|
||||
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
|
|
|
@ -19,6 +19,7 @@ export interface PoolInfo {
|
|||
blockCount: number;
|
||||
slug: string;
|
||||
avgMatchRate: number | null;
|
||||
avgFeeDelta: number | null;
|
||||
}
|
||||
|
||||
export interface PoolStats extends PoolInfo {
|
||||
|
@ -33,6 +34,7 @@ export interface BlockAudit {
|
|||
missingTxs: string[],
|
||||
freshTxs: string[],
|
||||
sigopTxs: string[],
|
||||
fullrbfTxs: string[],
|
||||
addedTxs: string[],
|
||||
matchRate: number,
|
||||
expectedFees?: number,
|
||||
|
@ -93,9 +95,12 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
|
|||
}
|
||||
|
||||
export interface MempoolTransactionExtended extends TransactionExtended {
|
||||
order: number;
|
||||
sigops: number;
|
||||
adjustedVsize: number;
|
||||
adjustedFeePerVsize: number;
|
||||
inputs?: number[];
|
||||
lastBoosted?: number;
|
||||
}
|
||||
|
||||
export interface AuditTransaction {
|
||||
|
@ -125,9 +130,9 @@ export interface CompactThreadTransaction {
|
|||
weight: number;
|
||||
sigops: number;
|
||||
feePerVsize: number;
|
||||
effectiveFeePerVsize?: number;
|
||||
effectiveFeePerVsize: number;
|
||||
inputs: number[];
|
||||
cpfpRoot?: string;
|
||||
cpfpRoot?: number;
|
||||
cpfpChecked?: boolean;
|
||||
dirty?: boolean;
|
||||
}
|
||||
|
@ -224,6 +229,7 @@ export interface BlockExtension {
|
|||
*/
|
||||
export interface BlockExtended extends IEsploraApi.Block {
|
||||
extras: BlockExtension;
|
||||
canonical?: string;
|
||||
}
|
||||
|
||||
export interface BlockSummary {
|
||||
|
@ -231,6 +237,15 @@ export interface BlockSummary {
|
|||
transactions: TransactionStripped[];
|
||||
}
|
||||
|
||||
export interface AuditSummary extends BlockAudit {
|
||||
timestamp?: number,
|
||||
size?: number,
|
||||
weight?: number,
|
||||
tx_count?: number,
|
||||
transactions: TransactionStripped[];
|
||||
template?: TransactionStripped[];
|
||||
}
|
||||
|
||||
export interface BlockPrice {
|
||||
height: number;
|
||||
priceId: number;
|
||||
|
|
134
backend/src/replication/AuditReplication.ts
Normal file
|
@ -0,0 +1,134 @@
|
|||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { AuditSummary } from '../mempool.interfaces';
|
||||
import blocksAuditsRepository from '../repositories/BlocksAuditsRepository';
|
||||
import blocksSummariesRepository from '../repositories/BlocksSummariesRepository';
|
||||
import { $sync } from './replicator';
|
||||
import config from '../config';
|
||||
import { Common } from '../api/common';
|
||||
import blocks from '../api/blocks';
|
||||
|
||||
const BATCH_SIZE = 16;
|
||||
|
||||
/**
|
||||
* Syncs missing block template and audit data from trusted servers
|
||||
*/
|
||||
class AuditReplication {
|
||||
inProgress: boolean = false;
|
||||
skip: Set<string> = new Set();
|
||||
|
||||
public async $sync(): Promise<void> {
|
||||
if (!config.REPLICATION.ENABLED || !config.REPLICATION.AUDIT) {
|
||||
// replication not enabled
|
||||
return;
|
||||
}
|
||||
if (this.inProgress) {
|
||||
logger.info(`AuditReplication sync already in progress`, 'Replication');
|
||||
return;
|
||||
}
|
||||
this.inProgress = true;
|
||||
|
||||
const missingAudits = await this.$getMissingAuditBlocks();
|
||||
|
||||
logger.debug(`Fetching missing audit data for ${missingAudits.length} blocks from trusted servers`, 'Replication');
|
||||
|
||||
let totalSynced = 0;
|
||||
let totalMissed = 0;
|
||||
let loggerTimer = Date.now();
|
||||
// process missing audits in batches of BATCH_SIZE
|
||||
for (let i = 0; i < missingAudits.length; i += BATCH_SIZE) {
|
||||
const slice = missingAudits.slice(i, i + BATCH_SIZE);
|
||||
const results = await Promise.all(slice.map(hash => this.$syncAudit(hash)));
|
||||
const synced = results.reduce((total, status) => status ? total + 1 : total, 0);
|
||||
totalSynced += synced;
|
||||
totalMissed += (slice.length - synced);
|
||||
if (Date.now() - loggerTimer > 10000) {
|
||||
loggerTimer = Date.now();
|
||||
logger.info(`Found ${totalSynced} / ${totalSynced + totalMissed} of ${missingAudits.length} missing audits`, 'Replication');
|
||||
}
|
||||
await Common.sleep$(1000);
|
||||
}
|
||||
|
||||
logger.debug(`Fetched ${totalSynced} audits, ${totalMissed} still missing`, 'Replication');
|
||||
|
||||
this.inProgress = false;
|
||||
}
|
||||
|
||||
private async $syncAudit(hash: string): Promise<boolean> {
|
||||
if (this.skip.has(hash)) {
|
||||
// we already know none of our trusted servers have this audit
|
||||
return false;
|
||||
}
|
||||
|
||||
let success = false;
|
||||
// start with a random server so load is uniformly spread
|
||||
const syncResult = await $sync(`/api/v1/block/${hash}/audit-summary`);
|
||||
if (syncResult) {
|
||||
if (syncResult.data?.template?.length) {
|
||||
await this.$saveAuditData(hash, syncResult.data);
|
||||
logger.info(`Imported audit data from ${syncResult.server} for block ${syncResult.data.height} (${hash})`);
|
||||
success = true;
|
||||
}
|
||||
if (!syncResult.data && !syncResult.exists) {
|
||||
this.skip.add(hash);
|
||||
}
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
private async $getMissingAuditBlocks(): Promise<string[]> {
|
||||
try {
|
||||
const startHeight = config.REPLICATION.AUDIT_START_HEIGHT || 0;
|
||||
const [rows]: any[] = await DB.query(`
|
||||
SELECT auditable.hash, auditable.height
|
||||
FROM (
|
||||
SELECT hash, height
|
||||
FROM blocks
|
||||
WHERE height >= ?
|
||||
) AS auditable
|
||||
LEFT JOIN blocks_audits ON auditable.hash = blocks_audits.hash
|
||||
WHERE blocks_audits.hash IS NULL
|
||||
ORDER BY auditable.height DESC
|
||||
`, [startHeight]);
|
||||
return rows.map(row => row.hash);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot fetch missing audit blocks from db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
private async $saveAuditData(blockHash: string, auditSummary: AuditSummary): Promise<void> {
|
||||
// save audit & template to DB
|
||||
await blocksSummariesRepository.$saveTemplate({
|
||||
height: auditSummary.height,
|
||||
template: {
|
||||
id: blockHash,
|
||||
transactions: auditSummary.template || []
|
||||
}
|
||||
});
|
||||
await blocksAuditsRepository.$saveAudit({
|
||||
hash: blockHash,
|
||||
height: auditSummary.height,
|
||||
time: auditSummary.timestamp || auditSummary.time,
|
||||
missingTxs: auditSummary.missingTxs || [],
|
||||
addedTxs: auditSummary.addedTxs || [],
|
||||
freshTxs: auditSummary.freshTxs || [],
|
||||
sigopTxs: auditSummary.sigopTxs || [],
|
||||
fullrbfTxs: auditSummary.fullrbfTxs || [],
|
||||
matchRate: auditSummary.matchRate,
|
||||
expectedFees: auditSummary.expectedFees,
|
||||
expectedWeight: auditSummary.expectedWeight,
|
||||
});
|
||||
// add missing data to cached blocks
|
||||
const cachedBlock = blocks.getBlocks().find(block => block.id === blockHash);
|
||||
if (cachedBlock) {
|
||||
cachedBlock.extras.matchRate = auditSummary.matchRate;
|
||||
cachedBlock.extras.expectedFees = auditSummary.expectedFees || null;
|
||||
cachedBlock.extras.expectedWeight = auditSummary.expectedWeight || null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new AuditReplication();
|
||||
|
70
backend/src/replication/replicator.ts
Normal file
|
@ -0,0 +1,70 @@
|
|||
import config from '../config';
|
||||
import backendInfo from '../api/backend-info';
|
||||
import axios, { AxiosResponse } from 'axios';
|
||||
import { SocksProxyAgent } from 'socks-proxy-agent';
|
||||
import * as https from 'https';
|
||||
|
||||
export async function $sync(path): Promise<{ data?: any, exists: boolean, server?: string }> {
|
||||
// start with a random server so load is uniformly spread
|
||||
let allMissing = true;
|
||||
const offset = Math.floor(Math.random() * config.REPLICATION.SERVERS.length);
|
||||
for (let i = 0; i < config.REPLICATION.SERVERS.length; i++) {
|
||||
const server = config.REPLICATION.SERVERS[(i + offset) % config.REPLICATION.SERVERS.length];
|
||||
// don't query ourself
|
||||
if (server === backendInfo.getBackendInfo().hostname) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await query(`https://${server}${path}`);
|
||||
if (result) {
|
||||
return { data: result, exists: true, server };
|
||||
}
|
||||
} catch (e: any) {
|
||||
if (e?.response?.status === 404) {
|
||||
// this server is also missing this data
|
||||
} else {
|
||||
// something else went wrong
|
||||
allMissing = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { exists: !allMissing };
|
||||
}
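$sync walks the configured replication servers starting from a random offset so load is spread evenly, returns the first successful payload, and only reports `exists: false` when every reachable server answered 404. A hedged sketch of how a caller can consume that result, mirroring AuditReplication above; the import path and endpoint are illustrative:

```typescript
import { $sync } from './replicator'; // path assumed

const permanentlyMissing = new Set<string>();

// Fetch one audit summary from any trusted server, remembering permanent misses.
async function fetchAuditSummary(hash: string): Promise<any | null> {
  const result = await $sync(`/api/v1/block/${hash}/audit-summary`);
  if (result.data) {
    return result.data;            // some server had the data
  }
  if (!result.exists) {
    permanentlyMissing.add(hash);  // every server returned 404; don't retry
  }
  return null;
}
```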
|
||||
|
||||
export async function query(path): Promise<object> {
|
||||
type axiosOptions = {
|
||||
headers: {
|
||||
'User-Agent': string
|
||||
};
|
||||
timeout: number;
|
||||
httpsAgent?: https.Agent;
|
||||
};
|
||||
const axiosOptions: axiosOptions = {
|
||||
headers: {
|
||||
'User-Agent': (config.MEMPOOL.USER_AGENT === 'mempool') ? `mempool/v${backendInfo.getBackendInfo().version}` : `${config.MEMPOOL.USER_AGENT}`
|
||||
},
|
||||
timeout: config.SOCKS5PROXY.ENABLED ? 30000 : 10000
|
||||
};
|
||||
|
||||
if (config.SOCKS5PROXY.ENABLED) {
|
||||
const socksOptions = {
|
||||
agentOptions: {
|
||||
keepAlive: true,
|
||||
},
|
||||
hostname: config.SOCKS5PROXY.HOST,
|
||||
port: config.SOCKS5PROXY.PORT,
|
||||
username: config.SOCKS5PROXY.USERNAME || 'circuit0',
|
||||
password: config.SOCKS5PROXY.PASSWORD,
|
||||
};
|
||||
|
||||
axiosOptions.httpsAgent = new SocksProxyAgent(socksOptions);
|
||||
}
|
||||
|
||||
const data: AxiosResponse = await axios.get(path, axiosOptions);
|
||||
if (data.statusText === 'error' || !data.data) {
|
||||
throw new Error(`${data.status}`);
|
||||
}
|
||||
return data.data;
|
||||
}
|
|
@ -6,9 +6,9 @@ import { BlockAudit, AuditScore } from '../mempool.interfaces';
|
|||
class BlocksAuditRepositories {
|
||||
public async $saveAudit(audit: BlockAudit): Promise<void> {
|
||||
try {
|
||||
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, sigop_txs, match_rate, expected_fees, expected_weight)
|
||||
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
|
||||
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight]);
|
||||
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, sigop_txs, fullrbf_txs, match_rate, expected_fees, expected_weight)
|
||||
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
|
||||
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight]);
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
|
||||
|
@ -62,21 +62,18 @@ class BlocksAuditRepositories {
|
|||
public async $getBlockAudit(hash: string): Promise<any> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(
|
||||
`SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
|
||||
blocks.weight, blocks.tx_count,
|
||||
transactions,
|
||||
`SELECT blocks_audits.height, blocks_audits.hash as id, UNIX_TIMESTAMP(blocks_audits.time) as timestamp,
|
||||
template,
|
||||
missing_txs as missingTxs,
|
||||
added_txs as addedTxs,
|
||||
fresh_txs as freshTxs,
|
||||
sigop_txs as sigopTxs,
|
||||
fullrbf_txs as fullrbfTxs,
|
||||
match_rate as matchRate,
|
||||
expected_fees as expectedFees,
|
||||
expected_weight as expectedWeight
|
||||
FROM blocks_audits
|
||||
JOIN blocks ON blocks.hash = blocks_audits.hash
|
||||
JOIN blocks_templates ON blocks_templates.id = blocks_audits.hash
|
||||
JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
|
||||
WHERE blocks_audits.hash = "${hash}"
|
||||
`);
|
||||
|
||||
|
@ -85,12 +82,10 @@ class BlocksAuditRepositories {
|
|||
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
|
||||
rows[0].freshTxs = JSON.parse(rows[0].freshTxs);
|
||||
rows[0].sigopTxs = JSON.parse(rows[0].sigopTxs);
|
||||
rows[0].transactions = JSON.parse(rows[0].transactions);
|
||||
rows[0].fullrbfTxs = JSON.parse(rows[0].fullrbfTxs);
|
||||
rows[0].template = JSON.parse(rows[0].template);
|
||||
|
||||
if (rows[0].transactions.length) {
|
||||
return rows[0];
|
||||
}
|
||||
return rows[0];
|
||||
}
|
||||
return null;
|
||||
} catch (e: any) {
|
||||
|
|
|
@ -401,7 +401,7 @@ class BlocksRepository {
|
|||
/**
|
||||
* Get average block health for all blocks for a single pool
|
||||
*/
|
||||
public async $getAvgBlockHealthPerPoolId(poolId: number): Promise<number> {
|
||||
public async $getAvgBlockHealthPerPoolId(poolId: number): Promise<number | null> {
|
||||
const params: any[] = [];
|
||||
const query = `
|
||||
SELECT AVG(blocks_audits.match_rate) AS avg_match_rate
|
||||
|
@ -413,8 +413,8 @@ class BlocksRepository {
|
|||
|
||||
try {
|
||||
const [rows] = await DB.query(query, params);
|
||||
if (!rows[0] || !rows[0].avg_match_rate) {
|
||||
return 0;
|
||||
if (!rows[0] || rows[0].avg_match_rate == null) {
|
||||
return null;
|
||||
}
|
||||
return Math.round(rows[0].avg_match_rate * 100) / 100;
|
||||
} catch (e) {
|
||||
|
|
|
@ -5,52 +5,10 @@ import { Ancestor, CpfpCluster } from '../mempool.interfaces';
|
|||
import transactionRepository from '../repositories/TransactionRepository';
|
||||
|
||||
class CpfpRepository {
|
||||
public async $saveCluster(clusterRoot: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number): Promise<boolean> {
|
||||
if (!txs[0]) {
|
||||
return false;
|
||||
}
|
||||
// skip clusters of transactions with the same fees
|
||||
const roundedEffectiveFee = Math.round(effectiveFeePerVsize * 100) / 100;
|
||||
const equalFee = txs.length > 1 && txs.reduce((acc, tx) => {
|
||||
return (acc && Math.round(((tx.fee || 0) / (tx.weight / 4)) * 100) / 100 === roundedEffectiveFee);
|
||||
}, true);
|
||||
if (equalFee) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const packedTxs = Buffer.from(this.pack(txs));
|
||||
await DB.query(
|
||||
`
|
||||
INSERT INTO compact_cpfp_clusters(root, height, txs, fee_rate)
|
||||
VALUE (UNHEX(?), ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
height = ?,
|
||||
txs = ?,
|
||||
fee_rate = ?
|
||||
`,
|
||||
[clusterRoot, height, packedTxs, effectiveFeePerVsize, height, packedTxs, effectiveFeePerVsize]
|
||||
);
|
||||
const maxChunk = 10;
|
||||
let chunkIndex = 0;
|
||||
while (chunkIndex < txs.length) {
|
||||
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk).map(tx => {
|
||||
return { txid: tx.txid, cluster: clusterRoot };
|
||||
});
|
||||
await transactionRepository.$batchSetCluster(chunk);
|
||||
chunkIndex += maxChunk;
|
||||
}
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot save cpfp cluster into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $batchSaveClusters(clusters: { root: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number }[]): Promise<boolean> {
|
||||
try {
|
||||
const clusterValues: any[] = [];
|
||||
const txs: any[] = [];
|
||||
const clusterValues: [string, number, Buffer, number][] = [];
|
||||
const txs: { txid: string, cluster: string }[] = [];
|
||||
|
||||
for (const cluster of clusters) {
|
||||
if (cluster.txs?.length) {
|
||||
|
@ -76,6 +34,8 @@ class CpfpRepository {
|
|||
return false;
|
||||
}
|
||||
|
||||
const queries: { query, params }[] = [];
|
||||
|
||||
const maxChunk = 100;
|
||||
let chunkIndex = 0;
|
||||
// insert clusters in batches of up to 100 rows
|
||||
|
@ -89,10 +49,10 @@ class CpfpRepository {
|
|||
return (' (UNHEX(?), ?, ?, ?)');
|
||||
}) + ';';
|
||||
const values = chunk.flat();
|
||||
await DB.query(
|
||||
queries.push({
|
||||
query,
|
||||
values
|
||||
);
|
||||
params: values,
|
||||
});
|
||||
chunkIndex += maxChunk;
|
||||
}
|
||||
|
||||
|
@ -100,10 +60,12 @@ class CpfpRepository {
|
|||
// insert transactions in batches of up to 100 rows
|
||||
while (chunkIndex < txs.length) {
|
||||
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk);
|
||||
await transactionRepository.$batchSetCluster(chunk);
|
||||
queries.push(transactionRepository.buildBatchSetQuery(chunk));
|
||||
chunkIndex += maxChunk;
|
||||
}
|
||||
|
||||
await DB.$atomicQuery(queries);
|
||||
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot save cpfp clusters into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
|
|
|
@ -39,7 +39,8 @@ class PoolsRepository {
|
|||
pools.name AS name,
|
||||
pools.link AS link,
|
||||
slug,
|
||||
AVG(blocks_audits.match_rate) AS avgMatchRate
|
||||
AVG(blocks_audits.match_rate) AS avgMatchRate,
|
||||
AVG((CAST(blocks.fees as SIGNED) - CAST(blocks_audits.expected_fees as SIGNED)) / NULLIF(CAST(blocks_audits.expected_fees as SIGNED), 0)) AS avgFeeDelta
|
||||
FROM blocks
|
||||
JOIN pools on pools.id = pool_id
|
||||
LEFT JOIN blocks_audits ON blocks_audits.height = blocks.height
|
||||
|
|
|
@ -25,9 +25,8 @@ class TransactionRepository {
|
|||
}
|
||||
}
|
||||
|
||||
public async $batchSetCluster(txs): Promise<void> {
|
||||
try {
|
||||
let query = `
|
||||
public buildBatchSetQuery(txs: { txid: string, cluster: string }[]): { query, params } {
|
||||
let query = `
|
||||
INSERT IGNORE INTO compact_transactions
|
||||
(
|
||||
txid,
|
||||
|
@ -35,13 +34,22 @@ class TransactionRepository {
|
|||
)
|
||||
VALUES
|
||||
`;
|
||||
query += txs.map(tx => {
|
||||
return (' (UNHEX(?), UNHEX(?))');
|
||||
}) + ';';
|
||||
const values = txs.map(tx => [tx.txid, tx.cluster]).flat();
|
||||
query += txs.map(tx => {
|
||||
return (' (UNHEX(?), UNHEX(?))');
|
||||
}) + ';';
|
||||
const values = txs.map(tx => [tx.txid, tx.cluster]).flat();
|
||||
return {
|
||||
query,
|
||||
params: values,
|
||||
};
|
||||
}
|
||||
|
||||
public async $batchSetCluster(txs): Promise<void> {
|
||||
try {
|
||||
const query = this.buildBatchSetQuery(txs);
|
||||
await DB.query(
|
||||
query,
|
||||
values
|
||||
query.query,
|
||||
query.params,
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot save cpfp transactions into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
|
|
|
@ -3,7 +3,6 @@ import logger from '../../logger';
|
|||
import channelsApi from '../../api/explorer/channels.api';
|
||||
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
|
||||
import config from '../../config';
|
||||
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
|
||||
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
|
||||
import { $lookupNodeLocation } from './sync-tasks/node-locations';
|
||||
import lightningApi from '../../api/lightning/lightning-api-factory';
|
||||
|
@ -269,7 +268,11 @@ class NetworkSyncService {
|
|||
}
|
||||
|
||||
private async $scanForClosedChannels(): Promise<void> {
|
||||
if (this.closedChannelsScanBlock === blocks.getCurrentBlockHeight()) {
|
||||
let currentBlockHeight = blocks.getCurrentBlockHeight();
|
||||
if (config.MEMPOOL.ENABLED === false) { // https://github.com/mempool/mempool/issues/3582
|
||||
currentBlockHeight = await bitcoinApi.$getBlockHeightTip();
|
||||
}
|
||||
if (this.closedChannelsScanBlock === currentBlockHeight) {
|
||||
logger.debug(`We've already scanned closed channels for this block, skipping.`);
|
||||
return;
|
||||
}
|
||||
|
@ -305,7 +308,7 @@ class NetworkSyncService {
|
|||
}
|
||||
}
|
||||
|
||||
this.closedChannelsScanBlock = blocks.getCurrentBlockHeight();
|
||||
this.closedChannelsScanBlock = currentBlockHeight;
|
||||
logger.debug(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`, logger.tags.ln);
|
||||
} catch (e) {
|
||||
logger.err(`$scanForClosedChannels() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
|
||||
|
|
|
@ -153,6 +153,7 @@ class PriceUpdater {
|
|||
try {
|
||||
const p = 60 * 60 * 1000; // milliseconds in an hour
|
||||
const nowRounded = new Date(Math.round(new Date().getTime() / p) * p); // https://stackoverflow.com/a/28037042
|
||||
this.latestPrices.time = nowRounded.getTime() / 1000;
|
||||
await PricesRepository.$savePrices(nowRounded.getTime() / 1000, this.latestPrices);
|
||||
} catch (e) {
|
||||
this.lastRun = previousRun + 5 * 60;
|
||||
|
|
|
@ -27,3 +27,69 @@ export function formatBytes(bytes: number, toUnit: string, skipUnit = false): st
|
|||
|
||||
return `${bytes.toFixed(2)}${skipUnit ? '' : ' ' + byteUnits[unitIndex]}`;
|
||||
}
|
||||
|
||||
// https://stackoverflow.com/a/64235212
|
||||
export function hex2bin(hex: string): string {
|
||||
if (!hex) {
|
||||
return '';
|
||||
}
|
||||
|
||||
hex = hex.replace('0x', '').toLowerCase();
|
||||
let out = '';
|
||||
|
||||
for (const c of hex) {
|
||||
switch (c) {
|
||||
case '0': out += '0000'; break;
|
||||
case '1': out += '0001'; break;
|
||||
case '2': out += '0010'; break;
|
||||
case '3': out += '0011'; break;
|
||||
case '4': out += '0100'; break;
|
||||
case '5': out += '0101'; break;
|
||||
case '6': out += '0110'; break;
|
||||
case '7': out += '0111'; break;
|
||||
case '8': out += '1000'; break;
|
||||
case '9': out += '1001'; break;
|
||||
case 'a': out += '1010'; break;
|
||||
case 'b': out += '1011'; break;
|
||||
case 'c': out += '1100'; break;
|
||||
case 'd': out += '1101'; break;
|
||||
case 'e': out += '1110'; break;
|
||||
case 'f': out += '1111'; break;
|
||||
default: return '';
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
export function bin2hex(bin: string): string {
|
||||
if (!bin) {
|
||||
return '';
|
||||
}
|
||||
|
||||
let out = '';
|
||||
|
||||
for (let i = 0; i < bin.length; i += 4) {
|
||||
const c = bin.substring(i, i + 4);
|
||||
switch (c) {
|
||||
case '0000': out += '0'; break;
|
||||
case '0001': out += '1'; break;
|
||||
case '0010': out += '2'; break;
|
||||
case '0011': out += '3'; break;
|
||||
case '0100': out += '4'; break;
|
||||
case '0101': out += '5'; break;
|
||||
case '0110': out += '6'; break;
|
||||
case '0111': out += '7'; break;
|
||||
case '1000': out += '8'; break;
|
||||
case '1001': out += '9'; break;
|
||||
case '1010': out += 'a'; break;
|
||||
case '1011': out += 'b'; break;
|
||||
case '1100': out += 'c'; break;
|
||||
case '1101': out += 'd'; break;
|
||||
case '1110': out += 'e'; break;
|
||||
case '1111': out += 'f'; break;
|
||||
default: return '';
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
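hex2bin and bin2hex convert one nibble at a time and return an empty string on any character (or 4-bit group) they don't recognize, so a round trip is lossless for well-formed input. A quick illustrative check; the import path is assumed from this diff's context:

```typescript
import { hex2bin, bin2hex } from './format'; // path assumed

console.log(hex2bin('0x1f'));          // '00011111' ('0x' prefix is stripped)
console.log(bin2hex(hex2bin('0x1f'))); // '1f' (round trip)
console.log(hex2bin('zz'));            // '' (invalid nibble)
```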
|
3
contributors/0xflicker.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 24, 2023.
|
||||
|
||||
Signed: 0xflicker
|
3
contributors/bennyhodl.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 28, 2023.
|
||||
|
||||
Signed: bennyhodl
|
3
contributors/devinbileck.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 21, 2023.
|
||||
|
||||
Signed: devinbileck
|
3
contributors/learntheropes.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of April 7, 2023.
|
||||
|
||||
Signed: learntheropes
|
3
contributors/nothing0012.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of April 8, 2023.
|
||||
|
||||
Signed: nothing0012
|
3
contributors/pedromvpg.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 20, 2023.
|
||||
|
||||
Signed: pedromvpg
|
3
contributors/pfoytik.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 15, 2023.
|
||||
|
||||
Signed pfoytik
|
3
contributors/secondl1ght.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of June 14, 2023.
|
||||
|
||||
Signed: secondl1ght
|
|
@ -144,8 +144,8 @@ Corresponding `docker-compose.yml` overrides:
|
|||
MEMPOOL_ADVANCED_GBT_AUDIT: ""
|
||||
MEMPOOL_ADVANCED_GBT_MEMPOOL: ""
|
||||
MEMPOOL_CPFP_INDEXING: ""
|
||||
MAX_BLOCKS_BULK_QUERY: ""
|
||||
DISK_CACHE_BLOCK_INTERVAL: ""
|
||||
MEMPOOL_MAX_BLOCKS_BULK_QUERY: ""
|
||||
MEMPOOL_DISK_CACHE_BLOCK_INTERVAL: ""
|
||||
...
|
||||
```
|
||||
|
||||
|
|
|
@ -7,7 +7,13 @@ WORKDIR /build
|
|||
COPY . .
|
||||
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y build-essential python3 pkg-config
|
||||
RUN apt-get install -y build-essential python3 pkg-config curl ca-certificates
|
||||
|
||||
# Install Rust via rustup
|
||||
RUN CPU_ARCH=$(uname -m); if [ "$CPU_ARCH" = "armv7l" ]; then c_rehash; fi
|
||||
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
|
||||
ENV PATH="/root/.cargo/bin:$PATH"
|
||||
|
||||
RUN npm install --omit=dev --omit=optional
|
||||
RUN npm run package
|
||||
|
||||
|
|
|
@ -25,9 +25,12 @@
|
|||
"AUDIT": __MEMPOOL_AUDIT__,
|
||||
"ADVANCED_GBT_AUDIT": __MEMPOOL_ADVANCED_GBT_AUDIT__,
|
||||
"ADVANCED_GBT_MEMPOOL": __MEMPOOL_ADVANCED_GBT_MEMPOOL__,
|
||||
"RUST_GBT": __MEMPOOL_RUST_GBT__,
|
||||
"CPFP_INDEXING": __MEMPOOL_CPFP_INDEXING__,
|
||||
"MAX_BLOCKS_BULK_QUERY": __MEMPOOL_MAX_BLOCKS_BULK_QUERY__,
|
||||
"DISK_CACHE_BLOCK_INTERVAL": __MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__,
|
||||
"MAX_PUSH_TX_SIZE_WEIGHT": __MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__,
|
||||
"ALLOW_UNREACHABLE": __MEMPOOL_ALLOW_UNREACHABLE__,
|
||||
"POOLS_JSON_TREE_URL": "__MEMPOOL_POOLS_JSON_TREE_URL__",
|
||||
"POOLS_JSON_URL": "__MEMPOOL_POOLS_JSON_URL__"
|
||||
},
|
||||
|
@ -124,5 +127,11 @@
|
|||
"GEOLITE2_CITY": "__MAXMIND_GEOLITE2_CITY__",
|
||||
"GEOLITE2_ASN": "__MAXMIND_GEOLITE2_ASN__",
|
||||
"GEOIP2_ISP": "__MAXMIND_GEOIP2_ISP__"
|
||||
},
|
||||
"REPLICATION": {
|
||||
"ENABLED": __REPLICATION_ENABLED__,
|
||||
"AUDIT": __REPLICATION_AUDIT__,
|
||||
"AUDIT_START_HEIGHT": __REPLICATION_AUDIT_START_HEIGHT__,
|
||||
"SERVERS": __REPLICATION_SERVERS__
|
||||
}
|
||||
}
|
|
@ -28,9 +28,13 @@ __MEMPOOL_POOLS_JSON_TREE_URL__=${MEMPOOL_POOLS_JSON_TREE_URL:=https://api.githu
|
|||
__MEMPOOL_AUDIT__=${MEMPOOL_AUDIT:=false}
|
||||
__MEMPOOL_ADVANCED_GBT_AUDIT__=${MEMPOOL_ADVANCED_GBT_AUDIT:=false}
|
||||
__MEMPOOL_ADVANCED_GBT_MEMPOOL__=${MEMPOOL_ADVANCED_GBT_MEMPOOL:=false}
|
||||
__MEMPOOL_RUST_GBT__=${MEMPOOL_RUST_GBT:=false}
|
||||
__MEMPOOL_CPFP_INDEXING__=${MEMPOOL_CPFP_INDEXING:=false}
|
||||
__MEMPOOL_MAX_BLOCKS_BULK_QUERY__=${MEMPOOL_MAX_BLOCKS_BULK_QUERY:=0}
|
||||
__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__=${MEMPOOL_DISK_CACHE_BLOCK_INTERVAL:=6}
|
||||
__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__=${MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT:=4000000}
|
||||
__MEMPOOL_ALLOW_UNREACHABLE__=${MEMPOOL_ALLOW_UNREACHABLE:=true}
|
||||
|
||||
|
||||
# CORE_RPC
|
||||
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
|
||||
|
@ -126,6 +130,12 @@ __MAXMIND_GEOLITE2_CITY__=${MAXMIND_GEOLITE2_CITY:="/backend/GeoIP/GeoLite2-City
|
|||
__MAXMIND_GEOLITE2_ASN__=${MAXMIND_GEOLITE2_ASN:="/backend/GeoIP/GeoLite2-ASN.mmdb"}
|
||||
__MAXMIND_GEOIP2_ISP__=${MAXMIND_GEOIP2_ISP:=""}
|
||||
|
||||
# REPLICATION
|
||||
__REPLICATION_ENABLED__=${REPLICATION_ENABLED:=true}
|
||||
__REPLICATION_AUDIT__=${REPLICATION_AUDIT:=true}
|
||||
__REPLICATION_AUDIT_START_HEIGHT__=${REPLICATION_AUDIT_START_HEIGHT:=774000}
|
||||
__REPLICATION_SERVERS__=${REPLICATION_SERVERS:=[]}
|
||||
|
||||
|
||||
mkdir -p "${__MEMPOOL_CACHE_DIR__}"
|
||||
|
||||
|
@ -155,10 +165,13 @@ sed -i "s!__MEMPOOL_POOLS_JSON_URL__!${__MEMPOOL_POOLS_JSON_URL__}!g" mempool-co
|
|||
sed -i "s!__MEMPOOL_POOLS_JSON_TREE_URL__!${__MEMPOOL_POOLS_JSON_TREE_URL__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_AUDIT__!${__MEMPOOL_AUDIT__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_ADVANCED_GBT_MEMPOOL__!${__MEMPOOL_ADVANCED_GBT_MEMPOOL__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_RUST_GBT__!${__MEMPOOL_GBT__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_ADVANCED_GBT_AUDIT__!${__MEMPOOL_ADVANCED_GBT_AUDIT__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_CPFP_INDEXING__!${__MEMPOOL_CPFP_INDEXING__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_MAX_BLOCKS_BULK_QUERY__!${__MEMPOOL_MAX_BLOCKS_BULK_QUERY__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__!${__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__!${__MEMPOOL_MAX_PUSH_TX_SIZE_WEIGHT__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_ALLOW_UNREACHABLE__!${__MEMPOOL_ALLOW_UNREACHABLE__}!g" mempool-config.json
|
||||
|
||||
sed -i "s!__CORE_RPC_HOST__!${__CORE_RPC_HOST__}!g" mempool-config.json
|
||||
sed -i "s!__CORE_RPC_PORT__!${__CORE_RPC_PORT__}!g" mempool-config.json
|
||||
|
@ -243,5 +256,10 @@ sed -i "s!__MAXMIND_GEOLITE2_CITY__!${__MAXMIND_GEOLITE2_CITY__}!g" mempool-conf
|
|||
sed -i "s!__MAXMIND_GEOLITE2_ASN__!${__MAXMIND_GEOLITE2_ASN__}!g" mempool-config.json
|
||||
sed -i "s!__MAXMIND_GEOIP2_ISP__!${__MAXMIND_GEOIP2_ISP__}!g" mempool-config.json
|
||||
|
||||
# REPLICATION
|
||||
sed -i "s!__REPLICATION_ENABLED__!${__REPLICATION_ENABLED__}!g" mempool-config.json
|
||||
sed -i "s!__REPLICATION_AUDIT__!${__REPLICATION_AUDIT__}!g" mempool-config.json
|
||||
sed -i "s!__REPLICATION_AUDIT_START_HEIGHT__!${__REPLICATION_AUDIT_START_HEIGHT__}!g" mempool-config.json
|
||||
sed -i "s!__REPLICATION_SERVERS__!${__REPLICATION_SERVERS__}!g" mempool-config.json
|
||||
|
||||
node /backend/package/index.js
|
||||
|
|
|
@ -39,7 +39,6 @@ __AUDIT__=${AUDIT:=false}
|
|||
__MAINNET_BLOCK_AUDIT_START_HEIGHT__=${MAINNET_BLOCK_AUDIT_START_HEIGHT:=0}
|
||||
__TESTNET_BLOCK_AUDIT_START_HEIGHT__=${TESTNET_BLOCK_AUDIT_START_HEIGHT:=0}
|
||||
__SIGNET_BLOCK_AUDIT_START_HEIGHT__=${SIGNET_BLOCK_AUDIT_START_HEIGHT:=0}
|
||||
__FULL_RBF_ENABLED__=${FULL_RBF_ENABLED:=false}
|
||||
__HISTORICAL_PRICE__=${HISTORICAL_PRICE:=true}
|
||||
|
||||
# Export as environment variables to be used by envsubst
|
||||
|
@ -66,7 +65,6 @@ export __AUDIT__
|
|||
export __MAINNET_BLOCK_AUDIT_START_HEIGHT__
|
||||
export __TESTNET_BLOCK_AUDIT_START_HEIGHT__
|
||||
export __SIGNET_BLOCK_AUDIT_START_HEIGHT__
|
||||
export __FULL_RBF_ENABLED__
|
||||
export __HISTORICAL_PRICE__
|
||||
|
||||
folder=$(find /var/www/mempool -name "config.js" | xargs dirname)
|
||||
|
|
|
@ -2,11 +2,15 @@
|
|||
# For additional information regarding the format and rule options, please see:
|
||||
# https://github.com/browserslist/browserslist#queries
|
||||
|
||||
# For the full list of supported browsers by the Angular framework, please see:
|
||||
# https://angular.io/guide/browser-support
|
||||
|
||||
# You can see what browsers were selected by your queries by running:
|
||||
# npx browserslist
|
||||
|
||||
> 0.5%
|
||||
last 2 versions
|
||||
last 2 Chrome versions
|
||||
last 1 Firefox version
|
||||
last 2 Edge major versions
|
||||
last 2 Safari major versions
|
||||
last 2 iOS major versions
|
||||
Firefox ESR
|
||||
not dead
|
||||
not IE 9-11 # For IE 9-11 support, remove 'not'.
|
|
@ -17,7 +17,7 @@ Get the latest Mempool code:
|
|||
|
||||
```
|
||||
git clone https://github.com/mempool/mempool
|
||||
cd mempool
|
||||
cd mempool/frontend
|
||||
```
|
||||
|
||||
### 2. Specify Website
|
||||
|
|
|
@ -22,6 +22,5 @@
|
|||
"TESTNET_BLOCK_AUDIT_START_HEIGHT": 0,
|
||||
"SIGNET_BLOCK_AUDIT_START_HEIGHT": 0,
|
||||
"LIGHTNING": false,
|
||||
"FULL_RBF_ENABLED": false,
|
||||
"HISTORICAL_PRICE": true
|
||||
}
|
||||
|
|
15035
frontend/package-lock.json
generated
File diff suppressed because it is too large
|
@ -1,6 +1,6 @@
{
  "name": "mempool-frontend",
  "version": "2.6.0-dev",
  "version": "3.0.0-dev",
  "description": "Bitcoin mempool visualizer and blockchain explorer backend",
  "license": "GNU Affero General Public License v3.0",
  "homepage": "https://mempool.space",

@ -61,60 +61,60 @@
    "cypress:run:ci:staging": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 && npm run generate-config && start-server-and-test serve:local-staging 4200 cypress:run:record"
  },
  "dependencies": {
    "@angular-devkit/build-angular": "^14.2.10",
    "@angular/animations": "^14.2.12",
    "@angular/cli": "^14.2.10",
    "@angular/common": "^14.2.12",
    "@angular/compiler": "^14.2.12",
    "@angular/core": "^14.2.12",
    "@angular/forms": "^14.2.12",
    "@angular/localize": "^14.2.12",
    "@angular/platform-browser": "^14.2.12",
    "@angular/platform-browser-dynamic": "^14.2.12",
    "@angular/platform-server": "^14.2.12",
    "@angular/router": "^14.2.12",
    "@fortawesome/angular-fontawesome": "~0.11.1",
    "@fortawesome/fontawesome-common-types": "~6.2.1",
    "@fortawesome/fontawesome-svg-core": "~6.2.1",
    "@fortawesome/free-solid-svg-icons": "~6.2.1",
    "@angular-devkit/build-angular": "^16.1.4",
    "@angular/animations": "^16.1.5",
    "@angular/cli": "^16.1.4",
    "@angular/common": "^16.1.5",
    "@angular/compiler": "^16.1.5",
    "@angular/core": "^16.1.5",
    "@angular/forms": "^16.1.5",
    "@angular/localize": "^16.1.5",
    "@angular/platform-browser": "^16.1.5",
    "@angular/platform-browser-dynamic": "^16.1.5",
    "@angular/platform-server": "^16.1.5",
    "@angular/router": "^16.1.5",
    "@fortawesome/angular-fontawesome": "~0.13.0",
    "@fortawesome/fontawesome-common-types": "~6.4.0",
    "@fortawesome/fontawesome-svg-core": "~6.4.0",
    "@fortawesome/free-solid-svg-icons": "~6.4.0",
    "@mempool/mempool.js": "2.3.0",
    "@ng-bootstrap/ng-bootstrap": "^13.1.1",
    "@ng-bootstrap/ng-bootstrap": "^15.1.0",
    "@types/qrcode": "~1.5.0",
    "bootstrap": "~4.6.1",
    "bootstrap": "~4.6.2",
    "browserify": "^17.0.0",
    "clipboard": "^2.0.11",
    "domino": "^2.1.6",
    "echarts": "~5.4.1",
    "echarts": "~5.4.3",
    "echarts-gl": "^2.0.9",
    "lightweight-charts": "~3.8.0",
    "ngx-echarts": "~14.0.0",
    "ngx-infinite-scroll": "^14.0.1",
    "ngx-echarts": "~16.0.0",
    "ngx-infinite-scroll": "^16.0.0",
    "qrcode": "1.5.1",
    "rxjs": "~7.8.0",
    "tinyify": "^3.1.0",
    "rxjs": "~7.8.1",
    "tinyify": "^4.0.0",
    "tlite": "^0.1.9",
    "tslib": "~2.4.1",
    "zone.js": "~0.12.0"
    "tslib": "~2.6.0",
    "zone.js": "~0.13.1"
  },
  "devDependencies": {
    "@angular/compiler-cli": "^14.2.12",
    "@angular/language-service": "^14.2.12",
    "@angular/compiler-cli": "^16.1.5",
    "@angular/language-service": "^16.1.5",
    "@types/node": "^18.11.9",
    "@typescript-eslint/eslint-plugin": "^5.48.1",
    "@typescript-eslint/parser": "^5.48.1",
    "eslint": "^8.31.0",
    "http-proxy-middleware": "~2.0.6",
    "prettier": "^2.8.2",
    "prettier": "^3.0.0",
    "ts-node": "~10.9.1",
    "typescript": "~4.6.4"
    "typescript": "~4.9.3"
  },
  "optionalDependencies": {
    "@cypress/schematic": "^2.4.0",
    "cypress": "^12.7.0",
    "cypress-fail-on-console-error": "~4.0.2",
    "@cypress/schematic": "^2.5.0",
    "cypress": "^12.17.1",
    "cypress-fail-on-console-error": "~4.0.3",
    "cypress-wait-until": "^1.7.2",
    "mock-socket": "~9.1.5",
    "start-server-and-test": "~1.14.0"
    "mock-socket": "~9.2.1",
    "start-server-and-test": "~2.0.0"
  },
  "scarfSettings": {
    "enabled": false
@ -22,6 +22,7 @@ import { AssetsFeaturedComponent } from './components/assets/assets-featured/ass
import { AssetsComponent } from './components/assets/assets.component';
import { AssetComponent } from './components/asset/asset.component';
import { AssetsNavComponent } from './components/assets/assets-nav/assets-nav.component';
import { CalculatorComponent } from './components/calculator/calculator.component';

const browserWindow = window || {};
// @ts-ignore

@ -278,6 +279,10 @@ let routes: Routes = [
    path: 'rbf',
    component: RbfList,
  },
  {
    path: 'tools/calculator',
    component: CalculatorComponent
  },
  {
    path: 'terms-of-service',
    component: TermsOfServiceComponent
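For orientation, the new route entry makes the calculator reachable at /tools/calculator. A minimal, hypothetical sketch of navigating to it with the standard Angular Router API (the component name below is invented for illustration and is not part of this diff):

```typescript
import { Component } from '@angular/core';
import { Router } from '@angular/router';

// Illustrative only: shows how the newly registered 'tools/calculator' route
// can be reached programmatically.
@Component({
  selector: 'app-open-calculator-link',
  template: `<button (click)="openCalculator()">Open fee calculator</button>`,
})
export class OpenCalculatorLinkComponent {
  constructor(private router: Router) {}

  openCalculator(): void {
    this.router.navigate(['/tools/calculator']);
  }
}
```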
@ -1,4 +1,4 @@
import { BrowserModule, BrowserTransferStateModule } from '@angular/platform-browser';
import { BrowserModule } from '@angular/platform-browser';
import { ModuleWithProviders, NgModule } from '@angular/core';
import { HttpClientModule, HTTP_INTERCEPTORS } from '@angular/common/http';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';

@ -48,8 +48,7 @@ const providers = [
    AppComponent,
  ],
  imports: [
    BrowserModule.withServerTransition({ appId: 'serverApp' }),
    BrowserTransferStateModule,
    BrowserModule,
    AppRoutingModule,
    HttpClientModule,
    BrowserAnimationsModule,
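These removals line up with the Angular 16 upgrade elsewhere in this commit: recent Angular versions no longer ship BrowserTransferStateModule, and BrowserModule.withServerTransition() gives way to the APP_ID injection token. A minimal sketch of that replacement, shown only as background and not taken from this diff:

```typescript
import { APP_ID, NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';

@NgModule({
  imports: [BrowserModule],
  // Supplies the application id that withServerTransition({ appId: 'serverApp' })
  // used to set, for apps that still need a fixed id (e.g. for server-side rendering).
  providers: [{ provide: APP_ID, useValue: 'serverApp' }],
})
export class AppModuleSketch {}
```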
@ -64,9 +64,10 @@
{{ bisqTx.burntFee / 100 | number: '1.2-2' }} <span class="symbol">BSQ</span> <span class="fiat"><app-bsq-amount [bsq]="bisqTx.burntFee" [forceFiat]="true" [green]="true"></app-bsq-amount></span>
</tr>
<tr>
  <td i18n="transaction.fee-per-vbyte|Transaction fee">Fee per vByte</td>
  <td *only-vsize i18n="transaction.fee-per-vbyte|Transaction fee">Fee per vByte</td>
  <td *only-weight i18n="transaction.fee-per-wu|Transaction fee">Fee per weight unit</td>
  <td *ngIf="!isLoadingTx; else loadingTxFee">
    {{ tx.fee / (tx.weight / 4) | feeRounding }} <span class="symbol">sat/vB</span>
    <app-fee-rate [fee]="tx.fee" [weight]="tx.weight"></app-fee-rate>

    <app-tx-fee-rating [tx]="tx"></app-tx-fee-rating>
  </td>
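Two notes on this hunk. First, the inline expression it replaces, tx.fee / (tx.weight / 4), is the fee rate in sat/vB, since a transaction's virtual size is its weight divided by four; the new <app-fee-rate> component encapsulates that calculation. Second, *only-vsize and *only-weight read like custom structural directives that render their host <td> only when the matching fee-display mode is active. Their real implementations live elsewhere in the frontend; the sketch below only illustrates the general pattern, and the FeeDisplayMode service it injects is invented for the example:

```typescript
import { Directive, Injectable, TemplateRef, ViewContainerRef } from '@angular/core';

// Invented stand-in for whatever state the real directives consult.
@Injectable({ providedIn: 'root' })
export class FeeDisplayMode {
  mode: 'vsize' | 'weight' = 'vsize';
}

@Directive({ selector: '[only-vsize]' })
export class OnlyVsizeSketchDirective {
  constructor(
    templateRef: TemplateRef<unknown>,
    viewContainer: ViewContainerRef,
    displayMode: FeeDisplayMode,
  ) {
    // A structural directive receives its host element as a template;
    // render it only while the vsize display mode is selected.
    if (displayMode.mode === 'vsize') {
      viewContainer.createEmbeddedView(templateRef);
    } else {
      viewContainer.clear();
    }
  }
}
```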
@ -112,7 +112,7 @@ export class BisqTransactionComponent implements OnInit, OnDestroy {
      this.error = error;
    });

    this.latestBlock$ = this.stateService.blocks$.pipe(map((([block]) => block)));
    this.latestBlock$ = this.stateService.blocks$.pipe(map((blocks) => blocks[0]));

    this.stateService.bsqPrice$
      .subscribe((bsqPrice) => {
@ -27,7 +27,7 @@ export class BisqTransfersComponent implements OnInit, OnChanges {
  }

  ngOnInit() {
    this.latestBlock$ = this.stateService.blocks$.pipe(map(([block]) => block));
    this.latestBlock$ = this.stateService.blocks$.pipe(map((blocks) => blocks[0]));
  }

  ngOnChanges() {
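Both this component and BisqTransactionComponent above switch from destructuring the first element in the callback signature to indexing into the emitted array; for a non-empty emission the two forms pick the same block. A standalone sketch with plain RxJS (the Block shape and values are illustrative, not the app's actual stateService):

```typescript
import { map, of } from 'rxjs';

interface Block {
  height: number;
}

// Stand-in for stateService.blocks$: an observable that emits arrays of blocks, newest first.
const blocks$ = of<Block[]>([{ height: 800000 }, { height: 799999 }]);

// Old style: destructure the first element in the parameter list.
const latestViaDestructuring$ = blocks$.pipe(map(([block]) => block));

// New style: index into the emitted array.
const latestViaIndex$ = blocks$.pipe(map((blocks) => blocks[0]));

latestViaDestructuring$.subscribe((b) => console.log('destructured:', b.height));
latestViaIndex$.subscribe((b) => console.log('indexed:', b.height));
```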
@ -281,3 +281,15 @@ export function isFeatureActive(network: string, height: number, feature: 'rbf'
    return false;
  }
}

export async function calcScriptHash$(script: string): Promise<string> {
  if (!/^[0-9a-fA-F]*$/.test(script) || script.length % 2 !== 0) {
    throw new Error('script is not a valid hex string');
  }
  const buf = Uint8Array.from(script.match(/.{2}/g).map((byte) => parseInt(byte, 16)));
  const hashBuffer = await crypto.subtle.digest('SHA-256', buf);
  const hashArray = Array.from(new Uint8Array(hashBuffer));
  return hashArray
    .map((bytes) => bytes.toString(16).padStart(2, '0'))
    .join('');
}
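For context, the new helper hashes the raw script bytes with SHA-256 through the Web Crypto API and returns the digest as lowercase hex. A small usage sketch, assuming the function is the one exported above and that the code runs where crypto.subtle is available; the example script is an arbitrary OP_RETURN, not real data:

```typescript
// Declared here for the sketch; in the app it is exported by the module shown above.
declare function calcScriptHash$(script: string): Promise<string>;

async function logScriptHash(): Promise<void> {
  // OP_RETURN (0x6a) pushing two bytes, used purely as sample input.
  const script = '6a02abcd';
  const hash = await calcScriptHash$(script);
  console.log(`SHA-256 of script ${script}: ${hash}`); // 64 lowercase hex characters
}

logScriptHash().catch(console.error);
```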
@ -220,7 +220,7 @@
<img class="image" src="/resources/profile/mynodebtc.png" />
<span>myNode</span>
</a>
<a href="https://github.com/RoninDojo/RoninDojo" target="_blank" title="RoninDojo">
<a href="https://code.samourai.io/ronindojo/RoninDojo" target="_blank" title="RoninDojo">
  <img class="image" src="/resources/profile/ronindojo.png" />
  <span>RoninDojo</span>
</a>
@ -3,7 +3,7 @@
  <span i18n="shared.address">Address</span>
</app-preview-title>
<div class="row">
  <div class="col-md">
  <div class="col-md table-col">
    <div class="row d-flex justify-content-between">
      <div class="title-wrapper">
        <h1 class="title"><app-truncate [text]="addressString"></app-truncate></h1>
@ -20,6 +20,11 @@
  margin-right: 15px;
}

.table-col {
  max-width: calc(100% - 470px);
  overflow: hidden;
}

.table {
  font-size: 32px;
  margin-top: 48px;
@ -64,13 +64,15 @@ export class AddressPreviewComponent implements OnInit, OnDestroy {
        this.address = null;
        this.addressInfo = null;
        this.addressString = params.get('id') || '';
        if (/^[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100}$/.test(this.addressString)) {
        if (/^[A-Z]{2,5}1[AC-HJ-NP-Z02-9]{8,100}|[A-F0-9]{130}$/.test(this.addressString)) {
          this.addressString = this.addressString.toLowerCase();
        }
        this.seoService.setTitle($localize`:@@address.component.browser-title:Address: ${this.addressString}:INTERPOLATION:`);

        return this.electrsApiService.getAddress$(this.addressString)
          .pipe(
        return (this.addressString.match(/[a-f0-9]{130}/)
          ? this.electrsApiService.getPubKeyAddress$(this.addressString)
          : this.electrsApiService.getAddress$(this.addressString)
        ).pipe(
          catchError((err) => {
            this.isLoadingAddress = false;
            this.error = err;
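The widened regex and the new ternary exist so that a 130-character hex string, i.e. an uncompressed public key, is looked up through getPubKeyAddress$ while everything else keeps using the regular address endpoint (an all-uppercase bech32-style string is first lowercased, as before). A tiny standalone sketch of the same decision, with illustrative inputs rather than real keys or addresses:

```typescript
function classifyQuery(input: string): 'pubkey' | 'address' {
  // Mirrors the check in the component: 130 hex characters means an
  // uncompressed public key (0x04 prefix plus two 32-byte coordinates).
  return /[a-f0-9]{130}/.test(input.toLowerCase()) ? 'pubkey' : 'address';
}

console.log(classifyQuery('04' + 'ab'.repeat(64)));           // 'pubkey' (130 hex chars)
console.log(classifyQuery('bc1qexampleexampleexampleexample')); // 'address'
```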
@ -81,6 +81,7 @@ h1 {
    top: 11px;
  }
  @media (min-width: 768px) {
    max-width: calc(100% - 180px);
    top: 17px;
  }
}
Some files were not shown because too many files have changed in this diff.