Merge pull request #3889 from mempool/mononaut/rust-gbt

Rust GBT
commit 408c86963b by wiz, 2023-07-09 13:27:52 +09:00, committed by GitHub
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
43 changed files with 9505 additions and 107 deletions


@ -27,6 +27,11 @@ jobs:
node-version: ${{ matrix.node }}
registry-url: "https://registry.npmjs.org"
- name: Install 1.70.x Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: 1.70
- name: Install
if: ${{ matrix.flavor == 'dev'}}
run: npm ci

1
.gitignore vendored

@ -5,3 +5,4 @@ backend/mempool-config.json
*.swp
frontend/src/resources/config.template.js
frontend/src/resources/config.js
target


@ -1,5 +1,6 @@
{
"editor.tabSize": 2,
"typescript.preferences.importModuleSpecifier": "relative",
"typescript.tsdk": "./backend/node_modules/typescript/lib",
"rust-analyzer.procMacro.ignored": { "napi-derive": ["napi"] }
}

533
Cargo.lock generated Normal file

@ -0,0 +1,533 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04"
dependencies = [
"memchr",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "bitflags"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
[[package]]
name = "bytemuck"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea"
[[package]]
name = "bytes"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "ctor"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1586fa608b1dab41f667475b4a41faec5ba680aee428bfa5de4ea520fdc6e901"
dependencies = [
"quote",
"syn 2.0.20",
]
[[package]]
name = "gbt"
version = "0.1.0"
dependencies = [
"bytemuck",
"bytes",
"napi",
"napi-build",
"napi-derive",
"priority-queue",
"tracing",
"tracing-log",
"tracing-subscriber",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hermit-abi"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
dependencies = [
"libc",
]
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
[[package]]
name = "libloading"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
]
[[package]]
name = "log"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "matchers"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
dependencies = [
"regex-automata",
]
[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "napi"
version = "2.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ede2d12cd6fce44da537a4be1f5510c73be2506c2e32dfaaafd1f36968f3a0e"
dependencies = [
"bitflags",
"ctor",
"napi-derive",
"napi-sys",
"once_cell",
"tokio",
]
[[package]]
name = "napi-build"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "882a73d9ef23e8dc2ebbffb6a6ae2ef467c0f18ac10711e4cc59c5485d41df0e"
[[package]]
name = "napi-derive"
version = "2.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da1c6a8fa84d549aa8708fcd062372bf8ec6e849de39016ab921067d21bde367"
dependencies = [
"cfg-if",
"convert_case",
"napi-derive-backend",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "napi-derive-backend"
version = "1.0.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20bbc7c69168d06a848f925ec5f0e0997f98e8c8d4f2cc30157f0da51c009e17"
dependencies = [
"convert_case",
"once_cell",
"proc-macro2",
"quote",
"regex",
"semver",
"syn 1.0.109",
]
[[package]]
name = "napi-sys"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "166b5ef52a3ab5575047a9fe8d4a030cdd0f63c96f071cd6907674453b07bae3"
dependencies = [
"libloading",
]
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
"overload",
"winapi",
]
[[package]]
name = "num_cpus"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "once_cell"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "pin-project-lite"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
[[package]]
name = "priority-queue"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fff39edfcaec0d64e8d0da38564fad195d2d51b680940295fcc307366e101e61"
dependencies = [
"autocfg",
"indexmap",
]
[[package]]
name = "proc-macro2"
version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.7.2",
]
[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
"regex-syntax 0.6.29",
]
[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
[[package]]
name = "semver"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "sharded-slab"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
dependencies = [
"lazy_static",
]
[[package]]
name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb8d4cebc40aa517dfb69618fa647a346562e67228e2236ae0042ee6ac14775"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thread_local"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
dependencies = [
"cfg-if",
"once_cell",
]
[[package]]
name = "tokio"
version = "1.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2"
dependencies = [
"autocfg",
"num_cpus",
"pin-project-lite",
"windows-sys",
]
[[package]]
name = "tracing"
version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
dependencies = [
"cfg-if",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.20",
]
[[package]]
name = "tracing-core"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
dependencies = [
"once_cell",
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
dependencies = [
"lazy_static",
"log",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
]
[[package]]
name = "unicode-ident"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "unicode-segmentation"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "valuable"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
[[package]]
name = "windows_i686_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
[[package]]
name = "windows_i686_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"

8
Cargo.toml Normal file

@ -0,0 +1,8 @@
[workspace]
members = [
"./backend/rust-gbt",
]
[profile.release]
lto = true
codegen-units = 1


@ -79,6 +79,8 @@ Query OK, 0 rows affected (0.00 sec)
_Make sure to use Node.js 16.10 and npm 7._
_The build process requires [Rust](https://www.rust-lang.org/tools/install) to be installed._
Install dependencies with `npm` and build the backend:
```


@ -27,6 +27,7 @@
"AUDIT": false, "AUDIT": false,
"ADVANCED_GBT_AUDIT": false, "ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false, "ADVANCED_GBT_MEMPOOL": false,
"RUST_GBT": false,
"CPFP_INDEXING": false, "CPFP_INDEXING": false,
"DISK_CACHE_BLOCK_INTERVAL": 6 "DISK_CACHE_BLOCK_INTERVAL": 6
}, },


@ -19,6 +19,7 @@
"maxmind": "~4.3.8", "maxmind": "~4.3.8",
"mysql2": "~3.2.0", "mysql2": "~3.2.0",
"node-worker-threads-pool": "~1.5.1", "node-worker-threads-pool": "~1.5.1",
"rust-gbt": "file:./rust-gbt",
"socks-proxy-agent": "~7.0.0", "socks-proxy-agent": "~7.0.0",
"typescript": "~4.7.4", "typescript": "~4.7.4",
"ws": "~8.13.0" "ws": "~8.13.0"
@ -1485,6 +1486,22 @@
"node": ">=6" "node": ">=6"
} }
}, },
"node_modules/@napi-rs/cli": {
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.16.1.tgz",
"integrity": "sha512-L0Gr5iEQIDEbvWdDr1HUaBOxBSHL1VZhWSk1oryawoT8qJIY+KGfLFelU+Qma64ivCPbxYpkfPoKYVG3rcoGIA==",
"dev": true,
"bin": {
"napi": "scripts/index.js"
},
"engines": {
"node": ">= 10"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Brooooooklyn"
}
},
"node_modules/@noble/hashes": { "node_modules/@noble/hashes": {
"version": "1.3.0", "version": "1.3.0",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.0.tgz", "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.0.tgz",
@ -6665,6 +6682,10 @@
"queue-microtask": "^1.2.2" "queue-microtask": "^1.2.2"
} }
}, },
"node_modules/rust-gbt": {
"resolved": "rust-gbt",
"link": true
},
"node_modules/safe-buffer": { "node_modules/safe-buffer": {
"version": "5.2.1", "version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@ -7544,6 +7565,17 @@
"funding": { "funding": {
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
},
"rust-gbt": {
"name": "gbt",
"version": "0.1.0",
"hasInstallScript": true,
"devDependencies": {
"@napi-rs/cli": "^2.16.1"
},
"engines": {
"node": ">= 12"
}
}
},
"dependencies": {
@ -8631,6 +8663,12 @@
"resolved": "https://registry.npmjs.org/@mempool/electrum-client/-/electrum-client-1.1.9.tgz", "resolved": "https://registry.npmjs.org/@mempool/electrum-client/-/electrum-client-1.1.9.tgz",
"integrity": "sha512-mlvPiCzUlaETpYW3i6V87A24jjMYgsebaXtUo3WQyyLnYUuxs0KiXQ2mnKh3h15j8Xg/hfxeGIi+5OC9u0nftQ==" "integrity": "sha512-mlvPiCzUlaETpYW3i6V87A24jjMYgsebaXtUo3WQyyLnYUuxs0KiXQ2mnKh3h15j8Xg/hfxeGIi+5OC9u0nftQ=="
}, },
"@napi-rs/cli": {
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.16.1.tgz",
"integrity": "sha512-L0Gr5iEQIDEbvWdDr1HUaBOxBSHL1VZhWSk1oryawoT8qJIY+KGfLFelU+Qma64ivCPbxYpkfPoKYVG3rcoGIA==",
"dev": true
},
"@noble/hashes": { "@noble/hashes": {
"version": "1.3.0", "version": "1.3.0",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.0.tgz", "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.0.tgz",
@ -12481,6 +12519,12 @@
"queue-microtask": "^1.2.2" "queue-microtask": "^1.2.2"
} }
}, },
"rust-gbt": {
"version": "file:rust-gbt",
"requires": {
"@napi-rs/cli": "^2.16.1"
}
},
"safe-buffer": { "safe-buffer": {
"version": "5.2.1", "version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",


@ -22,10 +22,10 @@
"main": "index.ts", "main": "index.ts",
"scripts": { "scripts": {
"tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json", "tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json",
"build": "npm run tsc && npm run create-resources", "build": "npm run build-rust && npm run tsc && npm run create-resources",
"create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js", "create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js",
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && npm run package-rm-build-deps", "package": "npm run build && rm -rf package && mv dist package && mv node_modules package && mv rust-gbt package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint)", "package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint @napi-rs ../rust-gbt/target ../rust-gbt/node_modules ../rust-gbt/src)",
"start": "node --max-old-space-size=2048 dist/index.js", "start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=16384 dist/index.js", "start-production": "node --max-old-space-size=16384 dist/index.js",
"reindex-updated-pools": "npm run start-production --update-pools", "reindex-updated-pools": "npm run start-production --update-pools",
@ -33,7 +33,8 @@
"test": "./node_modules/.bin/jest --coverage", "test": "./node_modules/.bin/jest --coverage",
"lint": "./node_modules/.bin/eslint . --ext .ts", "lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix", "lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\"" "prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\"",
"build-rust": "cd rust-gbt && npm install"
},
"dependencies": {
"@babel/core": "^7.21.3",
@ -46,6 +47,7 @@
"maxmind": "~4.3.8", "maxmind": "~4.3.8",
"mysql2": "~3.2.0", "mysql2": "~3.2.0",
"node-worker-threads-pool": "~1.5.1", "node-worker-threads-pool": "~1.5.1",
"rust-gbt": "file:./rust-gbt",
"socks-proxy-agent": "~7.0.0", "socks-proxy-agent": "~7.0.0",
"typescript": "~4.7.4", "typescript": "~4.7.4",
"ws": "~8.13.0" "ws": "~8.13.0"

4
backend/rust-gbt/.gitignore vendored Normal file

@ -0,0 +1,4 @@
*.node
**/node_modules
**/.DS_Store
npm-debug.log*


@ -0,0 +1,25 @@
[package]
name = "gbt"
version = "0.1.0"
description = "An inefficient re-implementation of the getBlockTemplate algorithm in Rust"
authors = ["mononaut"]
edition = "2021"
publish = false
[lib]
crate-type = ["cdylib"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
priority-queue = "1.3.2"
bytes = "1.4.0"
napi = { version = "2.13.2", features = ["napi8", "tokio_rt"] }
napi-derive = "2.13.0"
bytemuck = "1.13.1"
tracing = "0.1.36"
tracing-log = "0.1.3"
tracing-subscriber = { version = "0.3.15", features = ["env-filter"]}
[build-dependencies]
napi-build = "2.0.1"

123
backend/rust-gbt/README.md Normal file

@ -0,0 +1,123 @@
# gbt
**gbt:** Rust implementation of the getBlockTemplate algorithm
This project was bootstrapped by [napi](https://www.npmjs.com/package/@napi-rs/cli).
## Installing gbt
Installing gbt requires a [supported version of Node and Rust](https://github.com/napi-rs/napi-rs#platform-support).
The build process also requires [Rust](https://www.rust-lang.org/tools/install) to be installed.
You can install the project with npm. In the project directory, run:
```sh
$ npm install
```
This fully installs the project, including installing any dependencies and running the build.
## Building gbt
If you have already installed the project and only want to run the build, run:
```sh
$ npm run build
```
This command uses the [napi build](https://www.npmjs.com/package/@napi-rs/cli) utility to run the Rust build and copy the built library into `./gbt.[TARGET_TRIPLE].node`.
## Exploring gbt
After building gbt, you can explore its exports at the Node REPL:
```sh
$ npm install
$ node
> Object.keys(require('.'))
[ 'GbtGenerator', 'GbtResult' ]
```
## Available Scripts
In the project directory, you can run:
### `npm install`
Installs the project, including running `npm run build-release`.
### `npm run build`
Builds the Node addon (`gbt.[TARGET_TRIPLE].node`) from source.
Additional [`cargo build`](https://doc.rust-lang.org/cargo/commands/cargo-build.html) arguments may be passed to `npm run build` and `npm run build-*` commands. For example, to enable a [cargo feature](https://doc.rust-lang.org/cargo/reference/features.html):
```
npm run build -- --features=beetle
```
#### `npm run build-debug`
Alias for `npm run build`.
#### `npm run build-release`
Same as [`npm run build`](#npm-run-build), but builds the module with the [`release`](https://doc.rust-lang.org/cargo/reference/profiles.html#release) profile. Release builds compile more slowly but run faster.
### `npm test`
Runs the unit tests by calling `cargo test`. You can learn more about [adding tests to your Rust code](https://doc.rust-lang.org/book/ch11-01-writing-tests.html) from the [Rust book](https://doc.rust-lang.org/book/).
## Project Layout
The directory structure of this project is:
```
gbt/
├── Cargo.toml
├── README.md
├── gbt.[TARGET_TRIPLE].node
├── package.json
├── src/
| └── lib.rs
└── target/
```
### Cargo.toml
The Cargo [manifest file](https://doc.rust-lang.org/cargo/reference/manifest.html), which informs the `cargo` command.
### README.md
This file.
### gbt.\[TARGET_TRIPLE\].node
The Node addon—i.e., a binary Node module—generated by building the project. This is the main module for this package, as dictated by the `"main"` key in `package.json`.
Under the hood, a [Node addon](https://nodejs.org/api/addons.html) is a [dynamically-linked shared object](https://en.wikipedia.org/wiki/Library_(computing)#Shared_libraries). The `"build"` script produces this file by copying it from within the `target/` directory, which is where the Rust build produces the shared object.
### package.json
The npm [manifest file](https://docs.npmjs.com/cli/v7/configuring-npm/package-json), which informs the `npm` command.
### src/
The directory tree containing the Rust source code for the project.
### src/lib.rs
The Rust library's main module.
### target/
Binary artifacts generated by the Rust build.
## Learn More
To learn more about Napi-RS, see the [Napi-RS documentation](https://napi.rs/docs/introduction/getting-started).
To learn more about Rust, see the [Rust documentation](https://www.rust-lang.org).
To learn more about Node, see the [Node documentation](https://nodejs.org).


@ -0,0 +1,3 @@
fn main() {
napi_build::setup();
}

45
backend/rust-gbt/index.d.ts vendored Normal file

@ -0,0 +1,45 @@
/* tslint:disable */
/* eslint-disable */
/* auto-generated by NAPI-RS */
export interface ThreadTransaction {
uid: number
order: number
fee: number
weight: number
sigops: number
effectiveFeePerVsize: number
inputs: Array<number>
}
export class GbtGenerator {
constructor()
/**
* # Errors
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
make(mempool: Array<ThreadTransaction>, maxUid: number): Promise<GbtResult>
/**
* # Errors
*
* Rejects if the thread panics or if the Mutex is poisoned.
*/
update(newTxs: Array<ThreadTransaction>, removeTxs: Array<number>, maxUid: number): Promise<GbtResult>
}
/**
* The result from calling the gbt function.
*
* It contains the following:
* blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
* block_weights: A Vector of total weights per block.
* clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
* rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
*/
export class GbtResult {
blocks: Array<Array<number>>
blockWeights: Array<number>
clusters: Array<Array<number>>
rates: Array<Array<number>>
constructor(blocks: Array<Array<number>>, blockWeights: Array<number>, clusters: Array<Array<number>>, rates: Array<Array<number>>)
}
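
For orientation, here is a minimal sketch of how these bindings can be driven from Node. The helper name and all transaction values are purely illustrative; the real wiring lives in the mempool backend's block-template code.

```ts
import { GbtGenerator } from 'rust-gbt';
import type { ThreadTransaction } from 'rust-gbt';

async function projectBlocks(): Promise<void> {
  // Hypothetical single-transaction mempool; all field values are made up for illustration.
  const mempool: ThreadTransaction[] = [
    { uid: 1, order: 0, fee: 1000, weight: 560, sigops: 1, effectiveFeePerVsize: 7.14, inputs: [] },
  ];
  const gbt = new GbtGenerator();
  // maxUid sizes the uid-indexed pool on the Rust side, so it must be >= the largest uid passed in.
  const result = await gbt.make(mempool, 1);
  console.log(result.blocks);       // e.g. [ [ 1 ] ] -- one projected block containing tx uid 1
  console.log(result.blockWeights); // total weight units in each projected block
}

projectBlocks().catch(console.error);
```

Per the `# Errors` notes above, the returned promise rejects (rather than throwing synchronously) if the worker thread panics or the internal Mutex is poisoned.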

258
backend/rust-gbt/index.js Normal file

@ -0,0 +1,258 @@
/* tslint:disable */
/* eslint-disable */
/* prettier-ignore */
/* auto-generated by NAPI-RS */
const { existsSync, readFileSync } = require('fs')
const { join } = require('path')
const { platform, arch } = process
let nativeBinding = null
let localFileExisted = false
let loadError = null
function isMusl() {
// For Node 10
if (!process.report || typeof process.report.getReport !== 'function') {
try {
const lddPath = require('child_process').execSync('which ldd').toString().trim()
return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
return true
}
} else {
const { glibcVersionRuntime } = process.report.getReport().header
return !glibcVersionRuntime
}
}
switch (platform) {
case 'android':
switch (arch) {
case 'arm64':
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.android-arm64.node')
} else {
nativeBinding = require('gbt-android-arm64')
}
} catch (e) {
loadError = e
}
break
case 'arm':
localFileExisted = existsSync(join(__dirname, 'gbt.android-arm-eabi.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.android-arm-eabi.node')
} else {
nativeBinding = require('gbt-android-arm-eabi')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Android ${arch}`)
}
break
case 'win32':
switch (arch) {
case 'x64':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-x64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-x64-msvc.node')
} else {
nativeBinding = require('gbt-win32-x64-msvc')
}
} catch (e) {
loadError = e
}
break
case 'ia32':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-ia32-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-ia32-msvc.node')
} else {
nativeBinding = require('gbt-win32-ia32-msvc')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'gbt.win32-arm64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.win32-arm64-msvc.node')
} else {
nativeBinding = require('gbt-win32-arm64-msvc')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
break
case 'darwin':
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-universal.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-universal.node')
} else {
nativeBinding = require('gbt-darwin-universal')
}
break
} catch {}
switch (arch) {
case 'x64':
localFileExisted = existsSync(join(__dirname, 'gbt.darwin-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-x64.node')
} else {
nativeBinding = require('gbt-darwin-x64')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'gbt.darwin-arm64.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.darwin-arm64.node')
} else {
nativeBinding = require('gbt-darwin-arm64')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on macOS: ${arch}`)
}
break
case 'freebsd':
if (arch !== 'x64') {
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
}
localFileExisted = existsSync(join(__dirname, 'gbt.freebsd-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./gbt.freebsd-x64.node')
} else {
nativeBinding = require('gbt-freebsd-x64')
}
} catch (e) {
loadError = e
}
break
case 'linux':
switch (arch) {
case 'x64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-x64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-x64-musl.node')
} else {
nativeBinding = require('gbt-linux-x64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-x64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-x64-gnu.node')
} else {
nativeBinding = require('gbt-linux-x64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm64-musl.node')
} else {
nativeBinding = require('gbt-linux-arm64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm64-gnu.node')
} else {
nativeBinding = require('gbt-linux-arm64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm':
localFileExisted = existsSync(
join(__dirname, 'gbt.linux-arm-gnueabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./gbt.linux-arm-gnueabihf.node')
} else {
nativeBinding = require('gbt-linux-arm-gnueabihf')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Linux: ${arch}`)
}
break
default:
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}
if (!nativeBinding) {
if (loadError) {
throw loadError
}
throw new Error(`Failed to load native binding`)
}
const { GbtGenerator, GbtResult } = nativeBinding
module.exports.GbtGenerator = GbtGenerator
module.exports.GbtResult = GbtResult

34
backend/rust-gbt/package-lock.json generated Normal file

@ -0,0 +1,34 @@
{
"name": "gbt",
"version": "0.1.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "gbt",
"version": "0.1.0",
"hasInstallScript": true,
"dependencies": {
"@napi-rs/cli": "^2.16.1"
},
"engines": {
"node": ">= 12"
}
},
"node_modules/@napi-rs/cli": {
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.16.1.tgz",
"integrity": "sha512-L0Gr5iEQIDEbvWdDr1HUaBOxBSHL1VZhWSk1oryawoT8qJIY+KGfLFelU+Qma64ivCPbxYpkfPoKYVG3rcoGIA==",
"bin": {
"napi": "scripts/index.js"
},
"engines": {
"node": ">= 10"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Brooooooklyn"
}
}
}
}


@ -0,0 +1,33 @@
{
"name": "gbt",
"version": "0.1.0",
"description": "An inefficient re-implementation of the getBlockTemplate algorithm in Rust",
"main": "index.js",
"types": "index.d.ts",
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform",
"build-debug": "npm run build",
"build-release": "npm run build -- --release --strip",
"install": "npm run build-release",
"prepublishOnly": "napi prepublish -t npm",
"test": "cargo test"
},
"author": "mononaut",
"napi": {
"name": "gbt",
"triples": {
"defaults": false,
"additional": [
"x86_64-unknown-linux-gnu",
"x86_64-unknown-freebsd"
]
}
},
"dependencies": {
"@napi-rs/cli": "^2.16.1"
},
"engines": {
"node": ">= 12"
}
}


@ -0,0 +1,220 @@
use crate::{
u32_hasher_types::{u32hashset_new, U32HasherState},
ThreadTransaction,
};
use std::{
cmp::Ordering,
collections::HashSet,
hash::{Hash, Hasher},
};
#[allow(clippy::struct_excessive_bools)]
#[derive(Clone, Debug)]
pub struct AuditTransaction {
pub uid: u32,
order: u32,
pub fee: u64,
pub weight: u32,
// exact sigop-adjusted weight
pub sigop_adjusted_weight: u32,
// sigop-adjusted vsize rounded up to the next integer
pub sigop_adjusted_vsize: u32,
pub sigops: u32,
adjusted_fee_per_vsize: f64,
pub effective_fee_per_vsize: f64,
pub dependency_rate: f64,
pub inputs: Vec<u32>,
pub relatives_set_flag: bool,
pub ancestors: HashSet<u32, U32HasherState>,
pub children: HashSet<u32, U32HasherState>,
ancestor_fee: u64,
ancestor_sigop_adjusted_weight: u32,
ancestor_sigop_adjusted_vsize: u32,
ancestor_sigops: u32,
// Safety: Must be private to prevent NaN breaking Ord impl.
score: f64,
pub used: bool,
/// whether this transaction has been moved to the "modified" priority queue
pub modified: bool,
pub dirty: bool,
}
impl Hash for AuditTransaction {
fn hash<H: Hasher>(&self, state: &mut H) {
self.uid.hash(state);
}
}
impl PartialEq for AuditTransaction {
fn eq(&self, other: &Self) -> bool {
self.uid == other.uid
}
}
impl Eq for AuditTransaction {}
#[inline]
pub fn partial_cmp_uid_score(a: (u32, u32, f64), b: (u32, u32, f64)) -> Option<Ordering> {
// If either score is NaN, this is false,
// and partial_cmp will return None
if a.2 != b.2 {
// compare by score (sorts by ascending score)
a.2.partial_cmp(&b.2)
} else if a.1 != b.1 {
// tie-break by comparing partial txids (sorts by descending txid)
Some(b.1.cmp(&a.1))
} else {
// tie-break partial txid collisions by comparing uids (sorts by descending uid)
Some(b.0.cmp(&a.0))
}
}
impl PartialOrd for AuditTransaction {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
partial_cmp_uid_score(
(self.uid, self.order, self.score),
(other.uid, other.order, other.score),
)
}
}
impl Ord for AuditTransaction {
fn cmp(&self, other: &Self) -> Ordering {
// Safety: The only possible values for score are f64
// that are not NaN. This is because outside code can not
// freely assign score. Also, calc_new_score guarantees no NaN.
self.partial_cmp(other).expect("score will never be NaN")
}
}
#[inline]
fn calc_fee_rate(fee: f64, vsize: f64) -> f64 {
fee / (if vsize == 0.0 { 1.0 } else { vsize })
}
impl AuditTransaction {
pub fn from_thread_transaction(tx: &ThreadTransaction) -> Self {
// rounded up to the nearest integer
let is_adjusted = tx.weight < (tx.sigops * 20);
let sigop_adjusted_vsize = ((tx.weight + 3) / 4).max(tx.sigops * 5);
let sigop_adjusted_weight = tx.weight.max(tx.sigops * 20);
let effective_fee_per_vsize = if is_adjusted {
calc_fee_rate(tx.fee, f64::from(sigop_adjusted_weight) / 4.0)
} else {
tx.effective_fee_per_vsize
};
Self {
uid: tx.uid,
order: tx.order,
fee: tx.fee as u64,
weight: tx.weight,
sigop_adjusted_weight,
sigop_adjusted_vsize,
sigops: tx.sigops,
adjusted_fee_per_vsize: calc_fee_rate(tx.fee, f64::from(sigop_adjusted_vsize)),
effective_fee_per_vsize,
dependency_rate: f64::INFINITY,
inputs: tx.inputs.clone(),
relatives_set_flag: false,
ancestors: u32hashset_new(),
children: u32hashset_new(),
ancestor_fee: tx.fee as u64,
ancestor_sigop_adjusted_weight: sigop_adjusted_weight,
ancestor_sigop_adjusted_vsize: sigop_adjusted_vsize,
ancestor_sigops: tx.sigops,
score: 0.0,
used: false,
modified: false,
dirty: effective_fee_per_vsize != tx.effective_fee_per_vsize,
}
}
#[inline]
pub const fn score(&self) -> f64 {
self.score
}
#[inline]
pub const fn order(&self) -> u32 {
self.order
}
#[inline]
pub const fn ancestor_sigop_adjusted_vsize(&self) -> u32 {
self.ancestor_sigop_adjusted_vsize
}
#[inline]
pub const fn ancestor_sigops(&self) -> u32 {
self.ancestor_sigops
}
#[inline]
pub fn cluster_rate(&self) -> f64 {
// Safety: self.ancestor_sigop_adjusted_weight can never be 0.
// Even if it could, as it approaches 0, the value inside the min() call
// grows, so if we think of 0 as "grew infinitely" then dependency_rate would be
// the smaller of the two. If either side is NaN, the other side is returned.
self.dependency_rate.min(calc_fee_rate(
self.ancestor_fee as f64,
f64::from(self.ancestor_sigop_adjusted_weight) / 4.0,
))
}
pub fn set_dirty_if_different(&mut self, cluster_rate: f64) {
if self.effective_fee_per_vsize != cluster_rate {
self.effective_fee_per_vsize = cluster_rate;
self.dirty = true;
}
}
/// Safety: This function must NEVER set score to NaN.
#[inline]
fn calc_new_score(&mut self) {
self.score = self.adjusted_fee_per_vsize.min(calc_fee_rate(
self.ancestor_fee as f64,
f64::from(self.ancestor_sigop_adjusted_vsize),
));
}
#[inline]
pub fn set_ancestors(
&mut self,
ancestors: HashSet<u32, U32HasherState>,
total_fee: u64,
total_sigop_adjusted_weight: u32,
total_sigop_adjusted_vsize: u32,
total_sigops: u32,
) {
self.ancestors = ancestors;
self.ancestor_fee = self.fee + total_fee;
self.ancestor_sigop_adjusted_weight =
self.sigop_adjusted_weight + total_sigop_adjusted_weight;
self.ancestor_sigop_adjusted_vsize = self.sigop_adjusted_vsize + total_sigop_adjusted_vsize;
self.ancestor_sigops = self.sigops + total_sigops;
self.calc_new_score();
self.relatives_set_flag = true;
}
#[inline]
pub fn remove_root(
&mut self,
root_txid: u32,
root_fee: u64,
root_sigop_adjusted_weight: u32,
root_sigop_adjusted_vsize: u32,
root_sigops: u32,
cluster_rate: f64,
) -> f64 {
let old_score = self.score();
self.dependency_rate = self.dependency_rate.min(cluster_rate);
if self.ancestors.remove(&root_txid) {
self.ancestor_fee -= root_fee;
self.ancestor_sigop_adjusted_weight -= root_sigop_adjusted_weight;
self.ancestor_sigop_adjusted_vsize -= root_sigop_adjusted_vsize;
self.ancestor_sigops -= root_sigops;
self.calc_new_score();
}
old_score
}
}
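
To make the sigop adjustment in `from_thread_transaction` concrete, here is the same arithmetic sketched in TypeScript (TypeScript only so all added examples share one language; the helper names are hypothetical and the Rust above is authoritative):

```ts
// Hypothetical helpers restating the sizing above: vsize is weight/4 rounded up,
// floored at 5 vbytes per sigop; weight is floored at 20 weight units per sigop.
function sigopAdjustedVsize(weight: number, sigops: number): number {
  return Math.max(Math.ceil(weight / 4), sigops * 5);
}

function sigopAdjustedWeight(weight: number, sigops: number): number {
  return Math.max(weight, sigops * 20);
}

console.log(sigopAdjustedVsize(560, 1));  // 140 -- weight-dominated, the sigop floor (5) is irrelevant
console.log(sigopAdjustedVsize(400, 30)); // 150 -- sigop-dominated: 30 * 5 exceeds ceil(400 / 4) = 100
```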

421
backend/rust-gbt/src/gbt.rs Normal file

@ -0,0 +1,421 @@
use priority_queue::PriorityQueue;
use std::{cmp::Ordering, collections::HashSet, mem::ManuallyDrop};
use tracing::{info, trace};
use crate::{
audit_transaction::{partial_cmp_uid_score, AuditTransaction},
u32_hasher_types::{u32hashset_new, u32priority_queue_with_capacity, U32HasherState},
GbtResult, ThreadTransactionsMap,
};
const MAX_BLOCK_WEIGHT_UNITS: u32 = 4_000_000 - 4_000;
const BLOCK_SIGOPS: u32 = 80_000;
const BLOCK_RESERVED_WEIGHT: u32 = 4_000;
const BLOCK_RESERVED_SIGOPS: u32 = 400;
const MAX_BLOCKS: usize = 8;
type AuditPool = Vec<Option<ManuallyDrop<AuditTransaction>>>;
type ModifiedQueue = PriorityQueue<u32, TxPriority, U32HasherState>;
#[derive(Debug)]
struct TxPriority {
uid: u32,
order: u32,
score: f64,
}
impl PartialEq for TxPriority {
fn eq(&self, other: &Self) -> bool {
self.uid == other.uid
}
}
impl Eq for TxPriority {}
impl PartialOrd for TxPriority {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
partial_cmp_uid_score(
(self.uid, self.order, self.score),
(other.uid, other.order, other.score),
)
}
}
impl Ord for TxPriority {
fn cmp(&self, other: &Self) -> Ordering {
self.partial_cmp(other).expect("score will never be NaN")
}
}
/// Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core.
///
/// See `BlockAssembler` in Bitcoin Core's
/// [miner.cpp](https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp).
/// Ported from mempool backend's
/// [tx-selection-worker.ts](https://github.com/mempool/mempool/blob/master/backend/src/api/tx-selection-worker.ts).
//
// TODO: Make gbt smaller to fix these lints.
#[allow(clippy::too_many_lines)]
#[allow(clippy::cognitive_complexity)]
pub fn gbt(mempool: &mut ThreadTransactionsMap, max_uid: usize) -> GbtResult {
let mempool_len = mempool.len();
let mut audit_pool: AuditPool = Vec::with_capacity(max_uid + 1);
audit_pool.resize(max_uid + 1, None);
let mut mempool_stack: Vec<u32> = Vec::with_capacity(mempool_len);
let mut clusters: Vec<Vec<u32>> = Vec::new();
let mut block_weights: Vec<u32> = Vec::new();
info!("Initializing working structs");
for (uid, tx) in &mut *mempool {
let audit_tx = AuditTransaction::from_thread_transaction(tx);
// Safety: audit_pool and mempool_stack must always contain the same transactions
audit_pool[*uid as usize] = Some(ManuallyDrop::new(audit_tx));
mempool_stack.push(*uid);
}
info!("Building relatives graph & calculate ancestor scores");
for txid in &mempool_stack {
set_relatives(*txid, &mut audit_pool);
}
trace!("Post relative graph Audit Pool: {:#?}", audit_pool);
info!("Sorting by descending ancestor score");
let mut mempool_stack: Vec<(u32, u32, f64)> = mempool_stack
.into_iter()
.map(|txid| {
let atx = audit_pool
.get(txid as usize)
.and_then(Option::as_ref)
.expect("All txids are from audit_pool");
(txid, atx.order(), atx.score())
})
.collect();
mempool_stack.sort_unstable_by(|a, b| partial_cmp_uid_score(*a, *b).expect("Not NaN"));
let mut mempool_stack: Vec<u32> = mempool_stack.into_iter().map(|(txid, _, _)| txid).collect();
info!("Building blocks by greedily choosing the highest feerate package");
info!("(i.e. the package rooted in the transaction with the best ancestor score)");
let mut blocks: Vec<Vec<u32>> = Vec::new();
let mut block_weight: u32 = BLOCK_RESERVED_WEIGHT;
let mut block_sigops: u32 = BLOCK_RESERVED_SIGOPS;
// No need to be bigger than 4096 transactions for the per-block transaction Vec.
let initial_txes_per_block: usize = 4096.min(mempool_len);
let mut transactions: Vec<u32> = Vec::with_capacity(initial_txes_per_block);
let mut modified: ModifiedQueue = u32priority_queue_with_capacity(mempool_len);
let mut overflow: Vec<u32> = Vec::new();
let mut failures = 0;
while !mempool_stack.is_empty() || !modified.is_empty() {
// This trace log storm is big, so to make scrolling through
// each iteration easier, leave a bunch of empty rows
// and a header of ======
trace!("\n\n\n\n\n\n\n\n\n\n==================================");
trace!("mempool_array: {:#?}", mempool_stack);
trace!("clusters: {:#?}", clusters);
trace!("modified: {:#?}", modified);
trace!("audit_pool: {:#?}", audit_pool);
trace!("blocks: {:#?}", blocks);
trace!("block_weight: {:#?}", block_weight);
trace!("block_sigops: {:#?}", block_sigops);
trace!("transactions: {:#?}", transactions);
trace!("overflow: {:#?}", overflow);
trace!("failures: {:#?}", failures);
trace!("\n==================================");
let next_from_stack = next_valid_from_stack(&mut mempool_stack, &audit_pool);
let next_from_queue = next_valid_from_queue(&mut modified, &audit_pool);
if next_from_stack.is_none() && next_from_queue.is_none() {
continue;
}
let (next_tx, from_stack) = match (next_from_stack, next_from_queue) {
(Some(stack_tx), Some(queue_tx)) => match queue_tx.cmp(stack_tx) {
std::cmp::Ordering::Less => (stack_tx, true),
_ => (queue_tx, false),
},
(Some(stack_tx), None) => (stack_tx, true),
(None, Some(queue_tx)) => (queue_tx, false),
(None, None) => unreachable!(),
};
if from_stack {
mempool_stack.pop();
} else {
modified.pop();
}
if blocks.len() < (MAX_BLOCKS - 1)
&& ((block_weight + (4 * next_tx.ancestor_sigop_adjusted_vsize())
>= MAX_BLOCK_WEIGHT_UNITS)
|| (block_sigops + next_tx.ancestor_sigops() > BLOCK_SIGOPS))
{
// hold this package in an overflow list while we check for smaller options
overflow.push(next_tx.uid);
failures += 1;
} else {
let mut package: Vec<(u32, u32, usize)> = Vec::new();
let mut cluster: Vec<u32> = Vec::new();
let is_cluster: bool = !next_tx.ancestors.is_empty();
for ancestor_id in &next_tx.ancestors {
if let Some(Some(ancestor)) = audit_pool.get(*ancestor_id as usize) {
package.push((*ancestor_id, ancestor.order(), ancestor.ancestors.len()));
}
}
package.sort_unstable_by(|a, b| -> Ordering {
if a.2 != b.2 {
// order by ascending ancestor count
a.2.cmp(&b.2)
} else if a.1 != b.1 {
// tie-break by ascending partial txid
a.1.cmp(&b.1)
} else {
// tie-break partial txid collisions by ascending uid
a.0.cmp(&b.0)
}
});
package.push((next_tx.uid, next_tx.order(), next_tx.ancestors.len()));
let cluster_rate = next_tx.cluster_rate();
for (txid, _, _) in &package {
cluster.push(*txid);
if let Some(Some(tx)) = audit_pool.get_mut(*txid as usize) {
tx.used = true;
tx.set_dirty_if_different(cluster_rate);
transactions.push(tx.uid);
block_weight += tx.weight;
block_sigops += tx.sigops;
}
update_descendants(*txid, &mut audit_pool, &mut modified, cluster_rate);
}
if is_cluster {
clusters.push(cluster);
}
failures = 0;
}
// this block is full
let exceeded_package_tries =
failures > 1000 && block_weight > (MAX_BLOCK_WEIGHT_UNITS - BLOCK_RESERVED_WEIGHT);
let queue_is_empty = mempool_stack.is_empty() && modified.is_empty();
if (exceeded_package_tries || queue_is_empty) && blocks.len() < (MAX_BLOCKS - 1) {
// finalize this block
if !transactions.is_empty() {
blocks.push(transactions);
block_weights.push(block_weight);
}
// reset for the next block
transactions = Vec::with_capacity(initial_txes_per_block);
block_weight = BLOCK_RESERVED_WEIGHT;
block_sigops = BLOCK_RESERVED_SIGOPS;
failures = 0;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
overflow.reverse();
for overflowed in &overflow {
if let Some(Some(overflowed_tx)) = audit_pool.get(*overflowed as usize) {
if overflowed_tx.modified {
modified.push(
*overflowed,
TxPriority {
uid: *overflowed,
order: overflowed_tx.order(),
score: overflowed_tx.score(),
},
);
} else {
mempool_stack.push(*overflowed);
}
}
}
overflow = Vec::new();
}
}
info!("add the final unbounded block if it contains any transactions");
if !transactions.is_empty() {
blocks.push(transactions);
block_weights.push(block_weight);
}
info!("make a list of dirty transactions and their new rates");
let mut rates: Vec<Vec<f64>> = Vec::new();
for (uid, thread_tx) in mempool {
// Takes ownership of the audit_tx and replaces with None
if let Some(Some(audit_tx)) = audit_pool.get_mut(*uid as usize).map(Option::take) {
trace!("txid: {}, is_dirty: {}", uid, audit_tx.dirty);
if audit_tx.dirty {
rates.push(vec![f64::from(*uid), audit_tx.effective_fee_per_vsize]);
thread_tx.effective_fee_per_vsize = audit_tx.effective_fee_per_vsize;
}
// Drops the AuditTransaction manually
// There are no audit_txs that are not in the mempool HashMap
// So there is guaranteed to be no memory leaks.
ManuallyDrop::into_inner(audit_tx);
}
}
trace!("\n\n\n\n\n====================");
trace!("blocks: {:#?}", blocks);
trace!("clusters: {:#?}", clusters);
trace!("rates: {:#?}\n====================\n\n\n\n\n", rates);
GbtResult {
blocks,
block_weights,
clusters,
rates,
}
}
fn next_valid_from_stack<'a>(
mempool_stack: &mut Vec<u32>,
audit_pool: &'a AuditPool,
) -> Option<&'a AuditTransaction> {
while let Some(next_txid) = mempool_stack.last() {
match audit_pool.get(*next_txid as usize) {
Some(Some(tx)) if !tx.used && !tx.modified => {
return Some(tx);
}
_ => {
mempool_stack.pop();
}
}
}
None
}
fn next_valid_from_queue<'a>(
queue: &mut ModifiedQueue,
audit_pool: &'a AuditPool,
) -> Option<&'a AuditTransaction> {
while let Some((next_txid, _)) = queue.peek() {
match audit_pool.get(*next_txid as usize) {
Some(Some(tx)) if !tx.used => {
return Some(tx);
}
_ => {
queue.pop();
}
}
}
None
}
fn set_relatives(txid: u32, audit_pool: &mut AuditPool) {
let mut parents: HashSet<u32, U32HasherState> = u32hashset_new();
if let Some(Some(tx)) = audit_pool.get(txid as usize) {
if tx.relatives_set_flag {
return;
}
for input in &tx.inputs {
parents.insert(*input);
}
} else {
return;
}
let mut ancestors: HashSet<u32, U32HasherState> = u32hashset_new();
for parent_id in &parents {
set_relatives(*parent_id, audit_pool);
if let Some(Some(parent)) = audit_pool.get_mut(*parent_id as usize) {
// Safety: ancestors must always contain only txes in audit_pool
ancestors.insert(*parent_id);
parent.children.insert(txid);
for ancestor in &parent.ancestors {
ancestors.insert(*ancestor);
}
}
}
let mut total_fee: u64 = 0;
let mut total_sigop_adjusted_weight: u32 = 0;
let mut total_sigop_adjusted_vsize: u32 = 0;
let mut total_sigops: u32 = 0;
for ancestor_id in &ancestors {
let Some(ancestor) = audit_pool
.get(*ancestor_id as usize)
.expect("audit_pool contains all ancestors") else { todo!() };
total_fee += ancestor.fee;
total_sigop_adjusted_weight += ancestor.sigop_adjusted_weight;
total_sigop_adjusted_vsize += ancestor.sigop_adjusted_vsize;
total_sigops += ancestor.sigops;
}
if let Some(Some(tx)) = audit_pool.get_mut(txid as usize) {
tx.set_ancestors(
ancestors,
total_fee,
total_sigop_adjusted_weight,
total_sigop_adjusted_vsize,
total_sigops,
);
}
}
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
fn update_descendants(
root_txid: u32,
audit_pool: &mut AuditPool,
modified: &mut ModifiedQueue,
cluster_rate: f64,
) {
let mut visited: HashSet<u32, U32HasherState> = u32hashset_new();
let mut descendant_stack: Vec<u32> = Vec::new();
let root_fee: u64;
let root_sigop_adjusted_weight: u32;
let root_sigop_adjusted_vsize: u32;
let root_sigops: u32;
if let Some(Some(root_tx)) = audit_pool.get(root_txid as usize) {
for descendant_id in &root_tx.children {
if !visited.contains(descendant_id) {
descendant_stack.push(*descendant_id);
visited.insert(*descendant_id);
}
}
root_fee = root_tx.fee;
root_sigop_adjusted_weight = root_tx.sigop_adjusted_weight;
root_sigop_adjusted_vsize = root_tx.sigop_adjusted_vsize;
root_sigops = root_tx.sigops;
} else {
return;
}
while let Some(next_txid) = descendant_stack.pop() {
if let Some(Some(descendant)) = audit_pool.get_mut(next_txid as usize) {
// remove root tx as ancestor
let old_score = descendant.remove_root(
root_txid,
root_fee,
root_sigop_adjusted_weight,
root_sigop_adjusted_vsize,
root_sigops,
cluster_rate,
);
// add to priority queue or update priority if score has changed
if descendant.score() < old_score {
descendant.modified = true;
modified.push_decrease(
descendant.uid,
TxPriority {
uid: descendant.uid,
order: descendant.order(),
score: descendant.score(),
},
);
} else if descendant.score() > old_score {
descendant.modified = true;
modified.push_increase(
descendant.uid,
TxPriority {
uid: descendant.uid,
order: descendant.order(),
score: descendant.score(),
},
);
}
// add this node's children to the stack
for child_id in &descendant.children {
if !visited.contains(child_id) {
descendant_stack.push(*child_id);
visited.insert(*child_id);
}
}
}
}
}
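
A note on the `rates` output built near the end of `gbt()`: each row is a `[uid, effectiveFeePerVsize]` pair packed into a `Vec<f64>` because tuples aren't supported across this binding (the u32 uid fits losslessly in an f64). On the Node side they might be folded back into a lookup with a small helper like this (hypothetical sketch):

```ts
// Hypothetical helper: fold GbtResult.rates rows ([uid, effectiveFeePerVsize]) back
// into a uid -> rate lookup for updating the backend's in-memory mempool entries.
function ratesToMap(rates: number[][]): Map<number, number> {
  const out = new Map<number, number>();
  for (const [uid, rate] of rates) {
    out.set(uid, rate);
  }
  return out;
}
```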

177
backend/rust-gbt/src/lib.rs Normal file

@ -0,0 +1,177 @@
#![warn(clippy::all)]
#![warn(clippy::pedantic)]
#![warn(clippy::nursery)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::float_cmp)]
use napi::bindgen_prelude::Result;
use napi_derive::napi;
use thread_transaction::ThreadTransaction;
use tracing::{debug, info, trace};
use tracing_log::LogTracer;
use tracing_subscriber::{EnvFilter, FmtSubscriber};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
mod audit_transaction;
mod gbt;
mod thread_transaction;
mod u32_hasher_types;
use u32_hasher_types::{u32hashmap_with_capacity, U32HasherState};
/// This is the initial capacity of the `GbtGenerator` struct's inner `HashMap`.
///
/// Note: This doesn't *have* to be a power of 2. (uwu)
const STARTING_CAPACITY: usize = 1_048_576;
type ThreadTransactionsMap = HashMap<u32, ThreadTransaction, U32HasherState>;
#[napi]
pub struct GbtGenerator {
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
}
#[napi::module_init]
fn init() {
// Set all `tracing` logs to print to STDOUT
// Note: Passing RUST_LOG env variable to the node process
// will change the log level for the rust module.
tracing::subscriber::set_global_default(
FmtSubscriber::builder()
.with_env_filter(EnvFilter::from_default_env())
.with_ansi(
// Default to no-color logs.
// Setting RUST_LOG_COLOR to 1 or true|TRUE|True etc.
// will enable color
std::env::var("RUST_LOG_COLOR")
.map(|s| ["1", "true"].contains(&&*s.to_lowercase()))
.unwrap_or(false),
)
.finish(),
)
.expect("Logging subscriber failed");
// Convert all `log` logs into `tracing` events
LogTracer::init().expect("Legacy log subscriber failed");
}
#[napi]
impl GbtGenerator {
#[napi(constructor)]
#[allow(clippy::new_without_default)]
#[must_use]
pub fn new() -> Self {
debug!("Created new GbtGenerator");
Self {
thread_transactions: Arc::new(Mutex::new(u32hashmap_with_capacity(STARTING_CAPACITY))),
}
}
/// # Errors
///
/// Rejects if the thread panics or if the Mutex is poisoned.
#[napi]
pub async fn make(&self, mempool: Vec<ThreadTransaction>, max_uid: u32) -> Result<GbtResult> {
trace!("make: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
max_uid as usize,
move |map| {
for tx in mempool {
map.insert(tx.uid, tx);
}
},
)
.await
}
/// # Errors
///
/// Rejects if the thread panics or if the Mutex is poisoned.
#[napi]
pub async fn update(
&self,
new_txs: Vec<ThreadTransaction>,
remove_txs: Vec<u32>,
max_uid: u32,
) -> Result<GbtResult> {
trace!("update: Current State {:#?}", self.thread_transactions);
run_task(
Arc::clone(&self.thread_transactions),
max_uid as usize,
move |map| {
for tx in new_txs {
map.insert(tx.uid, tx);
}
for txid in &remove_txs {
map.remove(txid);
}
},
)
.await
}
}
/// The result from calling the gbt function.
///
/// It contains the following:
/// blocks: A 2D Vector of transaction IDs (u32), the inner Vecs each represent a block.
/// block_weights: A Vector of total weights per block.
/// clusters: A 2D Vector of transaction IDs representing clusters of dependent mempool transactions
/// rates: A Vector of tuples containing transaction IDs (u32) and effective fee per vsize (f64)
#[napi(constructor)]
pub struct GbtResult {
pub blocks: Vec<Vec<u32>>,
pub block_weights: Vec<u32>,
pub clusters: Vec<Vec<u32>>,
pub rates: Vec<Vec<f64>>, // Tuples not supported. u32 fits inside f64
}
/// All on another thread, this runs an arbitrary task in between
/// taking the lock and running gbt.
///
/// Rather than filling / updating the `HashMap` on the main thread,
/// this allows for `HashMap` modifying tasks to be run before running and returning gbt results.
///
/// `thread_transactions` is a cloned `Arc` of the `Mutex` for the `HashMap` state.
/// `callback` is a `'static + Send` `FnOnce` closure/function that takes a mutable reference
/// to the `HashMap` as the only argument. (A move closure is recommended to meet the bounds)
async fn run_task<F>(
thread_transactions: Arc<Mutex<ThreadTransactionsMap>>,
max_uid: usize,
callback: F,
) -> Result<GbtResult>
where
F: FnOnce(&mut ThreadTransactionsMap) + Send + 'static,
{
debug!("Spawning thread...");
let handle = napi::tokio::task::spawn_blocking(move || {
debug!(
"Getting lock for thread_transactions from thread {:?}...",
std::thread::current().id()
);
let mut map = thread_transactions
.lock()
.map_err(|_| napi::Error::from_reason("THREAD_TRANSACTIONS Mutex poisoned"))?;
callback(&mut map);
info!("Starting gbt algorithm for {} elements...", map.len());
let result = gbt::gbt(&mut map, max_uid);
info!("Finished gbt algorithm for {} elements...", map.len());
debug!(
"Releasing lock for thread_transactions from thread {:?}...",
std::thread::current().id()
);
drop(map);
Ok(result)
});
handle
.await
.map_err(|_| napi::Error::from_reason("thread panicked"))?
}
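A minimal usage sketch of the callback pattern documented above, assuming it lives inside this module (`run_task` is private); the `remove_and_rebuild` name is illustrative, not part of this diff:

// Drop a set of uids from the shared map, then run gbt on what remains.
async fn remove_and_rebuild(
    state: Arc<Mutex<ThreadTransactionsMap>>,
    uids: Vec<u32>,
    max_uid: usize,
) -> Result<GbtResult> {
    run_task(state, max_uid, move |map| {
        for uid in &uids {
            map.remove(uid);
        }
    })
    .await
}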


@ -0,0 +1,13 @@
use napi_derive::napi;
#[derive(Debug)]
#[napi(object)]
pub struct ThreadTransaction {
pub uid: u32,
pub order: u32,
pub fee: f64,
pub weight: u32,
pub sigops: u32,
pub effective_fee_per_vsize: f64,
pub inputs: Vec<u32>,
}


@ -0,0 +1,132 @@
use priority_queue::PriorityQueue;
use std::{
collections::{HashMap, HashSet},
fmt::Debug,
hash::{BuildHasher, Hasher},
};
/// This is the only way to create a `HashMap` with the `U32HasherState` and capacity
pub fn u32hashmap_with_capacity<V>(capacity: usize) -> HashMap<u32, V, U32HasherState> {
HashMap::with_capacity_and_hasher(capacity, U32HasherState(()))
}
/// This is the only way to create a `PriorityQueue` with the `U32HasherState` and capacity
pub fn u32priority_queue_with_capacity<V: Ord>(
capacity: usize,
) -> PriorityQueue<u32, V, U32HasherState> {
PriorityQueue::with_capacity_and_hasher(capacity, U32HasherState(()))
}
/// This is the only way to create a `HashSet` with the `U32HasherState`
pub fn u32hashset_new() -> HashSet<u32, U32HasherState> {
HashSet::with_hasher(U32HasherState(()))
}
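A short usage sketch of these constructors (illustrative only): the key type is always u32, because the hasher below only accepts 4-byte writes.

// Function body only, for illustration.
fn hasher_usage_demo() {
    let mut parents = u32hashmap_with_capacity::<Vec<u32>>(1_024);
    parents.insert(7, vec![1, 2]);

    let mut seen = u32hashset_new();
    seen.insert(7u32);
    assert!(seen.contains(&7));

    let mut queue = u32priority_queue_with_capacity::<u64>(1_024);
    queue.push(7, 42u64);
}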
/// Contains a private unit type so that this state cannot be constructed outside this module.
#[derive(Clone)]
pub struct U32HasherState(());
impl Debug for U32HasherState {
fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Ok(())
}
}
impl BuildHasher for U32HasherState {
type Hasher = U32Hasher;
fn build_hasher(&self) -> Self::Hasher {
U32Hasher(0)
}
}
/// This also can't be created outside this module, due to its private field.
pub struct U32Hasher(u32);
impl Hasher for U32Hasher {
fn finish(&self) -> u64 {
// Safety: Two u32s next to each other will make a u64
bytemuck::cast([self.0, 0])
}
fn write(&mut self, bytes: &[u8]) {
// Assert in debug builds (and in tests) that only 4-byte keys (u32, i32, f32, etc.) are hashed
debug_assert!(bytes.len() == 4);
// Safety: We know that the size of the key is 4 bytes
// We also know that the only way to get an instance of HashMap using this "hasher"
// is through the public functions in this module which set the key type to u32.
self.0 = *bytemuck::from_bytes(bytes);
}
}
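What this hasher actually does, shown as a small illustrative check (module-internal, since `U32HasherState` cannot be constructed from outside): hashing a u32 key stores the key itself and widens it to u64 on finish. The asserted value assumes a little-endian target, which is what the `[u32; 2]` to `u64` cast above yields there.

fn identity_hash_demo() {
    use std::hash::{BuildHasher, Hash, Hasher};

    let state = U32HasherState(());
    let mut hasher = state.build_hasher();
    // Hash for u32 forwards the four native-endian key bytes to write().
    42u32.hash(&mut hasher);
    assert_eq!(hasher.finish(), 42);
}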
#[cfg(test)]
mod tests {
use super::U32HasherState;
use priority_queue::PriorityQueue;
use std::collections::HashMap;
#[test]
fn test_hashmap() {
let mut hm: HashMap<u32, String, U32HasherState> = HashMap::with_hasher(U32HasherState(()));
// Testing basic operations with the custom hasher
hm.insert(0, String::from("0"));
hm.insert(42, String::from("42"));
hm.insert(256, String::from("256"));
hm.insert(u32::MAX, String::from("MAX"));
hm.insert(u32::MAX >> 2, String::from("MAX >> 2"));
assert_eq!(hm.get(&0), Some(&String::from("0")));
assert_eq!(hm.get(&42), Some(&String::from("42")));
assert_eq!(hm.get(&256), Some(&String::from("256")));
assert_eq!(hm.get(&u32::MAX), Some(&String::from("MAX")));
assert_eq!(hm.get(&(u32::MAX >> 2)), Some(&String::from("MAX >> 2")));
assert_eq!(hm.get(&(u32::MAX >> 4)), None);
assert_eq!(hm.get(&3), None);
assert_eq!(hm.get(&43), None);
}
#[test]
fn test_priority_queue() {
let mut pq: PriorityQueue<u32, i32, U32HasherState> =
PriorityQueue::with_hasher(U32HasherState(()));
// Testing basic operations with the custom hasher
assert_eq!(pq.push(1, 5), None);
assert_eq!(pq.push(2, -10), None);
assert_eq!(pq.push(3, 7), None);
assert_eq!(pq.push(4, 20), None);
assert_eq!(pq.push(u32::MAX, -42), None);
assert_eq!(pq.push_increase(1, 4), Some(4));
assert_eq!(pq.push_increase(2, -8), Some(-10));
assert_eq!(pq.push_increase(3, 5), Some(5));
assert_eq!(pq.push_increase(4, 21), Some(20));
assert_eq!(pq.push_increase(u32::MAX, -99), Some(-99));
assert_eq!(pq.push_increase(42, 1337), None);
assert_eq!(pq.push_decrease(1, 4), Some(5));
assert_eq!(pq.push_decrease(2, -10), Some(-8));
assert_eq!(pq.push_decrease(3, 5), Some(7));
assert_eq!(pq.push_decrease(4, 20), Some(21));
assert_eq!(pq.push_decrease(u32::MAX, 100), Some(100));
assert_eq!(pq.push_decrease(69, 420), None);
assert_eq!(pq.peek(), Some((&42, &1337)));
assert_eq!(pq.pop(), Some((42, 1337)));
assert_eq!(pq.peek(), Some((&69, &420)));
assert_eq!(pq.pop(), Some((69, 420)));
assert_eq!(pq.peek(), Some((&4, &20)));
assert_eq!(pq.pop(), Some((4, 20)));
assert_eq!(pq.peek(), Some((&3, &5)));
assert_eq!(pq.pop(), Some((3, 5)));
assert_eq!(pq.peek(), Some((&1, &4)));
assert_eq!(pq.pop(), Some((1, 4)));
assert_eq!(pq.peek(), Some((&2, &-10)));
assert_eq!(pq.pop(), Some((2, -10)));
assert_eq!(pq.peek(), Some((&u32::MAX, &-42)));
assert_eq!(pq.pop(), Some((u32::MAX, -42)));
assert_eq!(pq.peek(), None);
assert_eq!(pq.pop(), None);
}
}


@ -27,6 +27,7 @@
"AUDIT": true, "AUDIT": true,
"ADVANCED_GBT_AUDIT": true, "ADVANCED_GBT_AUDIT": true,
"ADVANCED_GBT_MEMPOOL": true, "ADVANCED_GBT_MEMPOOL": true,
"RUST_GBT": false,
"CPFP_INDEXING": true, "CPFP_INDEXING": true,
"MAX_BLOCKS_BULK_QUERY": 999, "MAX_BLOCKS_BULK_QUERY": 999,
"DISK_CACHE_BLOCK_INTERVAL": 999 "DISK_CACHE_BLOCK_INTERVAL": 999


@ -40,6 +40,7 @@ describe('Mempool Backend Config', () => {
AUDIT: false, AUDIT: false,
ADVANCED_GBT_AUDIT: false, ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false, ADVANCED_GBT_MEMPOOL: false,
RUST_GBT: false,
CPFP_INDEXING: false, CPFP_INDEXING: false,
MAX_BLOCKS_BULK_QUERY: 0, MAX_BLOCKS_BULK_QUERY: 0,
DISK_CACHE_BLOCK_INTERVAL: 6, DISK_CACHE_BLOCK_INTERVAL: 6,


@ -0,0 +1,68 @@
import fs from 'fs';
import { GbtGenerator, ThreadTransaction } from '../../../rust-gbt';
import path from 'path';
const baseline = require('./test-data/target-template.json');
const testVector = require('./test-data/test-data-ids.json');
const vectorUidMap: Map<number, string> = new Map(testVector.map(x => [x[0], x[1]]));
const vectorTxidMap: Map<string, number> = new Map(testVector.map(x => [x[1], x[0]]));
// Note that this test buffer is specially constructed
// such that uids are assigned in numerical txid order
// so that ties break the same way as in Core's implementation
const vectorBuffer: Buffer = fs.readFileSync(path.join(__dirname, './', './test-data/test-buffer.bin'));
describe('Rust GBT', () => {
test('should produce the same template as getBlockTemplate from Bitcoin Core', async () => {
const rustGbt = new GbtGenerator();
const { mempool, maxUid } = mempoolFromArrayBuffer(vectorBuffer.buffer);
const result = await rustGbt.make(mempool, maxUid);
const blocks: [string, number][][] = result.blocks.map(block => {
return block.map(uid => [vectorUidMap.get(uid) || 'missing', uid]);
});
const template = baseline.map(tx => [tx.txid, vectorTxidMap.get(tx.txid)]);
expect(blocks[0].length).toEqual(baseline.length);
expect(blocks[0]).toEqual(template);
});
});
function mempoolFromArrayBuffer(buf: ArrayBuffer): { mempool: ThreadTransaction[], maxUid: number } {
let maxUid = 0;
const view = new DataView(buf);
const count = view.getUint32(0, false);
const txs: ThreadTransaction[] = [];
let offset = 4;
for (let i = 0; i < count; i++) {
const uid = view.getUint32(offset, false);
maxUid = Math.max(maxUid, uid);
const tx: ThreadTransaction = {
uid,
order: txidToOrdering(vectorUidMap.get(uid) as string),
fee: view.getFloat64(offset + 4, false),
weight: view.getUint32(offset + 12, false),
sigops: view.getUint32(offset + 16, false),
// feePerVsize: view.getFloat64(offset + 20, false),
effectiveFeePerVsize: view.getFloat64(offset + 28, false),
inputs: [],
};
const numInputs = view.getUint32(offset + 36, false);
offset += 40;
for (let j = 0; j < numInputs; j++) {
tx.inputs.push(view.getUint32(offset, false));
offset += 4;
}
txs.push(tx);
}
return { mempool: txs, maxUid };
}
function txidToOrdering(txid: string): number {
return parseInt(
txid.substr(62, 2) +
txid.substr(60, 2) +
txid.substr(58, 2) +
txid.substr(56, 2),
16
);
}
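For reference, the layout walked by `mempoolFromArrayBuffer` above is: a big-endian u32 transaction count, then per transaction a 40-byte header (uid u32, fee f64, weight u32, sigops u32, feePerVsize f64, effectiveFeePerVsize f64, input count u32) followed by that many u32 input uids. A hedged Rust sketch of the same record reader (all names are mine, not part of the diff):

// Reads one transaction record starting at `off`, returning it together
// with the offset of the next record. All fields are big-endian.
struct RawTx {
    uid: u32,
    fee: f64,
    weight: u32,
    sigops: u32,
    fee_per_vsize: f64,
    effective_fee_per_vsize: f64,
    inputs: Vec<u32>,
}

fn read_u32_be(buf: &[u8], off: usize) -> u32 {
    let mut b = [0u8; 4];
    b.copy_from_slice(&buf[off..off + 4]);
    u32::from_be_bytes(b)
}

fn read_f64_be(buf: &[u8], off: usize) -> f64 {
    let mut b = [0u8; 8];
    b.copy_from_slice(&buf[off..off + 8]);
    f64::from_be_bytes(b)
}

fn read_tx(buf: &[u8], mut off: usize) -> (RawTx, usize) {
    let uid = read_u32_be(buf, off);
    let fee = read_f64_be(buf, off + 4);
    let weight = read_u32_be(buf, off + 12);
    let sigops = read_u32_be(buf, off + 16);
    let fee_per_vsize = read_f64_be(buf, off + 20);
    let effective_fee_per_vsize = read_f64_be(buf, off + 28);
    let num_inputs = read_u32_be(buf, off + 36) as usize;
    off += 40;
    let mut inputs = Vec::with_capacity(num_inputs);
    for _ in 0..num_inputs {
        inputs.push(read_u32_be(buf, off));
        off += 4;
    }
    let tx = RawTx { uid, fee, weight, sigops, fee_per_vsize, effective_fee_per_vsize, inputs };
    (tx, off)
}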

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because one or more lines are too long


@ -34,7 +34,7 @@ class Blocks {
private lastDifficultyAdjustmentTime = 0; private lastDifficultyAdjustmentTime = 0;
private previousDifficultyRetarget = 0; private previousDifficultyRetarget = 0;
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = []; private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>)[] = []; private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]) => Promise<void>)[] = [];
private mainLoopTimeout: number = 120000; private mainLoopTimeout: number = 120000;
@ -60,7 +60,7 @@ class Blocks {
this.newBlockCallbacks.push(fn); this.newBlockCallbacks.push(fn);
} }
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>) { public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]) => Promise<void>) {
this.newAsyncBlockCallbacks.push(fn); this.newAsyncBlockCallbacks.push(fn);
} }
@ -642,7 +642,7 @@ class Blocks {
const verboseBlock = await bitcoinClient.getBlock(blockHash, 2); const verboseBlock = await bitcoinClient.getBlock(blockHash, 2);
const block = BitcoinApi.convertBlock(verboseBlock); const block = BitcoinApi.convertBlock(verboseBlock);
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash); const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false, false, true); const transactions = await this.$getTransactionsExtended(blockHash, block.height, false, false, true) as MempoolTransactionExtended[];
if (config.MEMPOOL.BACKEND !== 'esplora') { if (config.MEMPOOL.BACKEND !== 'esplora') {
// fill in missing transaction fee data from verboseBlock // fill in missing transaction fee data from verboseBlock
for (let i = 0; i < transactions.length; i++) { for (let i = 0; i < transactions.length; i++) {


@ -195,6 +195,7 @@ class DiskCache {
if (data.mempoolArray) { if (data.mempoolArray) {
for (const tx of data.mempoolArray) { for (const tx of data.mempoolArray) {
delete tx.uid;
data.mempool[tx.txid] = tx; data.mempool[tx.txid] = tx;
} }
} }
@ -207,6 +208,7 @@ class DiskCache {
const cacheData2 = JSON.parse(fs.readFileSync(fileName, 'utf8')); const cacheData2 = JSON.parse(fs.readFileSync(fileName, 'utf8'));
if (cacheData2.mempoolArray) { if (cacheData2.mempoolArray) {
for (const tx of cacheData2.mempoolArray) { for (const tx of cacheData2.mempoolArray) {
delete tx.uid;
data.mempool[tx.txid] = tx; data.mempool[tx.txid] = tx;
} }
} else { } else {


@ -1,3 +1,4 @@
import { GbtGenerator, GbtResult, ThreadTransaction as RustThreadTransaction } from '../../rust-gbt';
import logger from '../logger'; import logger from '../logger';
import { MempoolBlock, MempoolTransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor, CompactThreadTransaction, EffectiveFeeStats } from '../mempool.interfaces'; import { MempoolBlock, MempoolTransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor, CompactThreadTransaction, EffectiveFeeStats } from '../mempool.interfaces';
import { Common, OnlineFeeStatsCalculator } from './common'; import { Common, OnlineFeeStatsCalculator } from './common';
@ -5,16 +6,18 @@ import config from '../config';
import { Worker } from 'worker_threads'; import { Worker } from 'worker_threads';
import path from 'path'; import path from 'path';
const MAX_UINT32 = Math.pow(2, 32) - 1;
class MempoolBlocks { class MempoolBlocks {
private mempoolBlocks: MempoolBlockWithTransactions[] = []; private mempoolBlocks: MempoolBlockWithTransactions[] = [];
private mempoolBlockDeltas: MempoolBlockDelta[] = []; private mempoolBlockDeltas: MempoolBlockDelta[] = [];
private txSelectionWorker: Worker | null = null; private txSelectionWorker: Worker | null = null;
private rustInitialized: boolean = false;
private rustGbtGenerator: GbtGenerator = new GbtGenerator();
private nextUid: number = 1; private nextUid: number = 1;
private uidMap: Map<number, string> = new Map(); // map short numerical uids to full txids private uidMap: Map<number, string> = new Map(); // map short numerical uids to full txids
constructor() {}
public getMempoolBlocks(): MempoolBlock[] { public getMempoolBlocks(): MempoolBlock[] {
return this.mempoolBlocks.map((block) => { return this.mempoolBlocks.map((block) => {
return { return {
@ -40,9 +43,7 @@ class MempoolBlocks {
const latestMempool = memPool; const latestMempool = memPool;
const memPoolArray: MempoolTransactionExtended[] = []; const memPoolArray: MempoolTransactionExtended[] = [];
for (const i in latestMempool) { for (const i in latestMempool) {
if (latestMempool.hasOwnProperty(i)) { memPoolArray.push(latestMempool[i]);
memPoolArray.push(latestMempool[i]);
}
} }
const start = new Date().getTime(); const start = new Date().getTime();
@ -218,16 +219,17 @@ class MempoolBlocks {
// to reduce the overhead of passing this data to the worker thread // to reduce the overhead of passing this data to the worker thread
const strippedMempool: Map<number, CompactThreadTransaction> = new Map(); const strippedMempool: Map<number, CompactThreadTransaction> = new Map();
Object.values(newMempool).forEach(entry => { Object.values(newMempool).forEach(entry => {
if (entry.uid != null) { if (entry.uid !== null && entry.uid !== undefined) {
strippedMempool.set(entry.uid, { const stripped = {
uid: entry.uid, uid: entry.uid,
fee: entry.fee, fee: entry.fee,
weight: (entry.adjustedVsize * 4), weight: (entry.adjustedVsize * 4),
sigops: entry.sigops, sigops: entry.sigops,
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize, feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize, effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize,
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => uid != null) as number[], inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[],
}); };
strippedMempool.set(entry.uid, stripped);
} }
}); });
@ -260,8 +262,10 @@ class MempoolBlocks {
// clean up thread error listener // clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener); this.txSelectionWorker?.removeListener('error', threadErrorListener);
const processed = this.processBlockTemplates(newMempool, blocks, rates, clusters, saveResults); const processed = this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
logger.debug(`makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`); logger.debug(`makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
return processed; return processed;
} catch (e) { } catch (e) {
logger.err('makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e)); logger.err('makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
@ -279,12 +283,12 @@ class MempoolBlocks {
const start = Date.now(); const start = Date.now();
for (const tx of Object.values(added)) { for (const tx of Object.values(added)) {
this.setUid(tx); this.setUid(tx, true);
} }
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => uid != null) as number[]; const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
// prepare a stripped down version of the mempool with only the minimum necessary data // prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread // to reduce the overhead of passing this data to the worker thread
const addedStripped: CompactThreadTransaction[] = added.filter(entry => entry.uid != null).map(entry => { const addedStripped: CompactThreadTransaction[] = added.filter(entry => (entry.uid !== null && entry.uid !== undefined)).map(entry => {
return { return {
uid: entry.uid || 0, uid: entry.uid || 0,
fee: entry.fee, fee: entry.fee,
@ -292,7 +296,7 @@ class MempoolBlocks {
sigops: entry.sigops, sigops: entry.sigops,
feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize, feePerVsize: entry.adjustedFeePerVsize || entry.feePerVsize,
effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize, effectiveFeePerVsize: entry.effectiveFeePerVsize || entry.adjustedFeePerVsize || entry.feePerVsize,
inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => uid != null) as number[], inputs: entry.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[],
}; };
}); });
@ -314,84 +318,131 @@ class MempoolBlocks {
// clean up thread error listener // clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener); this.txSelectionWorker?.removeListener('error', threadErrorListener);
this.processBlockTemplates(newMempool, blocks, rates, clusters, saveResults); this.processBlockTemplates(newMempool, blocks, null, Object.entries(rates), Object.values(clusters), saveResults);
logger.debug(`updateBlockTemplates completed in ${(Date.now() - start) / 1000} seconds`); logger.debug(`updateBlockTemplates completed in ${(Date.now() - start) / 1000} seconds`);
} catch (e) { } catch (e) {
logger.err('updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e)); logger.err('updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
} }
} }
private processBlockTemplates(mempool, blocks: string[][], rates: { [root: string]: number }, clusters: { [root: string]: string[] }, saveResults): MempoolBlockWithTransactions[] { private resetRustGbt(): void {
for (const txid of Object.keys(rates)) { this.rustInitialized = false;
this.rustGbtGenerator = new GbtGenerator();
}
private async $rustMakeBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
const start = Date.now();
// reset mempool short ids
if (saveResults) {
this.resetUids();
}
// set missing short ids
for (const tx of Object.values(newMempool)) {
this.setUid(tx, !saveResults);
}
// set short ids for transaction inputs
for (const tx of Object.values(newMempool)) {
tx.inputs = tx.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[];
}
// run the block construction algorithm in a separate thread, and wait for a result
const rustGbt = saveResults ? this.rustGbtGenerator : new GbtGenerator();
try {
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
await rustGbt.make(Object.values(newMempool) as RustThreadTransaction[], this.nextUid),
);
if (saveResults) {
this.rustInitialized = true;
}
const processed = this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, saveResults);
logger.debug(`RUST makeBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
return processed;
} catch (e) {
logger.err('RUST makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
if (saveResults) {
this.resetRustGbt();
}
}
return this.mempoolBlocks;
}
public async $oneOffRustBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }): Promise<MempoolBlockWithTransactions[]> {
return this.$rustMakeBlockTemplates(newMempool, false);
}
public async $rustUpdateBlockTemplates(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number, added: MempoolTransactionExtended[], removed: MempoolTransactionExtended[]): Promise<void> {
// GBT optimization requires that uids never get too sparse
// as a sanity check, we should also explicitly prevent uint32 uid overflow
if (this.nextUid + added.length >= Math.min(Math.max(262144, 2 * mempoolSize), MAX_UINT32)) {
this.resetRustGbt();
}
if (!this.rustInitialized) {
// need to reset the worker
await this.$rustMakeBlockTemplates(newMempool, true);
return;
}
const start = Date.now();
// set missing short ids
for (const tx of added) {
this.setUid(tx, true);
}
// set short ids for transaction inputs
for (const tx of added) {
tx.inputs = tx.vin.map(v => this.getUid(newMempool[v.txid])).filter(uid => (uid !== null && uid !== undefined)) as number[];
}
const removedUids = removed.map(tx => this.getUid(tx)).filter(uid => (uid !== null && uid !== undefined)) as number[];
// run the block construction algorithm in a separate thread, and wait for a result
try {
const { blocks, blockWeights, rates, clusters } = this.convertNapiResultTxids(
await this.rustGbtGenerator.update(
added as RustThreadTransaction[],
removedUids,
this.nextUid,
),
);
const resultMempoolSize = blocks.reduce((total, block) => total + block.length, 0);
if (mempoolSize !== resultMempoolSize) {
throw new Error('GBT returned wrong number of transactions, cache is probably out of sync');
} else {
this.processBlockTemplates(newMempool, blocks, blockWeights, rates, clusters, true);
}
this.removeUids(removedUids);
logger.debug(`RUST updateBlockTemplates completed in ${(Date.now() - start)/1000} seconds`);
} catch (e) {
logger.err('RUST updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
this.resetRustGbt();
}
}
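The reset check at the top of `$rustUpdateBlockTemplates` keeps uids dense and guards against uint32 overflow: the generator is rebuilt once `nextUid + added.length` would reach `min(max(262144, 2 * mempoolSize), MAX_UINT32)`. A rough sketch of that arithmetic, written in Rust to match the new module (the helper name is mine, not part of the diff):

// Illustrative only: the reset condition used above.
fn should_reset_uids(next_uid: u64, added: u64, mempool_size: u64) -> bool {
    let ceiling = (2 * mempool_size).max(262_144).min(u64::from(u32::MAX));
    next_uid + added >= ceiling
}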
private processBlockTemplates(mempool: { [txid: string]: MempoolTransactionExtended }, blocks: string[][], blockWeights: number[] | null, rates: [string, number][], clusters: string[][], saveResults): MempoolBlockWithTransactions[] {
for (const [txid, rate] of rates) {
if (txid in mempool) { if (txid in mempool) {
mempool[txid].effectiveFeePerVsize = rates[txid]; mempool[txid].effectiveFeePerVsize = rate;
mempool[txid].cpfpChecked = false;
} }
} }
const lastBlockIndex = blocks.length - 1;
let hasBlockStack = blocks.length >= 8; let hasBlockStack = blocks.length >= 8;
let stackWeight; let stackWeight;
let feeStatsCalculator: OnlineFeeStatsCalculator | void; let feeStatsCalculator: OnlineFeeStatsCalculator | void;
if (hasBlockStack) { if (hasBlockStack) {
stackWeight = blocks[blocks.length - 1].reduce((total, tx) => total + (mempool[tx]?.weight || 0), 0); if (blockWeights && blockWeights[7] !== null) {
stackWeight = blockWeights[7];
} else {
stackWeight = blocks[lastBlockIndex].reduce((total, tx) => total + (mempool[tx]?.weight || 0), 0);
}
hasBlockStack = stackWeight > config.MEMPOOL.BLOCK_WEIGHT_UNITS; hasBlockStack = stackWeight > config.MEMPOOL.BLOCK_WEIGHT_UNITS;
feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5, [10, 20, 30, 40, 50, 60, 70, 80, 90]); feeStatsCalculator = new OnlineFeeStatsCalculator(stackWeight, 0.5, [10, 20, 30, 40, 50, 60, 70, 80, 90]);
} }
const readyBlocks: { transactionIds, transactions, totalSize, totalWeight, totalFees, feeStats }[] = []; for (const cluster of clusters) {
const sizeLimit = (config.MEMPOOL.BLOCK_WEIGHT_UNITS / 4) * 1.2;
// update this thread's mempool with the results
for (let blockIndex = 0; blockIndex < blocks.length; blockIndex++) {
const block: string[] = blocks[blockIndex];
let txid: string;
let mempoolTx: MempoolTransactionExtended;
let totalSize = 0;
let totalVsize = 0;
let totalWeight = 0;
let totalFees = 0;
const transactions: MempoolTransactionExtended[] = [];
for (let txIndex = 0; txIndex < block.length; txIndex++) {
txid = block[txIndex];
if (txid) {
mempoolTx = mempool[txid];
// save position in projected blocks
mempoolTx.position = {
block: blockIndex,
vsize: totalVsize + (mempoolTx.vsize / 2),
};
mempoolTx.ancestors = [];
mempoolTx.descendants = [];
mempoolTx.bestDescendant = null;
mempoolTx.cpfpChecked = true;
// online calculation of stack-of-blocks fee stats
if (hasBlockStack && blockIndex === blocks.length - 1 && feeStatsCalculator) {
feeStatsCalculator.processNext(mempoolTx);
}
totalSize += mempoolTx.size;
totalVsize += mempoolTx.vsize;
totalWeight += mempoolTx.weight;
totalFees += mempoolTx.fee;
if (totalVsize <= sizeLimit) {
transactions.push(mempoolTx);
}
}
}
readyBlocks.push({
transactionIds: block,
transactions,
totalSize,
totalWeight,
totalFees,
feeStats: (hasBlockStack && blockIndex === blocks.length - 1 && feeStatsCalculator) ? feeStatsCalculator.getRawFeeStats() : undefined,
});
}
for (const cluster of Object.values(clusters)) {
for (const memberTxid of cluster) { for (const memberTxid of cluster) {
if (memberTxid in mempool) { const mempoolTx = mempool[memberTxid];
const mempoolTx = mempool[memberTxid]; if (mempoolTx) {
const ancestors: Ancestor[] = []; const ancestors: Ancestor[] = [];
const descendants: Ancestor[] = []; const descendants: Ancestor[] = [];
let matched = false; let matched = false;
@ -411,15 +462,62 @@ class MempoolBlocks {
} }
} }
}); });
mempoolTx.ancestors = ancestors; Object.assign(mempoolTx, {ancestors, descendants, bestDescendant: null, cpfpChecked: true});
mempoolTx.descendants = descendants;
mempoolTx.bestDescendant = null;
} }
} }
} }
const mempoolBlocks = readyBlocks.map((b, index) => { const sizeLimit = (config.MEMPOOL.BLOCK_WEIGHT_UNITS / 4) * 1.2;
return this.dataToMempoolBlocks(b.transactionIds, b.transactions, b.totalSize, b.totalWeight, b.totalFees, b.feeStats); // update this thread's mempool with the results
let mempoolTx: MempoolTransactionExtended;
const mempoolBlocks: MempoolBlockWithTransactions[] = blocks.map((block, blockIndex) => {
let totalSize = 0;
let totalVsize = 0;
let totalWeight = 0;
let totalFees = 0;
const transactions: MempoolTransactionExtended[] = [];
for (const txid of block) {
if (txid) {
mempoolTx = mempool[txid];
// save position in projected blocks
mempoolTx.position = {
block: blockIndex,
vsize: totalVsize + (mempoolTx.vsize / 2),
};
if (!mempoolTx.cpfpChecked) {
if (mempoolTx.ancestors?.length) {
mempoolTx.ancestors = [];
}
if (mempoolTx.descendants?.length) {
mempoolTx.descendants = [];
}
mempoolTx.bestDescendant = null;
mempoolTx.cpfpChecked = true;
}
// online calculation of stack-of-blocks fee stats
if (hasBlockStack && blockIndex === lastBlockIndex && feeStatsCalculator) {
feeStatsCalculator.processNext(mempoolTx);
}
totalSize += mempoolTx.size;
totalVsize += mempoolTx.vsize;
totalWeight += mempoolTx.weight;
totalFees += mempoolTx.fee;
if (totalVsize <= sizeLimit) {
transactions.push(mempoolTx);
}
}
}
return this.dataToMempoolBlocks(
block,
transactions,
totalSize,
totalWeight,
totalFees,
(hasBlockStack && blockIndex === lastBlockIndex && feeStatsCalculator) ? feeStatsCalculator.getRawFeeStats() : undefined,
);
}); });
if (saveResults) { if (saveResults) {
@ -452,16 +550,20 @@ class MempoolBlocks {
this.nextUid = 1; this.nextUid = 1;
} }
private setUid(tx: MempoolTransactionExtended): number { private setUid(tx: MempoolTransactionExtended, skipSet = false): number {
const uid = this.nextUid; if (tx.uid === null || tx.uid === undefined || !skipSet) {
this.nextUid++; const uid = this.nextUid;
this.uidMap.set(uid, tx.txid); this.nextUid++;
tx.uid = uid; this.uidMap.set(uid, tx.txid);
return uid; tx.uid = uid;
return uid;
} else {
return tx.uid;
}
} }
private getUid(tx: MempoolTransactionExtended): number | void { private getUid(tx: MempoolTransactionExtended): number | void {
if (tx?.uid != null && this.uidMap.has(tx.uid)) { if (tx?.uid !== null && tx?.uid !== undefined && this.uidMap.has(tx.uid)) {
return tx.uid; return tx.uid;
} }
} }
@ -496,6 +598,28 @@ class MempoolBlocks {
} }
return { blocks: convertedBlocks, rates: convertedRates, clusters: convertedClusters } as { blocks: string[][], rates: { [root: string]: number }, clusters: { [root: string]: string[] }}; return { blocks: convertedBlocks, rates: convertedRates, clusters: convertedClusters } as { blocks: string[][], rates: { [root: string]: number }, clusters: { [root: string]: string[] }};
} }
private convertNapiResultTxids({ blocks, blockWeights, rates, clusters }: GbtResult)
: { blocks: string[][], blockWeights: number[], rates: [string, number][], clusters: string[][] } {
const convertedBlocks: string[][] = blocks.map(block => block.map(uid => {
const txid = this.uidMap.get(uid);
if (txid !== undefined) {
return txid;
} else {
throw new Error('GBT returned a block containing a transaction with unknown uid');
}
}));
const convertedRates: [string, number][] = [];
for (const [rateUid, rate] of rates) {
const rateTxid = this.uidMap.get(rateUid) as string;
convertedRates.push([rateTxid, rate]);
}
const convertedClusters: string[][] = [];
for (const cluster of clusters) {
convertedClusters.push(cluster.map(uid => this.uidMap.get(uid)) as string[]);
}
return { blocks: convertedBlocks, blockWeights, rates: convertedRates, clusters: convertedClusters };
}
} }
export default new MempoolBlocks(); export default new MempoolBlocks();


@ -19,7 +19,7 @@ class Mempool {
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 }; maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
private mempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, newTransactions: MempoolTransactionExtended[], private mempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, newTransactions: MempoolTransactionExtended[],
deletedTransactions: MempoolTransactionExtended[]) => void) | undefined; deletedTransactions: MempoolTransactionExtended[]) => void) | undefined;
private $asyncMempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, newTransactions: MempoolTransactionExtended[], private $asyncMempoolChangedCallback: ((newMempool: {[txId: string]: MempoolTransactionExtended; }, mempoolSize: number, newTransactions: MempoolTransactionExtended[],
deletedTransactions: MempoolTransactionExtended[]) => Promise<void>) | undefined; deletedTransactions: MempoolTransactionExtended[]) => Promise<void>) | undefined;
private txPerSecondArray: number[] = []; private txPerSecondArray: number[] = [];
@ -69,7 +69,7 @@ class Mempool {
this.mempoolChangedCallback = fn; this.mempoolChangedCallback = fn;
} }
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; }, public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: MempoolTransactionExtended; }, mempoolSize: number,
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]) => Promise<void>): void { newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]) => Promise<void>): void {
this.$asyncMempoolChangedCallback = fn; this.$asyncMempoolChangedCallback = fn;
} }
@ -84,16 +84,21 @@ class Mempool {
public async $setMempool(mempoolData: { [txId: string]: MempoolTransactionExtended }) { public async $setMempool(mempoolData: { [txId: string]: MempoolTransactionExtended }) {
this.mempoolCache = mempoolData; this.mempoolCache = mempoolData;
let count = 0;
for (const txid of Object.keys(this.mempoolCache)) { for (const txid of Object.keys(this.mempoolCache)) {
if (this.mempoolCache[txid].sigops == null || this.mempoolCache[txid].effectiveFeePerVsize == null) { if (this.mempoolCache[txid].sigops == null || this.mempoolCache[txid].effectiveFeePerVsize == null) {
this.mempoolCache[txid] = transactionUtils.extendMempoolTransaction(this.mempoolCache[txid]); this.mempoolCache[txid] = transactionUtils.extendMempoolTransaction(this.mempoolCache[txid]);
} }
if (this.mempoolCache[txid].order == null) {
this.mempoolCache[txid].order = transactionUtils.txidToOrdering(txid);
}
count++;
} }
if (this.mempoolChangedCallback) { if (this.mempoolChangedCallback) {
this.mempoolChangedCallback(this.mempoolCache, [], []); this.mempoolChangedCallback(this.mempoolCache, [], []);
} }
if (this.$asyncMempoolChangedCallback) { if (this.$asyncMempoolChangedCallback) {
await this.$asyncMempoolChangedCallback(this.mempoolCache, [], []); await this.$asyncMempoolChangedCallback(this.mempoolCache, count, [], []);
} }
this.addToSpendMap(Object.values(this.mempoolCache)); this.addToSpendMap(Object.values(this.mempoolCache));
} }
@ -237,23 +242,24 @@ class Mempool {
} }
} }
const newMempoolSize = currentMempoolSize + newTransactions.length - deletedTransactions.length;
const newTransactionsStripped = newTransactions.map((tx) => Common.stripTransaction(tx)); const newTransactionsStripped = newTransactions.map((tx) => Common.stripTransaction(tx));
this.latestTransactions = newTransactionsStripped.concat(this.latestTransactions).slice(0, 6); this.latestTransactions = newTransactionsStripped.concat(this.latestTransactions).slice(0, 6);
if (!this.inSync && transactions.length === Object.keys(this.mempoolCache).length) { if (!this.inSync && transactions.length === newMempoolSize) {
this.inSync = true; this.inSync = true;
logger.notice('The mempool is now in sync!'); logger.notice('The mempool is now in sync!');
loadingIndicators.setProgress('mempool', 100); loadingIndicators.setProgress('mempool', 100);
} }
this.mempoolCacheDelta = Math.abs(transactions.length - Object.keys(this.mempoolCache).length); this.mempoolCacheDelta = Math.abs(transactions.length - newMempoolSize);
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) { if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions); this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
} }
if (this.$asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) { if (this.$asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.updateTimerProgress(timer, 'running async mempool callback'); this.updateTimerProgress(timer, 'running async mempool callback');
await this.$asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions); await this.$asyncMempoolChangedCallback(this.mempoolCache, newMempoolSize, newTransactions, deletedTransactions);
this.updateTimerProgress(timer, 'completed async mempool callback'); this.updateTimerProgress(timer, 'completed async mempool callback');
} }


@ -76,6 +76,7 @@ class TransactionUtils {
const adjustedFeePerVsize = Math.max(Common.isLiquid() ? 0.1 : 1, const adjustedFeePerVsize = Math.max(Common.isLiquid() ? 0.1 : 1,
(transaction.fee || 0) / adjustedVsize); (transaction.fee || 0) / adjustedVsize);
const transactionExtended: MempoolTransactionExtended = Object.assign(transaction, { const transactionExtended: MempoolTransactionExtended = Object.assign(transaction, {
order: this.txidToOrdering(transaction.txid),
vsize: Math.round(transaction.weight / 4), vsize: Math.round(transaction.weight / 4),
adjustedVsize, adjustedVsize,
sigops, sigops,
@ -154,6 +155,17 @@ class TransactionUtils {
return sigops; return sigops;
} }
// returns the most significant 4 bytes of the txid as an integer
public txidToOrdering(txid: string): number {
return parseInt(
txid.substr(62, 2) +
txid.substr(60, 2) +
txid.substr(58, 2) +
txid.substr(56, 2),
16
);
}
} }
export default new TransactionUtils(); export default new TransactionUtils();
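`txidToOrdering` concatenates hex characters 62-63, 60-61, 58-59 and 56-57 of the txid, i.e. it byte-reverses the last four displayed bytes: a txid ending in "0a0b0c0d" maps to 0x0d0c0b0a = 218,893,066. An equivalent sketch in Rust, for illustration only (not part of the diff):

// Parse two hex chars at a time and reassemble them in reverse byte order.
fn txid_to_ordering(txid: &str) -> u32 {
    let byte_at = |i: usize| u32::from_str_radix(&txid[i..i + 2], 16).unwrap_or(0);
    (byte_at(62) << 24) | (byte_at(60) << 16) | (byte_at(58) << 8) | byte_at(56)
}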


@ -333,7 +333,7 @@ class WebsocketHandler {
}); });
} }
async $handleMempoolChange(newMempool: { [txid: string]: MempoolTransactionExtended }, async $handleMempoolChange(newMempool: { [txid: string]: MempoolTransactionExtended }, mempoolSize: number,
newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]): Promise<void> { newTransactions: MempoolTransactionExtended[], deletedTransactions: MempoolTransactionExtended[]): Promise<void> {
if (!this.wss) { if (!this.wss) {
throw new Error('WebSocket.Server is not set'); throw new Error('WebSocket.Server is not set');
@ -342,7 +342,11 @@ class WebsocketHandler {
this.printLogs(); this.printLogs();
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) { if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.$updateBlockTemplates(newMempool, newTransactions, deletedTransactions, true); if (config.MEMPOOL.RUST_GBT) {
await mempoolBlocks.$rustUpdateBlockTemplates(newMempool, mempoolSize, newTransactions, deletedTransactions);
} else {
await mempoolBlocks.$updateBlockTemplates(newMempool, newTransactions, deletedTransactions, true);
}
} else { } else {
mempoolBlocks.updateMempoolBlocks(newMempool, true); mempoolBlocks.updateMempoolBlocks(newMempool, true);
} }
@ -570,7 +574,7 @@ class WebsocketHandler {
}); });
} }
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise<void> { async handleNewBlock(block: BlockExtended, txIds: string[], transactions: MempoolTransactionExtended[]): Promise<void> {
if (!this.wss) { if (!this.wss) {
throw new Error('WebSocket.Server is not set'); throw new Error('WebSocket.Server is not set');
} }
@ -588,7 +592,11 @@ class WebsocketHandler {
if (separateAudit) { if (separateAudit) {
auditMempool = deepClone(_memPool); auditMempool = deepClone(_memPool);
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) { if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
projectedBlocks = await mempoolBlocks.$makeBlockTemplates(auditMempool, false); if (config.MEMPOOL.RUST_GBT) {
projectedBlocks = await mempoolBlocks.$oneOffRustBlockTemplates(auditMempool);
} else {
projectedBlocks = await mempoolBlocks.$makeBlockTemplates(auditMempool, false);
}
} else { } else {
projectedBlocks = mempoolBlocks.updateMempoolBlocks(auditMempool, false); projectedBlocks = mempoolBlocks.updateMempoolBlocks(auditMempool, false);
} }
@ -655,7 +663,11 @@ class WebsocketHandler {
} }
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) { if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.$makeBlockTemplates(_memPool, true); if (config.MEMPOOL.RUST_GBT) {
await mempoolBlocks.$rustUpdateBlockTemplates(_memPool, Object.keys(_memPool).length, [], transactions);
} else {
await mempoolBlocks.$makeBlockTemplates(_memPool, true);
}
} else { } else {
mempoolBlocks.updateMempoolBlocks(_memPool, true); mempoolBlocks.updateMempoolBlocks(_memPool, true);
} }


@ -31,6 +31,7 @@ interface IConfig {
AUDIT: boolean; AUDIT: boolean;
ADVANCED_GBT_AUDIT: boolean; ADVANCED_GBT_AUDIT: boolean;
ADVANCED_GBT_MEMPOOL: boolean; ADVANCED_GBT_MEMPOOL: boolean;
RUST_GBT: boolean;
CPFP_INDEXING: boolean; CPFP_INDEXING: boolean;
MAX_BLOCKS_BULK_QUERY: number; MAX_BLOCKS_BULK_QUERY: number;
DISK_CACHE_BLOCK_INTERVAL: number; DISK_CACHE_BLOCK_INTERVAL: number;
@ -160,6 +161,7 @@ const defaults: IConfig = {
'AUDIT': false, 'AUDIT': false,
'ADVANCED_GBT_AUDIT': false, 'ADVANCED_GBT_AUDIT': false,
'ADVANCED_GBT_MEMPOOL': false, 'ADVANCED_GBT_MEMPOOL': false,
'RUST_GBT': false,
'CPFP_INDEXING': false, 'CPFP_INDEXING': false,
'MAX_BLOCKS_BULK_QUERY': 0, 'MAX_BLOCKS_BULK_QUERY': 0,
'DISK_CACHE_BLOCK_INTERVAL': 6, 'DISK_CACHE_BLOCK_INTERVAL': 6,


@ -94,9 +94,11 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
} }
export interface MempoolTransactionExtended extends TransactionExtended { export interface MempoolTransactionExtended extends TransactionExtended {
order: number;
sigops: number; sigops: number;
adjustedVsize: number; adjustedVsize: number;
adjustedFeePerVsize: number; adjustedFeePerVsize: number;
inputs?: number[];
} }
export interface AuditTransaction { export interface AuditTransaction {
@ -126,9 +128,9 @@ export interface CompactThreadTransaction {
weight: number; weight: number;
sigops: number; sigops: number;
feePerVsize: number; feePerVsize: number;
effectiveFeePerVsize?: number; effectiveFeePerVsize: number;
inputs: number[]; inputs: number[];
cpfpRoot?: string; cpfpRoot?: number;
cpfpChecked?: boolean; cpfpChecked?: boolean;
dirty?: boolean; dirty?: boolean;
} }


@ -7,7 +7,12 @@ WORKDIR /build
COPY . . COPY . .
RUN apt-get update RUN apt-get update
RUN apt-get install -y build-essential python3 pkg-config RUN apt-get install -y build-essential python3 pkg-config curl
# Install Rust via rustup
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
ENV PATH="/root/.cargo/bin:$PATH"
RUN npm install --omit=dev --omit=optional RUN npm install --omit=dev --omit=optional
RUN npm run package RUN npm run package


@ -25,6 +25,7 @@
"AUDIT": __MEMPOOL_AUDIT__, "AUDIT": __MEMPOOL_AUDIT__,
"ADVANCED_GBT_AUDIT": __MEMPOOL_ADVANCED_GBT_AUDIT__, "ADVANCED_GBT_AUDIT": __MEMPOOL_ADVANCED_GBT_AUDIT__,
"ADVANCED_GBT_MEMPOOL": __MEMPOOL_ADVANCED_GBT_MEMPOOL__, "ADVANCED_GBT_MEMPOOL": __MEMPOOL_ADVANCED_GBT_MEMPOOL__,
"RUST_GBT": __MEMPOOL_RUST_GBT__,
"CPFP_INDEXING": __MEMPOOL_CPFP_INDEXING__, "CPFP_INDEXING": __MEMPOOL_CPFP_INDEXING__,
"MAX_BLOCKS_BULK_QUERY": __MEMPOOL_MAX_BLOCKS_BULK_QUERY__, "MAX_BLOCKS_BULK_QUERY": __MEMPOOL_MAX_BLOCKS_BULK_QUERY__,
"DISK_CACHE_BLOCK_INTERVAL": __MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__, "DISK_CACHE_BLOCK_INTERVAL": __MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__,


@ -28,6 +28,7 @@ __MEMPOOL_POOLS_JSON_TREE_URL__=${MEMPOOL_POOLS_JSON_TREE_URL:=https://api.githu
__MEMPOOL_AUDIT__=${MEMPOOL_AUDIT:=false} __MEMPOOL_AUDIT__=${MEMPOOL_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_AUDIT__=${MEMPOOL_ADVANCED_GBT_AUDIT:=false} __MEMPOOL_ADVANCED_GBT_AUDIT__=${MEMPOOL_ADVANCED_GBT_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_MEMPOOL__=${MEMPOOL_ADVANCED_GBT_MEMPOOL:=false} __MEMPOOL_ADVANCED_GBT_MEMPOOL__=${MEMPOOL_ADVANCED_GBT_MEMPOOL:=false}
__MEMPOOL_RUST_GBT__=${MEMPOOL_RUST_GBT:=false}
__MEMPOOL_CPFP_INDEXING__=${MEMPOOL_CPFP_INDEXING:=false} __MEMPOOL_CPFP_INDEXING__=${MEMPOOL_CPFP_INDEXING:=false}
__MEMPOOL_MAX_BLOCKS_BULK_QUERY__=${MEMPOOL_MAX_BLOCKS_BULK_QUERY:=0} __MEMPOOL_MAX_BLOCKS_BULK_QUERY__=${MEMPOOL_MAX_BLOCKS_BULK_QUERY:=0}
__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__=${MEMPOOL_DISK_CACHE_BLOCK_INTERVAL:=6} __MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__=${MEMPOOL_DISK_CACHE_BLOCK_INTERVAL:=6}
@ -155,6 +156,7 @@ sed -i "s!__MEMPOOL_POOLS_JSON_URL__!${__MEMPOOL_POOLS_JSON_URL__}!g" mempool-co
sed -i "s!__MEMPOOL_POOLS_JSON_TREE_URL__!${__MEMPOOL_POOLS_JSON_TREE_URL__}!g" mempool-config.json sed -i "s!__MEMPOOL_POOLS_JSON_TREE_URL__!${__MEMPOOL_POOLS_JSON_TREE_URL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_AUDIT__!${__MEMPOOL_AUDIT__}!g" mempool-config.json sed -i "s!__MEMPOOL_AUDIT__!${__MEMPOOL_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_MEMPOOL__!${__MEMPOOL_ADVANCED_GBT_MEMPOOL__}!g" mempool-config.json sed -i "s!__MEMPOOL_ADVANCED_GBT_MEMPOOL__!${__MEMPOOL_ADVANCED_GBT_MEMPOOL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_RUST_GBT__!${__MEMPOOL_GBT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_AUDIT__!${__MEMPOOL_ADVANCED_GBT_AUDIT__}!g" mempool-config.json sed -i "s!__MEMPOOL_ADVANCED_GBT_AUDIT__!${__MEMPOOL_ADVANCED_GBT_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CPFP_INDEXING__!${__MEMPOOL_CPFP_INDEXING__}!g" mempool-config.json sed -i "s!__MEMPOOL_CPFP_INDEXING__!${__MEMPOOL_CPFP_INDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MAX_BLOCKS_BULK_QUERY__!${__MEMPOOL_MAX_BLOCKS_BULK_QUERY__}!g" mempool-config.json sed -i "s!__MEMPOOL_MAX_BLOCKS_BULK_QUERY__!${__MEMPOOL_MAX_BLOCKS_BULK_QUERY__}!g" mempool-config.json


@ -14,6 +14,7 @@
"CPFP_INDEXING": true, "CPFP_INDEXING": true,
"ADVANCED_GBT_AUDIT": true, "ADVANCED_GBT_AUDIT": true,
"ADVANCED_GBT_MEMPOOL": true, "ADVANCED_GBT_MEMPOOL": true,
"RUST_GBT": true,
"USE_SECOND_NODE_FOR_MINFEE": true, "USE_SECOND_NODE_FOR_MINFEE": true,
"DISK_CACHE_BLOCK_INTERVAL": 1 "DISK_CACHE_BLOCK_INTERVAL": 1
}, },


@ -10,6 +10,7 @@
"AUDIT": true, "AUDIT": true,
"ADVANCED_GBT_AUDIT": true, "ADVANCED_GBT_AUDIT": true,
"ADVANCED_GBT_MEMPOOL": true, "ADVANCED_GBT_MEMPOOL": true,
"RUST_GBT": true,
"POLL_RATE_MS": 1000, "POLL_RATE_MS": 1000,
"DISK_CACHE_BLOCK_INTERVAL": 1 "DISK_CACHE_BLOCK_INTERVAL": 1
}, },


@ -10,6 +10,7 @@
"AUDIT": true, "AUDIT": true,
"ADVANCED_GBT_AUDIT": true, "ADVANCED_GBT_AUDIT": true,
"ADVANCED_GBT_MEMPOOL": true, "ADVANCED_GBT_MEMPOOL": true,
"RUST_GBT": true,
"POLL_RATE_MS": 1000, "POLL_RATE_MS": 1000,
"DISK_CACHE_BLOCK_INTERVAL": 1 "DISK_CACHE_BLOCK_INTERVAL": 1
}, },

rust-toolchain Normal file

@ -0,0 +1 @@
1.70