Mirror of https://github.com/bitcoin-s/bitcoin-s.git (synced 2025-03-03 18:47:38 +01:00)
Add logic to fetch block headers after compact filters are synced in IBD to avoid a stale tip (#5037)
* Add logic to fetch block headers after compact filters are synced in IBD to avoid a stale tip
* Restore logging, remove println
* Add comment linking to issue
* Cleanup logging
* Only send getheaders message after filter sync if we are in IBD
parent 31e1bd79e9
commit f95360f8ba

2 changed files with 49 additions and 4 deletions
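The gist of the change, before the full diff below: once compact filter sync finishes and header/filter counts are level, the node checks whether it is still in IBD and, if so, asks the peer for headers starting from its current best block hash, so any blocks mined while filters were syncing get picked up. A minimal, self-contained sketch of that decision follows; the ChainApi and PeerMessageSender traits here are stubs for illustration, and only the method names that appear in the diff are taken from the real code.

import scala.concurrent.{ExecutionContext, Future}

// Stub interfaces, for illustration only; the real bitcoin-s traits are richer.
trait ChainApi {
  def getBestBlockHash(): Future[String]
  def isIBD(): Future[Boolean]
}

trait PeerMessageSender {
  // The real method takes a little-endian block hash; a String stands in here.
  def sendGetHeadersMessage(blockHashLE: String): Future[Unit]
}

object StaleTipCheck {

  /** Sketch of the check this commit adds after compact filter sync:
    * if we are still in IBD, request headers from our best block hash so any
    * blocks mined while filters were syncing get picked up; otherwise do
    * nothing. Returns false to signal that filter sync is no longer running,
    * mirroring the `yield false` in the diff.
    */
  def checkForStaleTip(chainApi: ChainApi, peer: PeerMessageSender)(implicit
      ec: ExecutionContext): Future[Boolean] = {
    for {
      bestBlockHash <- chainApi.getBestBlockHash()
      isIBD <- chainApi.isIBD()
      _ <- if (isIBD) peer.sendGetHeadersMessage(bestBlockHash) else Future.unit
    } yield false
  }
}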
NeutrinoNodeTest.scala
@@ -306,6 +306,38 @@ class NeutrinoNodeTest extends NodeTestWithCachedBitcoindPair {
     } yield {
       succeed
     }
   }
 
+  it must "sync block headers that occurred while we were syncing compact filters during IBD" in {
+    nodeConnectedWithBitcoind: NeutrinoNodeConnectedWithBitcoinds =>
+      //see: https://github.com/bitcoin-s/bitcoin-s/issues/5017
+      val node = nodeConnectedWithBitcoind.node
+      val bitcoind = nodeConnectedWithBitcoind.bitcoinds(0)
+
+      //start syncing node
+      val numBlocks = 5
+      val startSyncF = node.sync()
+      val genBlocksF = {
+        for {
+          _ <- startSyncF
+          //give a little time for the sync to start
+          _ <- AsyncUtil.nonBlockingSleep(500.milliseconds)
+          //generate blocks while sync is ongoing
+          _ <- bitcoind.generate(numBlocks)
+        } yield {
+          ()
+        }
+      }
+
+      for {
+        _ <- genBlocksF
+        //wait for sync to complete
+        _ <- NodeTestUtil.awaitAllSync(node, bitcoind)
+        //generate another block and make sure it syncs it
+        _ <- bitcoind.generate(1)
+        _ <- NodeTestUtil.awaitAllSync(node, bitcoind)
+      } yield {
+        succeed
+      }
+  }
 }
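The test leans on the helper NodeTestUtil.awaitAllSync, which completes only once the node and bitcoind agree on their chain state. A rough, self-contained sketch of that kind of polling helper is below; awaitCondition, its signature, and everything inside it are illustrative, not the test kit's actual implementation.

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future, blocking}

object AwaitSketch {

  /** Illustrative polling helper: re-evaluate an async condition until it
    * holds or we run out of attempts. NodeTestUtil.awaitAllSync presumably
    * does something along these lines for block, filter-header and filter
    * counts.
    */
  def awaitCondition(condition: () => Future[Boolean],
                     interval: FiniteDuration = 1.second,
                     maxTries: Int = 60)(implicit
      ec: ExecutionContext): Future[Unit] = {
    def loop(remaining: Int): Future[Unit] =
      condition().flatMap {
        case true => Future.unit
        case false if remaining <= 0 =>
          Future.failed(
            new RuntimeException(s"Condition not met after $maxTries attempts"))
        case false =>
          // sleep off the caller's thread, then try again
          Future(blocking(Thread.sleep(interval.toMillis)))
            .flatMap(_ => loop(remaining - 1))
      }
    loop(maxTries)
  }
}

In the test above, the condition would presumably compare the node's block, filter-header, and filter counts against bitcoind's.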
DataMessageHandler.scala
@@ -136,7 +136,7 @@ case class DataMessageHandler(
           newChainApi <- chainApi.processFilterHeaders(
             filterHeaders,
             filterHeader.stopHash.flip)
-          (newSyncing, startFilterHeightOpt) <-
+          (newSyncing, _) <-
             if (filterHeaders.size == chainConfig.filterHeaderBatchSize) {
               logger.debug(
                 s"Received maximum amount of filter headers in one header message. This means we are not synced, requesting more")
@@ -150,7 +150,8 @@ case class DataMessageHandler(
           syncing <- sendFirstGetCompactFilterCommand(
             peerMsgSender,
             startHeightOpt).map { syncing =>
-            if (!syncing) logger.info("We are synced")
+            if (!syncing)
+              logger.info("Compact filters are already synced")
             syncing
           }
         } yield (syncing, startHeightOpt)
@@ -499,7 +500,7 @@ case class DataMessageHandler(
   }
 
   /** syncs filter headers in case the header chain is still ahead post filter sync */
-  def syncIfHeadersAhead(
+  private def syncIfHeadersAhead(
       peerMessageSender: PeerMessageSender): Future[Boolean] = {
     for {
       headerHeight <- chainApi.getBestHashBlockHeight()
@@ -523,7 +524,19 @@ case class DataMessageHandler(
             s"headerHeight=$headerHeight filterCount=$filterCount")
           logger.info(s"We are synced")
           Try(initialSyncDone.map(_.success(Done)))
-          Future.successful(false)
+          //check to see if we had blocks mined while IBD
+          //was ongoing, see: https://github.com/bitcoin-s/bitcoin-s/issues/5036
+          for {
+            bestBlockHash <- chainApi.getBestBlockHash()
+            isIBD <- chainApi.isIBD()
+            _ <- {
+              if (isIBD) {
+                peerMessageSender.sendGetHeadersMessage(bestBlockHash.flip)
+              } else {
+                Future.unit
+              }
+            }
+          } yield false
         }
       }
     } yield syncing
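A small aside on the bestBlockHash.flip in the last hunk: bitcoin-s keeps block hashes in the big-endian DoubleSha256DigestBE form internally, while the P2P messaging layer works with the little-endian DoubleSha256Digest, and .flip converts between the two. A tiny sketch of that conversion, where the hex value is arbitrary and chosen only to be a valid 32-byte hash:

import org.bitcoins.crypto.DoubleSha256DigestBE

object FlipExample {
  // Big-endian hash as bitcoin-s stores it (example value only).
  val bestBlockHashBE: DoubleSha256DigestBE =
    DoubleSha256DigestBE.fromHex("00" * 31 + "01")

  // .flip reverses the byte order, producing the little-endian
  // DoubleSha256Digest that sendGetHeadersMessage works with.
  val bestBlockHashLE = bestBlockHashBE.flip
}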