mirror of
https://github.com/bitcoin-s/bitcoin-s.git
synced 2025-02-24 06:57:51 +01:00
Segregate mainnet chainhandler tests and regtest chainhandler tests (#1988)
This commit is contained in:
parent 5f7356c526
commit 4149271b77
3 changed files with 300 additions and 271 deletions
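In short, the mainnet-dependent cases (the processHeaders benchmark, the "most work, not the highest" best-block check, processing real mainnet headers, and chain-work recalculation) move out of ChainHandlerTest into a new MainnetChainHandlerTest suite that pins the mainnet config, while the processHeaders and reorg helpers move into the ChainUnitTest trait. A condensed sketch of the new suite's skeleton, abridged from the full file added below (the comments are summaries, not part of the commit):

package org.bitcoins.chain.blockchain

import org.bitcoins.chain.config.ChainAppConfig
import org.bitcoins.testkit.chain.ChainDbUnitTest
import org.bitcoins.testkit.chain.fixture.ChainFixtureTag
import org.scalatest.FutureOutcome

class MainnetChainHandlerTest extends ChainDbUnitTest {

  override type FixtureParam = ChainHandler

  override val defaultTag: ChainFixtureTag = ChainFixtureTag.GenisisChainHandler

  // these cases exercise real mainnet headers, so the suite pins the mainnet config
  implicit override lazy val appConfig: ChainAppConfig = mainnetAppConfig

  override def withFixture(test: OneArgAsyncTest): FutureOutcome =
    withChainHandler(test)

  behavior of "MainnetChainHandler"

  // relocated mainnet cases: processHeaders benchmark, best block by work,
  // real mainnet header processing, chain-work recalculation (full bodies below)
}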
@@ -1,7 +1,5 @@
package org.bitcoins.chain.blockchain

import akka.actor.ActorSystem
import org.bitcoins.chain.config.ChainAppConfig
import org.bitcoins.chain.pow.Pow
import org.bitcoins.core.api.chain.db.{
  BlockHeaderDb,
@@ -31,26 +29,11 @@ import org.scalatest.{Assertion, FutureOutcome}
import play.api.libs.json.Json

import scala.concurrent.Future
import scala.io.BufferedSource

class ChainHandlerTest extends ChainDbUnitTest {

  override type FixtureParam = ChainHandler

  implicit override val system = ActorSystem("ChainUnitTest")

  // we're working with mainnet data
  implicit override lazy val appConfig: ChainAppConfig = mainnetAppConfig

  val source: BufferedSource = FileUtil.getFileAsSource("block_headers.json")
  val arrStr: String = source.getLines.next
  source.close()

  import org.bitcoins.commons.serializers.JsonReaders.BlockHeaderReads

  val headersResult: Vector[BlockHeader] =
    Json.parse(arrStr).validate[Vector[BlockHeader]].get

  override val defaultTag: ChainFixtureTag = ChainFixtureTag.GenisisChainHandler

  override def withFixture(test: OneArgAsyncTest): FutureOutcome =
@@ -82,37 +65,6 @@ class ChainHandlerTest extends ChainDbUnitTest {
      foundHeaderF.map(found => assert(found.get == newValidHeader))
  }

  it must "have getBestBlockHash return the header with the most work, not the highest" in {
    tempHandler: ChainHandler =>
      val dummyHeader =
        BlockHeaderDbHelper.fromBlockHeader(1,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562462)

      val highestHeader =
        BlockHeaderDbHelper.fromBlockHeader(2,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562463)

      val headerWithMostWork =
        BlockHeaderDbHelper.fromBlockHeader(1,
                                            BigInt(1000),
                                            ChainTestUtil.blockHeader562464)

      val tallestBlockchain =
        Blockchain(Vector(highestHeader, dummyHeader, genesis))
      val mostWorkChain = Blockchain(Vector(headerWithMostWork, genesis))

      val chainHandler =
        tempHandler.copy(blockchains = Vector(tallestBlockchain, mostWorkChain))

      for {
        hash <- chainHandler.getBestBlockHash()
      } yield {
        assert(hash == headerWithMostWork.blockHeader.hashBE)
      }
  }

  it must "have an in-order seed" in { _ =>
    val source = FileUtil.getFileAsSource("block_headers.json")
    val arrStr = source.getLines.next
@@ -135,55 +87,6 @@ class ChainHandlerTest extends ChainDbUnitTest {
      succeed
  }

  it must "be able to process and fetch real headers from mainnet" in {
    chainHandler: ChainHandler =>
      val blockHeaders =
        headersResult.drop(
          ChainUnitTest.FIRST_POW_CHANGE - ChainUnitTest.FIRST_BLOCK_HEIGHT)

      val firstBlockHeaderDb =
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE - 2,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562462)

      val secondBlockHeaderDb =
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE - 1,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562463)

      val thirdBlockHeaderDb =
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562464)

      /*
       * We need to insert one block before the first POW check because it is used on the next
       * POW check. We then need to insert the next to blocks to circumvent a POW check since
       * that would require we have an old block in the Blockchain that we don't have.
       */
      val firstThreeBlocks =
        Vector(firstBlockHeaderDb, secondBlockHeaderDb, thirdBlockHeaderDb)

      val createdF = chainHandler.blockHeaderDAO.createAll(firstThreeBlocks)

      createdF.flatMap { _ =>
        val blockchain = Blockchain.fromHeaders(firstThreeBlocks.reverse)
        val handler = ChainHandler(chainHandler.blockHeaderDAO,
                                   chainHandler.filterHeaderDAO,
                                   chainHandler.filterDAO,
                                   blockchain)
        val processorF = Future.successful(handler)
        // Takes way too long to do all blocks
        val blockHeadersToTest = blockHeaders.tail
          .take(
            (2 * chainHandler.chainConfig.chain.difficultyChangeInterval + 1).toInt)

        processHeaders(processorF = processorF,
                       headers = blockHeadersToTest,
                       height = ChainUnitTest.FIRST_POW_CHANGE + 1)
      }
  }

  it must "not fail ChainHandler.processHeaders() with empty headers collection" in {
    chainHandler: ChainHandler =>
      for {
@@ -191,62 +94,6 @@ class ChainHandlerTest extends ChainDbUnitTest {
      } yield succeed
  }

  it must "benchmark ChainHandler.processHeaders()" in {
    chainHandler: ChainHandler =>
      val blockHeaders =
        headersResult.drop(
          ChainUnitTest.FIRST_POW_CHANGE - ChainUnitTest.FIRST_BLOCK_HEIGHT)

      val firstBlockHeaderDb =
        BlockHeaderDbHelper.fromBlockHeader(
          ChainUnitTest.FIRST_POW_CHANGE - 2,
          Pow.getBlockProof(ChainTestUtil.blockHeader562462),
          ChainTestUtil.blockHeader562462)

      val secondBlockHeaderDb = {
        val chainWork = firstBlockHeaderDb.chainWork + Pow.getBlockProof(
          ChainTestUtil.blockHeader562463)
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE - 1,
                                            chainWork,
                                            ChainTestUtil.blockHeader562463)
      }

      val thirdBlockHeaderDb = {
        val chainWork = secondBlockHeaderDb.chainWork + Pow.getBlockProof(
          ChainTestUtil.blockHeader562464)
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE,
                                            chainWork,
                                            ChainTestUtil.blockHeader562464)
      }

      /*
       * We need to insert one block before the first POW check because it is used on the next
       * POW check. We then need to insert the next to blocks to circumvent a POW check since
       * that would require we have an old block in the Blockchain that we don't have.
       */
      val firstThreeBlocks =
        Vector(firstBlockHeaderDb, secondBlockHeaderDb, thirdBlockHeaderDb)

      val createdF = chainHandler.blockHeaderDAO.createAll(firstThreeBlocks)

      createdF.flatMap { _ =>
        val blockchain = Blockchain.fromHeaders(firstThreeBlocks.reverse)
        val handler = chainHandler.copy(blockchains = Vector(blockchain))

        // Takes way too long to do all blocks
        val blockHeadersToTest = blockHeaders.tail
          .take(
            (2 * chainHandler.chainConfig.chain.difficultyChangeInterval + 1))

        val processedF = handler.processHeaders(blockHeadersToTest)

        for {
          ch <- processedF
          bestHash <- ch.getBestBlockHash
        } yield assert(bestHash == blockHeadersToTest.last.hashBE)
      }
  }

  // B
  // C -> D
  it must "handle a very basic reorg where one chain is one block behind the best chain" in {
@@ -553,40 +400,6 @@ class ChainHandlerTest extends ChainDbUnitTest {
      }
  }

  it must "properly recalculate chain work" in { tempHandler: ChainHandler =>
    val headersWithNoWork = Vector(
      BlockHeaderDbHelper.fromBlockHeader(3,
                                          BigInt(0),
                                          ChainTestUtil.blockHeader562464),
      BlockHeaderDbHelper.fromBlockHeader(2,
                                          BigInt(0),
                                          ChainTestUtil.blockHeader562463),
      BlockHeaderDbHelper.fromBlockHeader(1,
                                          BigInt(0),
                                          ChainTestUtil.blockHeader562462)
    )

    val blockchain = Blockchain(headersWithNoWork :+ genesis)

    val chainHandler = tempHandler.copy(blockchains = Vector(blockchain))

    for {
      _ <- chainHandler.blockHeaderDAO.createAll(headersWithNoWork)
      isMissingWork <- chainHandler.isMissingChainWork
      _ = assert(isMissingWork)
      newHandler <- chainHandler.recalculateChainWork
      headerDb <- newHandler.getBestBlockHeader()
    } yield {
      assert(headerDb.height == headersWithNoWork.head.height)
      assert(
        newHandler.blockchains.head
          .groupBy(_.hashBE)
          .forall(_._2.size == 1))
      assert(headerDb.hashBE == headersWithNoWork.head.hashBE)
      assert(headerDb.chainWork == BigInt(12885098501L))
    }
  }

  it must "get best filter header with zero blockchains in memory" in {
    chainHandler: ChainHandler =>
      val noChainsChainHandler = chainHandler.copy(blockchains = Vector.empty)
@@ -600,88 +413,4 @@ class ChainHandlerTest extends ChainDbUnitTest {
        assert(filterHeaderOpt.get == ChainUnitTest.genesisFilterHeaderDb)
      }
  }

  final def processHeaders(
      processorF: Future[ChainApi],
      headers: Vector[BlockHeader],
      height: Int): Future[Assertion] = {

    def processedHeadersF =
      for {
        chainApi <- processorF
        chainApiWithHeaders <-
          FutureUtil.foldLeftAsync(chainApi, headers.grouped(2000).toVector)(
            (chainApi, headers) => chainApi.processHeaders(headers))
      } yield {
        FutureUtil.foldLeftAsync((Option.empty[BlockHeaderDb],
                                  height,
                                  Vector.empty[Future[Assertion]]),
                                 headers) {
          case ((prevHeaderDbOpt, height, assertions), header) =>
            for {
              headerOpt <- chainApiWithHeaders.getHeader(header.hashBE)
            } yield {
              val chainWork = prevHeaderDbOpt match {
                case None => Pow.getBlockProof(header)
                case Some(prevHeader) =>
                  prevHeader.chainWork + Pow.getBlockProof(header)
              }

              val expectedBlockHeaderDb =
                BlockHeaderDbHelper.fromBlockHeader(height, chainWork, header)

              val newHeight = height + 1

              val newAssertions = assertions :+ Future(
                assert(headerOpt.contains(expectedBlockHeaderDb)))

              (Some(expectedBlockHeaderDb), newHeight, newAssertions)
            }
        }
      }

    for {
      processedHeaders <- processedHeadersF
      (_, _, vecFutAssert) <- processedHeaders
      assertion <- ScalaTestUtil.toAssertF(vecFutAssert)
    } yield {
      assertion
    }

  }

  /** Builds two competing headers that are built from the same parent */
  private def buildCompetingHeaders(
      parent: BlockHeaderDb): (BlockHeader, BlockHeader) = {
    val newHeaderB =
      BlockHeaderHelper.buildNextHeader(parent)

    val newHeaderC =
      BlockHeaderHelper.buildNextHeader(parent)

    (newHeaderB.blockHeader, newHeaderC.blockHeader)
  }

  case class ReorgFixture(
      chainApi: ChainApi,
      headerDb1: BlockHeaderDb,
      headerDb2: BlockHeaderDb,
      oldBestBlockHeader: BlockHeaderDb) {
    lazy val header1: BlockHeader = headerDb1.blockHeader
    lazy val header2: BlockHeader = headerDb2.blockHeader
  }

  /** Builds two competing headers off of the [[ChainHandler.getBestBlockHash best chain tip]] */
  private def buildChainHandlerCompetingHeaders(
      chainHandler: ChainHandler): Future[ReorgFixture] = {
    for {
      oldBestTip <- chainHandler.getBestBlockHeader()
      (newHeaderB, newHeaderC) = buildCompetingHeaders(oldBestTip)
      newChainApi <- chainHandler.processHeaders(Vector(newHeaderB, newHeaderC))
      newHeaderDbB <- newChainApi.getHeader(newHeaderB.hashBE)
      newHeaderDbC <- newChainApi.getHeader(newHeaderC.hashBE)
    } yield {
      ReorgFixture(newChainApi, newHeaderDbB.get, newHeaderDbC.get, oldBestTip)
    }
  }
}
@@ -0,0 +1,214 @@
package org.bitcoins.chain.blockchain

import org.bitcoins.chain.config.ChainAppConfig
import org.bitcoins.chain.pow.Pow
import org.bitcoins.core.api.chain.db.{BlockHeaderDb, BlockHeaderDbHelper}
import org.bitcoins.core.protocol.blockchain.BlockHeader
import org.bitcoins.testkit.chain.{
  ChainDbUnitTest,
  ChainTestUtil,
  ChainUnitTest
}
import org.bitcoins.testkit.chain.fixture.ChainFixtureTag
import org.bitcoins.testkit.util.FileUtil
import org.scalatest.FutureOutcome
import play.api.libs.json.Json

import scala.concurrent.Future
import scala.io.BufferedSource

class MainnetChainHandlerTest extends ChainDbUnitTest {

  override type FixtureParam = ChainHandler

  override val defaultTag: ChainFixtureTag = ChainFixtureTag.GenisisChainHandler

  implicit override lazy val appConfig: ChainAppConfig = mainnetAppConfig

  val source: BufferedSource = FileUtil.getFileAsSource("block_headers.json")
  val arrStr: String = source.getLines.next
  source.close()

  import org.bitcoins.commons.serializers.JsonReaders.BlockHeaderReads

  val headersResult: Vector[BlockHeader] =
    Json.parse(arrStr).validate[Vector[BlockHeader]].get

  val genesis: BlockHeaderDb = ChainUnitTest.genesisHeaderDb

  override def withFixture(test: OneArgAsyncTest): FutureOutcome =
    withChainHandler(test)

  behavior of "MainnetChainHandler"

  it must "benchmark ChainHandler.processHeaders()" in {
    chainHandler: ChainHandler =>
      val blockHeaders =
        headersResult.drop(
          ChainUnitTest.FIRST_POW_CHANGE - ChainUnitTest.FIRST_BLOCK_HEIGHT)

      val firstBlockHeaderDb =
        BlockHeaderDbHelper.fromBlockHeader(
          ChainUnitTest.FIRST_POW_CHANGE - 2,
          Pow.getBlockProof(ChainTestUtil.blockHeader562462),
          ChainTestUtil.blockHeader562462)

      val secondBlockHeaderDb = {
        val chainWork = firstBlockHeaderDb.chainWork + Pow.getBlockProof(
          ChainTestUtil.blockHeader562463)
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE - 1,
                                            chainWork,
                                            ChainTestUtil.blockHeader562463)
      }

      val thirdBlockHeaderDb = {
        val chainWork = secondBlockHeaderDb.chainWork + Pow.getBlockProof(
          ChainTestUtil.blockHeader562464)
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE,
                                            chainWork,
                                            ChainTestUtil.blockHeader562464)
      }

      /*
       * We need to insert one block before the first POW check because it is used on the next
       * POW check. We then need to insert the next to blocks to circumvent a POW check since
       * that would require we have an old block in the Blockchain that we don't have.
       */
      val firstThreeBlocks =
        Vector(firstBlockHeaderDb, secondBlockHeaderDb, thirdBlockHeaderDb)

      val createdF = chainHandler.blockHeaderDAO.createAll(firstThreeBlocks)

      createdF.flatMap { _ =>
        val blockchain = Blockchain.fromHeaders(firstThreeBlocks.reverse)
        val handler = chainHandler.copy(blockchains = Vector(blockchain))

        // Takes way too long to do all blocks
        val blockHeadersToTest = blockHeaders.tail
          .take(
            (2 * chainHandler.chainConfig.chain.difficultyChangeInterval + 1))

        val processedF = handler.processHeaders(blockHeadersToTest)

        for {
          ch <- processedF
          bestHash <- ch.getBestBlockHash
        } yield assert(bestHash == blockHeadersToTest.last.hashBE)
      }
  }

  it must "have getBestBlockHash return the header with the most work, not the highest" in {
    tempHandler: ChainHandler =>
      val dummyHeader =
        BlockHeaderDbHelper.fromBlockHeader(1,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562462)

      val highestHeader =
        BlockHeaderDbHelper.fromBlockHeader(2,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562463)

      val headerWithMostWork =
        BlockHeaderDbHelper.fromBlockHeader(1,
                                            BigInt(1000),
                                            ChainTestUtil.blockHeader562464)

      val tallestBlockchain =
        Blockchain(Vector(highestHeader, dummyHeader, genesis))
      val mostWorkChain = Blockchain(Vector(headerWithMostWork, genesis))

      val chainHandler =
        tempHandler.copy(blockchains = Vector(tallestBlockchain, mostWorkChain))

      for {
        hash <- chainHandler.getBestBlockHash()
      } yield {
        assert(hash == headerWithMostWork.blockHeader.hashBE)
      }
  }

  it must "be able to process and fetch real headers from mainnet" in {
    chainHandler: ChainHandler =>
      val blockHeaders =
        headersResult.drop(
          ChainUnitTest.FIRST_POW_CHANGE - ChainUnitTest.FIRST_BLOCK_HEIGHT)

      val firstBlockHeaderDb =
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE - 2,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562462)

      val secondBlockHeaderDb =
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE - 1,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562463)

      val thirdBlockHeaderDb =
        BlockHeaderDbHelper.fromBlockHeader(ChainUnitTest.FIRST_POW_CHANGE,
                                            BigInt(0),
                                            ChainTestUtil.blockHeader562464)

      /*
       * We need to insert one block before the first POW check because it is used on the next
       * POW check. We then need to insert the next to blocks to circumvent a POW check since
       * that would require we have an old block in the Blockchain that we don't have.
       */
      val firstThreeBlocks =
        Vector(firstBlockHeaderDb, secondBlockHeaderDb, thirdBlockHeaderDb)

      val createdF = chainHandler.blockHeaderDAO.createAll(firstThreeBlocks)

      createdF.flatMap { _ =>
        val blockchain = Blockchain.fromHeaders(firstThreeBlocks.reverse)
        val handler = ChainHandler(chainHandler.blockHeaderDAO,
                                   chainHandler.filterHeaderDAO,
                                   chainHandler.filterDAO,
                                   blockchain)
        val processorF = Future.successful(handler)
        // Takes way too long to do all blocks
        val blockHeadersToTest = blockHeaders.tail
          .take(
            (2 * chainHandler.chainConfig.chain.difficultyChangeInterval + 1).toInt)

        processHeaders(processorF = processorF,
                       headers = blockHeadersToTest,
                       height = ChainUnitTest.FIRST_POW_CHANGE + 1)
      }
  }

  it must "properly recalculate chain work" in { tempHandler: ChainHandler =>
    val headersWithNoWork = Vector(
      BlockHeaderDbHelper.fromBlockHeader(3,
                                          BigInt(0),
                                          ChainTestUtil.blockHeader562464),
      BlockHeaderDbHelper.fromBlockHeader(2,
                                          BigInt(0),
                                          ChainTestUtil.blockHeader562463),
      BlockHeaderDbHelper.fromBlockHeader(1,
                                          BigInt(0),
                                          ChainTestUtil.blockHeader562462)
    )

    val blockchain = Blockchain(headersWithNoWork :+ genesis)

    val chainHandler = tempHandler.copy(blockchains = Vector(blockchain))

    for {
      _ <- chainHandler.blockHeaderDAO.createAll(headersWithNoWork)
      isMissingWork <- chainHandler.isMissingChainWork
      _ = assert(isMissingWork)
      newHandler <- chainHandler.recalculateChainWork
      headerDb <- newHandler.getBestBlockHeader()
    } yield {
      assert(headerDb.height == headersWithNoWork.head.height)
      assert(
        newHandler.blockchains.head
          .groupBy(_.hashBE)
          .forall(_._2.size == 1))
      assert(headerDb.hashBE == headersWithNoWork.head.hashBE)
      assert(headerDb.chainWork == BigInt(12885098501L))
    }
  }

}
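The "most work, not the highest" case above pins down the tip-selection rule the handler is expected to follow: compare accumulated chain work, not chain length. A minimal standalone illustration of that rule (plain Scala, not the bitcoin-s API; Tip and bestTip are hypothetical names used only for this sketch):

object BestTipByWork extends App {

  // Hypothetical, stripped-down model of a chain tip, for illustration only.
  final case class Tip(hash: String, height: Int, chainWork: BigInt)

  // The best chain is the one whose tip carries the most accumulated work,
  // regardless of how many headers (height) each chain has.
  def bestTip(chains: Vector[Vector[Tip]]): Tip =
    chains.map(_.maxBy(_.chainWork)).maxBy(_.chainWork)

  // Mirrors the fixture above: a taller chain with zero recorded work loses
  // to a shorter chain whose tip has chainWork = 1000.
  val tallest = Vector(Tip("562463", height = 2, chainWork = BigInt(0)),
                       Tip("562462", height = 1, chainWork = BigInt(0)))
  val mostWork = Vector(Tip("562464", height = 1, chainWork = BigInt(1000)))

  assert(bestTip(Vector(tallest, mostWork)).hash == "562464")
}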
@@ -12,6 +12,7 @@ import org.bitcoins.chain.models._
import org.bitcoins.chain.pow.Pow
import org.bitcoins.core.api.chain.db._
import org.bitcoins.core.protocol.blockchain.{Block, BlockHeader}
import org.bitcoins.core.util.FutureUtil
import org.bitcoins.crypto.DoubleSha256DigestBE
import org.bitcoins.db.AppConfig
import org.bitcoins.rpc.client.common.{BitcoindRpcClient, BitcoindVersion}
@@ -20,6 +21,7 @@ import org.bitcoins.testkit.chain.fixture._
import org.bitcoins.testkit.fixtures.BitcoinSFixture
import org.bitcoins.testkit.node.CachedChainAppConfig
import org.bitcoins.testkit.rpc.BitcoindRpcTestUtil
import org.bitcoins.testkit.util.ScalaTestUtil
import org.bitcoins.testkit.{chain, BitcoinSTestAppConfig}
import org.bitcoins.zmq.ZMQSubscriber
import org.scalatest._
@@ -283,6 +285,90 @@ trait ChainUnitTest
    makeDependentFixture(builder, ChainUnitTest.destroyBitcoindV19ChainApi)(
      test)
  }

  final def processHeaders(
      processorF: Future[ChainApi],
      headers: Vector[BlockHeader],
      height: Int): Future[Assertion] = {

    def processedHeadersF =
      for {
        chainApi <- processorF
        chainApiWithHeaders <-
          FutureUtil.foldLeftAsync(chainApi, headers.grouped(2000).toVector)(
            (chainApi, headers) => chainApi.processHeaders(headers))
      } yield {
        FutureUtil.foldLeftAsync((Option.empty[BlockHeaderDb],
                                  height,
                                  Vector.empty[Future[Assertion]]),
                                 headers) {
          case ((prevHeaderDbOpt, height, assertions), header) =>
            for {
              headerOpt <- chainApiWithHeaders.getHeader(header.hashBE)
            } yield {
              val chainWork = prevHeaderDbOpt match {
                case None => Pow.getBlockProof(header)
                case Some(prevHeader) =>
                  prevHeader.chainWork + Pow.getBlockProof(header)
              }

              val expectedBlockHeaderDb =
                BlockHeaderDbHelper.fromBlockHeader(height, chainWork, header)

              val newHeight = height + 1

              val newAssertions = assertions :+ Future(
                assert(headerOpt.contains(expectedBlockHeaderDb)))

              (Some(expectedBlockHeaderDb), newHeight, newAssertions)
            }
        }
      }

    for {
      processedHeaders <- processedHeadersF
      (_, _, vecFutAssert) <- processedHeaders
      assertion <- ScalaTestUtil.toAssertF(vecFutAssert)
    } yield {
      assertion
    }

  }

  /** Builds two competing headers that are built from the same parent */
  private def buildCompetingHeaders(
      parent: BlockHeaderDb): (BlockHeader, BlockHeader) = {
    val newHeaderB =
      BlockHeaderHelper.buildNextHeader(parent)

    val newHeaderC =
      BlockHeaderHelper.buildNextHeader(parent)

    (newHeaderB.blockHeader, newHeaderC.blockHeader)
  }

  case class ReorgFixture(
      chainApi: ChainApi,
      headerDb1: BlockHeaderDb,
      headerDb2: BlockHeaderDb,
      oldBestBlockHeader: BlockHeaderDb) {
    lazy val header1: BlockHeader = headerDb1.blockHeader
    lazy val header2: BlockHeader = headerDb2.blockHeader
  }

  /** Builds two competing headers off of the [[ChainHandler.getBestBlockHash best chain tip]] */
  def buildChainHandlerCompetingHeaders(
      chainHandler: ChainHandler): Future[ReorgFixture] = {
    for {
      oldBestTip <- chainHandler.getBestBlockHeader()
      (newHeaderB, newHeaderC) = buildCompetingHeaders(oldBestTip)
      newChainApi <- chainHandler.processHeaders(Vector(newHeaderB, newHeaderC))
      newHeaderDbB <- newChainApi.getHeader(newHeaderB.hashBE)
      newHeaderDbC <- newChainApi.getHeader(newHeaderC.hashBE)
    } yield {
      ReorgFixture(newChainApi, newHeaderDbB.get, newHeaderDbC.get, oldBestTip)
    }
  }
}

object ChainUnitTest extends ChainVerificationLogger {
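The processHeaders helper now shared through ChainUnitTest threads a ChainApi through the header batches sequentially via FutureUtil.foldLeftAsync. As a point of reference, an asynchronous left fold with the shape used above can be written with plain Scala Futures; this is only a sketch under that assumption, not the actual FutureUtil implementation:

import scala.concurrent.{ExecutionContext, Future}

object FoldLeftAsyncSketch {

  // Sequential asynchronous fold: each step starts only after the previous
  // Future has completed, so batches are applied strictly in order.
  def foldLeftAsync[A, B](init: A, items: Vector[B])(f: (A, B) => Future[A])(
      implicit ec: ExecutionContext): Future[A] =
    items.foldLeft(Future.successful(init)) { (accF, item) =>
      accF.flatMap(acc => f(acc, item))
    }

  // Shape of the call in processHeaders above (chainApi and headers assumed
  // to be in scope):
  //   foldLeftAsync(chainApi, headers.grouped(2000).toVector)(
  //     (api, batch) => api.processHeaders(batch))
}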