Increase chain code coverage (#2023)
parent db852e4ffb
commit 16599010c7
@@ -1,7 +1,10 @@
package org.bitcoins.chain.blockchain

import akka.actor.ActorSystem
import org.bitcoins.chain.validation.TipUpdateResult
import org.bitcoins.core.api.chain.db.BlockHeaderDb
import org.bitcoins.core.number.UInt32
import org.bitcoins.core.protocol.blockchain.BlockHeader
import org.bitcoins.testkit.chain.fixture.ChainFixture
import org.bitcoins.testkit.chain.{BlockHeaderHelper, ChainUnitTest}
import org.scalatest.FutureOutcome
@@ -35,7 +38,7 @@ class BlockchainTest extends ChainUnitTest {
        assert(newHeader == newChain.tip)

      case _ @(_: ConnectTipResult.Reorg | _: ConnectTipResult.BadTip) =>
-       assert(false)
+       fail()
    }
  }

@@ -77,4 +80,49 @@ class BlockchainTest extends ChainUnitTest {

      assert(reconstructed.isEmpty)
  }

  it must "fail to create a BlockchainUpdate.Failed with incompatible successful headers" inFixtured {
    case ChainFixture.Empty =>
      val genesis = ChainUnitTest.genesisHeaderDb
      val second = BlockHeaderHelper.buildNextHeader(genesis)
      val chain = Blockchain(Vector(second, genesis))

      assertThrows[IllegalArgumentException] {
        BlockchainUpdate.Failed(chain,
                                Vector(genesis),
                                second.blockHeader,
                                TipUpdateResult.BadNonce(second.blockHeader))
      }
  }

  it must "correctly calculate a BlockchainUpdate.Success's height" inFixtured {
    case ChainFixture.Empty =>
      val genesis = ChainUnitTest.genesisHeaderDb
      val second = BlockHeaderHelper.buildNextHeader(genesis)
      val chain = Blockchain(Vector(second, genesis))

      val updated = BlockchainUpdate.Successful(chain, chain.toVector)

      assert(updated.height == chain.height)
  }

  it must "correctly identify a bad tip" inFixtured {
    case ChainFixture.Empty =>
      val genesis = ChainUnitTest.genesisHeaderDb
      val chain = Blockchain(Vector(genesis))

      val goodHeader = BlockHeaderHelper.buildNextHeader(genesis).blockHeader
      val badHeader = BlockHeader(
        version = goodHeader.version,
        previousBlockHash = goodHeader.previousBlockHash,
        merkleRootHash = goodHeader.merkleRootHash,
        time = goodHeader.time,
        nBits = UInt32.zero,
        nonce = goodHeader.nonce
      )

      val result = Blockchain.connectTip(badHeader, chain)

      assert(result.isInstanceOf[ConnectTipResult.BadTip])
  }
}
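The new "bad tip" test drives Blockchain.connectTip directly, while the earlier hunk in this file matches on ConnectTipResult.Reorg and ConnectTipResult.BadTip. A minimal sketch of how a caller might branch on that result, assuming ExtendChain is the success variant alongside the Reorg and BadTip variants that appear in the diff (describeConnect is a hypothetical helper, not part of this commit):

import org.bitcoins.chain.blockchain.{Blockchain, ConnectTipResult}
import org.bitcoins.core.protocol.blockchain.BlockHeader

object ConnectTipSketch {

  // Hypothetical helper: classify how a candidate header relates to the current chain.
  def describeConnect(header: BlockHeader, chain: Blockchain): String =
    Blockchain.connectTip(header, chain) match {
      case _: ConnectTipResult.ExtendChain => "header extends the current tip"
      case _: ConnectTipResult.Reorg       => "header connects to an older block (reorg candidate)"
      case _: ConnectTipResult.BadTip      => "header fails validation against the tip"
    }
}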
@@ -1,5 +1,6 @@
package org.bitcoins.chain.blockchain

+import org.bitcoins.chain.{ChainCallbacks, OnBlockHeaderConnected}
import org.bitcoins.chain.pow.Pow
import org.bitcoins.core.api.chain.ChainApi
import org.bitcoins.core.api.chain.db.{BlockHeaderDb, BlockHeaderDbHelper}
@@ -26,7 +27,7 @@ import org.bitcoins.testkit.util.{FileUtil, ScalaTestUtil}
import org.scalatest.{Assertion, FutureOutcome}
import play.api.libs.json.Json

-import scala.concurrent.Future
+import scala.concurrent.{Future, Promise}

class ChainHandlerTest extends ChainDbUnitTest {

@@ -51,6 +52,25 @@ class ChainHandlerTest extends ChainDbUnitTest {
    nonce = UInt32(2083236893)
  )

  it must "throw an error when we have no chains" in {
    chainHandler: ChainHandler =>
      val handler = chainHandler.copy(blockchains = Vector.empty)

      recoverToSucceededIf[RuntimeException] {
        handler.getBestBlockHeader()
      }
  }

  it must "throw an error when we have no headers" in {
    chainHandler: ChainHandler =>
      val handler =
        chainHandler.copy(blockchains = Vector(Blockchain(Vector.empty)))

      recoverToSucceededIf[RuntimeException] {
        handler.getBestBlockHeader()
      }
  }

  it must "process a new valid block header, and then be able to fetch that header" in {
    chainHandler: ChainHandler =>
      val newValidHeader =
@@ -460,6 +480,40 @@ class ChainHandlerTest extends ChainDbUnitTest {
      }
  }

  it must "return none for the number of confirmations for a non-existent block" in {
    chainHandler: ChainHandler =>
      chainHandler.getNumberOfConfirmations(DoubleSha256DigestBE.empty).map {
        result =>
          assert(result.isEmpty)
      }
  }

  // G -> A -> B
  // G -> C -> D -> E
  it must "return none for the number of confirmations for a reorged block" in {
    chainHandler: ChainHandler =>
      for {
        genesis <- chainHandler.getBestBlockHeader()

        oldFirst = BlockHeaderHelper.buildNextHeader(genesis)
        oldSecond = BlockHeaderHelper.buildNextHeader(oldFirst)
        startChain = Vector(oldFirst, oldSecond)

        toBeReorged <-
          chainHandler.processHeaders(startChain.map(_.blockHeader))
        oldTip <- toBeReorged.getBestBlockHeader()
        _ = assert(oldTip.hashBE == oldSecond.hashBE)

        newFirst = BlockHeaderHelper.buildNextHeader(genesis)
        newSecond = BlockHeaderHelper.buildNextHeader(newFirst)
        third = BlockHeaderHelper.buildNextHeader(newSecond)
        newChain = Vector(newFirst, newSecond, third)

        reorged <- chainHandler.processHeaders(newChain.map(_.blockHeader))
        confs <- reorged.getNumberOfConfirmations(oldSecond.hashBE)
      } yield assert(confs.isEmpty)
  }

  it must "return the height by block stamp" in { chainHandler: ChainHandler =>
    for {
      bestBlock <- chainHandler.getBestBlockHeader()
@@ -497,4 +551,42 @@ class ChainHandlerTest extends ChainDbUnitTest {
        assert(filterHeaderOpt.get == ChainUnitTest.genesisFilterHeaderDb)
      }
  }

  it must "fail when processing duplicate filters" in {
    chainHandler: ChainHandler =>
      recoverToSucceededIf[DuplicateFilters] {
        val filters = Vector.fill(2)(ChainUnitTest.genesisFilterMessage)

        chainHandler.processFilters(filters)
      }
  }

  it must "process no filters" in { chainHandler: ChainHandler =>
    chainHandler.processFilters(Vector.empty).map { newHandler =>
      assert(chainHandler == newHandler)
    }
  }

  it must "process a new valid block header with a callback" in {
    chainHandler: ChainHandler =>
      val resultP: Promise[Boolean] = Promise()

      val callback: OnBlockHeaderConnected = (_: Int, _: BlockHeader) => {
        Future {
          resultP.success(true)
          ()
        }
      }

      val callbacks = ChainCallbacks(Vector(callback))
      chainHandler.chainConfig.addCallbacks(callbacks)

      val newValidHeader =
        BlockHeaderHelper.buildNextHeader(ChainUnitTest.genesisHeaderDb)

      for {
        _ <- chainHandler.processHeader(newValidHeader.blockHeader)
        result <- resultP.future
      } yield assert(result)
  }
}
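The callback test above observes an asynchronous side effect without polling or sleeping: the callback's only job is to complete a Promise, and the test's for-comprehension waits on that Promise's future, so the assertion runs exactly once the callback has fired. A minimal, self-contained sketch of the same pattern with generic names (onEvent and fireEvent are hypothetical stand-ins, not the bitcoin-s callback API):

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Future, Promise}

object PromiseCallbackSketch {

  def observeCallback(): Future[Boolean] = {
    val resultP: Promise[Boolean] = Promise()

    // The callback signals completion through the promise and nothing else.
    val onEvent: () => Future[Unit] = () =>
      Future {
        resultP.success(true)
        ()
      }

    // Stand-in for the system under test invoking its registered callbacks.
    def fireEvent(): Future[Unit] = onEvent()

    for {
      _ <- fireEvent()         // the real test triggers this via processHeader(...)
      result <- resultP.future // completes once the callback has run
    } yield result
  }
}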
@@ -190,7 +190,8 @@ class MainnetChainHandlerTest extends ChainDbUnitTest {
                      ChainTestUtil.blockHeader562462)
      )

-     val blockchain = Blockchain(headersWithNoWork :+ genesis)
+     val blockchain =
+       Blockchain(headersWithNoWork :+ genesis.copy(chainWork = BigInt(0)))

      val chainHandler = tempHandler.copy(blockchains = Vector(blockchain))

@@ -1,6 +1,14 @@
package org.bitcoins.chain.models

-import org.bitcoins.testkit.chain.ChainDbUnitTest
import org.bitcoins.core.api.chain.db.{BlockHeaderDb, CompactFilterDb}
import org.bitcoins.core.gcs.FilterType
import org.bitcoins.crypto.{CryptoUtil, ECPrivateKey}
+import org.bitcoins.testkit.chain.{
+  BlockHeaderHelper,
+  ChainDbUnitTest,
+  ChainTestUtil
+}
import org.bitcoins.testkit.core.gen.CryptoGenerators
import org.scalatest.FutureOutcome

class CompactFilterDAOTest extends ChainDbUnitTest {
@@ -15,6 +23,107 @@ class CompactFilterDAOTest extends ChainDbUnitTest {
  it must "retrieve getBestFilter when there are no filters in the db" in {
    compactFilterDAO: CompactFilterDAO =>
      compactFilterDAO.getBestFilter
-       .map(opt => assert(opt == None))
+       .map(opt => assert(opt.isEmpty))
  }

  it must "create and read a filter from the database" in { compactFilterDAO =>
    val blockHeaderDAO = BlockHeaderDAO()
    val filterHeaderDAO = CompactFilterHeaderDAO()
    val blockHeaderDb = ChainTestUtil.regTestGenesisHeaderDb
    val filterHeaderDb = ChainTestUtil.regTestGenesisHeaderCompactFilterHeaderDb
    val original = ChainTestUtil.regTestGenesisHeaderCompactFilterDb

    for {
      _ <- blockHeaderDAO.create(blockHeaderDb)
      _ <- filterHeaderDAO.create(filterHeaderDb)
      _ <- compactFilterDAO.create(original)
      fromDbOpt <- compactFilterDAO.read(original.blockHashBE)
    } yield assert(fromDbOpt.contains(original))
  }

  it must "find filters between heights" in { compactFilterDAO =>
    val blockHeaderDAO = BlockHeaderDAO()
    val blockHeaderDb =
      BlockHeaderHelper.buildNextHeader(ChainTestUtil.regTestGenesisHeaderDb)
    val blockHeaderDbF = blockHeaderDAO.create(blockHeaderDb)
    val filterDb1F = for {
      blockHeaderDb <- blockHeaderDbF
    } yield {
      randomFilter(blockHeaderDb)
    }

    val createdF = filterDb1F.flatMap(compactFilterDAO.create)

    for {
      headerDb <- createdF
      fromDbVec <-
        compactFilterDAO.getBetweenHeights(headerDb.height, headerDb.height)
    } yield {
      assert(fromDbVec.length == 1)
      assert(fromDbVec.head == headerDb)
    }
  }

  it must "get filters between heights when there are no filters" in {
    compactFilterDAO =>
      compactFilterDAO.getBetweenHeights(0, 1).map { result =>
        assert(result.isEmpty)
      }
  }

  it must "get max height when there are no filters" in { compactFilterDAO =>
    compactFilterDAO.maxHeight.map { result =>
      assert(result == 0)
    }
  }

  it must "find the filter with the heaviest work" in { compactFilterDAO =>
    val blockHeaderDAO = BlockHeaderDAO()
    val blockHeaderDbLightWork = {
      BlockHeaderHelper.buildNextHeader(ChainTestUtil.regTestGenesisHeaderDb)
    }

    val blockHeaderDbHeavyWork = {
      blockHeaderDbLightWork.copy(
        chainWork = blockHeaderDbLightWork.chainWork + 1,
        hashBE = CryptoGenerators.doubleSha256Digest.sample.get.flip)
    }
    val headers = Vector(blockHeaderDbLightWork, blockHeaderDbHeavyWork)
    val blockHeaderDbF = blockHeaderDAO.createAll(headers)
    val filterDbLightWork = {
      randomFilter(blockHeaderDbLightWork)
    }

    val filterDbHeavyWork = {
      randomFilter(blockHeaderDbHeavyWork)
    }

    val filters =
      Vector(filterDbLightWork, filterDbHeavyWork)

    val createdF = for {
      _ <- blockHeaderDbF
      created <- compactFilterDAO.createAll(filters)
    } yield created

    for {
      _ <- createdF
      found <- compactFilterDAO.getBestFilter
    } yield {
      assert(found.nonEmpty)
      assert(found.get == filterDbHeavyWork)
    }
  }

  private def randomFilter(blockHeader: BlockHeaderDb): CompactFilterDb = {
    val randBytes = ECPrivateKey.freshPrivateKey.bytes
    CompactFilterDb(
      CryptoUtil.doubleSHA256(randBytes).flip,
      filterType = FilterType.Basic,
      bytes = randBytes,
      blockHashBE = blockHeader.hashBE,
      height = blockHeader.height
    )
  }
}
@@ -37,13 +37,13 @@ private[blockchain] trait BaseBlockChain extends SeqWrapper[BlockHeaderDb] {
  protected[blockchain] def compObjectfromHeaders(
      headers: scala.collection.immutable.Seq[BlockHeaderDb]): Blockchain

- val tip: BlockHeaderDb = headers.head
+ lazy val tip: BlockHeaderDb = headers.head

  require(headers.size <= 1 || headers(1).height == tip.height - 1,
          s"Headers must be in descending order, got ${headers.take(5)}")

  /** The height of the chain */
- val height: Int = tip.height
+ lazy val height: Int = tip.height

  def headers: Vector[BlockHeaderDb]
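The val-to-lazy-val change above pairs with the new ChainHandlerTest case that constructs a Blockchain from Vector.empty: with an eager val, headers.head is evaluated (and throws) as soon as the object is constructed, whereas a lazy val defers the failure until tip or height is actually accessed. A minimal, stand-alone sketch of that distinction using simplified types (EagerChain and LazyChain are illustrative stand-ins, not the bitcoin-s classes):

final case class EagerChain(headers: Vector[Int]) {
  // Evaluated at construction time; empty input throws immediately.
  val tip: Int = headers.head
}

final case class LazyChain(headers: Vector[Int]) {
  // Evaluated on first access; empty input only throws when `tip` is used.
  lazy val tip: Int = headers.head
}

// EagerChain(Vector.empty)   // throws NoSuchElementException at construction
// LazyChain(Vector.empty)    // constructs fine; .tip throws only if forced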
@@ -38,7 +38,7 @@ object BlockchainUpdate {
        s"Tip did not equal last successful header, tip=${blockchain.tip.hashBE} lastSuccessfulHeader=${successfulHeaders.head.hashBE}"
      )
    }
-   def height: Long = blockchain.height
+   lazy val height: Long = blockchain.height
  }

  /**
@@ -356,7 +356,8 @@ case class ChainHandler(
      messages.groupBy(_.blockHash.flip).map {
        case (blockHash, messages) =>
          if (messages.size > 1)
-           throw DuplicateFilters("Attempt to process duplicate filters")
+           return Future.failed(
+             DuplicateFilters("Attempt to process duplicate filters"))
          (blockHash, messages.head)
      }
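The swap from throw to Future.failed above changes how the duplicate-filter error reaches callers of a Future-returning method: a thrown exception escapes synchronously before any Future exists, while Future.failed delivers the failure through the returned Future, which is what Future combinators (and the new recoverToSucceededIf test) observe. A minimal sketch of that contract with hypothetical names (DuplicateFilters is redefined locally for the example and validateNoDuplicates is not part of the commit):

import scala.concurrent.Future

object FailedFutureSketch {

  final case class DuplicateFilters(message: String) extends RuntimeException(message)

  // Hypothetical validation step: fail the returned Future instead of throwing,
  // so callers can handle the error with recover/recoverWith or failed projections.
  def validateNoDuplicates(blockHashes: Vector[String]): Future[Vector[String]] =
    if (blockHashes.distinct.length != blockHashes.length)
      Future.failed(DuplicateFilters("Attempt to process duplicate filters"))
    else
      Future.successful(blockHashes)
}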
@@ -785,9 +786,9 @@ case class ChainHandler(
      //so while it looks like we are executing in parallel
      //in reality there is only one thread that can write to the db
      //at a single time
-     _ = logger.trace(
-       s"Upserting from height=${headersWithWork.headOption.map(_.height)} " +
-         s"to height=${headersWithWork.lastOption.map(_.height)}")
+     _ =
+       logger.trace(s"Upserting from height=${headersWithWork.headOption.map(
+         _.height)} to height=${headersWithWork.lastOption.map(_.height)}")
      _ <- FutureUtil.batchExecute(
        headersWithWork,
        blockHeaderDAO.upsertAll,