Mirror of https://github.com/bitcoin-s/bitcoin-s.git (synced 2025-01-18 13:24:25 +01:00)
Fix Node Startup Issue (#1683)
* Fix Node Startup Issue
* Small fixes
* Add option to force chain work recalc
This commit is contained in:
parent f41039b31b
commit 51d35e24e9
@@ -27,7 +27,7 @@ import scala.concurrent.{ExecutionContext, Future, Promise}
 object Main extends App with BitcoinSLogger {
 
   private def runMain(): Unit = {
-    implicit val system = ActorSystem("bitcoin-s")
+    implicit val system: ActorSystem = ActorSystem("bitcoin-s")
     implicit val ec: ExecutionContext = system.dispatcher
     val argsWithIndex = args.zipWithIndex
 
@@ -51,6 +51,10 @@ object Main extends App with BitcoinSLogger {
         case (_, idx) => args(idx + 1).toInt
       }
     }
+
+    val forceChainWorkRecalc: Boolean =
+      argsWithIndex.exists(_._1.toLowerCase == "--force-recalc")
+
     val logger = HttpLoggerImpl(conf.nodeConf).getLogger
 
     implicit val walletConf: WalletAppConfig = conf.walletConf
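
Note (not part of the diff): the new flag is detected by scanning the already-zipped argument list, so it can appear anywhere on the command line and takes no value. A minimal, self-contained Scala sketch of that behaviour; the other argument shown is a placeholder, not an option this commit adds:

    // Standalone illustration of the check added above.
    val args = Array("--some-other-flag", "9999", "--Force-Recalc")
    val argsWithIndex = args.zipWithIndex
    val forceChainWorkRecalc: Boolean =
      argsWithIndex.exists(_._1.toLowerCase == "--force-recalc")
    assert(forceChainWorkRecalc) // true: position and letter case don't matter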
@@ -73,9 +77,9 @@ object Main extends App with BitcoinSLogger {
     //initialize the config, run migrations
     val configInitializedF = conf.initialize()
 
-    //run chainwork migration
+    //run chain work migration
     val chainApiF = configInitializedF.flatMap { _ =>
-      runChainWorkCalc()
+      runChainWorkCalc(forceChainWorkRecalc)
     }
 
     //get a node that isn't started
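
Note (not part of the diff): the startup ordering here relies on Future#flatMap, so the (now possibly forced) chain work step cannot start before the config/migration step has completed. A simplified sketch with stand-ins for conf.initialize() and runChainWorkCalc:

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    // Stand-ins for the real config initialization and chain work calculation.
    def initializeConfig(): Future[Unit] = Future.successful(())
    def runChainWorkCalc(force: Boolean): Future[String] =
      Future.successful(if (force) "recalculated" else "reused stored chain work")

    val forceChainWorkRecalc = false
    // flatMap sequences the two steps: migrations first, then chain work.
    val chainApiF: Future[String] =
      initializeConfig().flatMap(_ => runChainWorkCalc(forceChainWorkRecalc))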
@@ -213,7 +217,7 @@ object Main extends App with BitcoinSLogger {
   }
 
   /** This is needed for migrations V2/V3 on the chain project to re-calculate the total work for the chain */
-  private def runChainWorkCalc()(implicit
+  private def runChainWorkCalc(force: Boolean)(implicit
       chainAppConfig: ChainAppConfig,
       ec: ExecutionContext): Future[ChainApi] = {
     for {
@@ -222,7 +226,7 @@ object Main extends App with BitcoinSLogger {
                                         CompactFilterDAO())
       isMissingChainWork <- chainApi.isMissingChainWork
       chainApiWithWork <-
-        if (isMissingChainWork) {
+        if (isMissingChainWork || force) {
           chainApi.recalculateChainWork
         } else {
           logger.info(s"Chain work already calculated")
@@ -54,26 +54,28 @@ case class ChainHandler(
   }
 
   override def getBestBlockHeader(): Future[BlockHeaderDb] = {
-    logger.debug(s"Querying for best block hash")
-    //https://bitcoin.org/en/glossary/block-chain
-    val groupedChains = blockchains.groupBy(_.tip.chainWork)
-    val maxWork = groupedChains.keys.max
-    val chains = groupedChains(maxWork)
+    Future {
+      logger.debug(s"Querying for best block hash")
+      //https://bitcoin.org/en/glossary/block-chain
+      val groupedChains = blockchains.groupBy(_.tip.chainWork)
+      val maxWork = groupedChains.keys.max
+      val chains = groupedChains(maxWork)
 
-    val bestHeader: BlockHeaderDb = chains match {
-      case Vector() =>
-        // This should never happen
-        val errMsg = s"Did not find blockchain with work $maxWork"
-        logger.error(errMsg)
-        throw new RuntimeException(errMsg)
-      case chain +: Vector() =>
-        chain.tip
-      case chain +: rest =>
-        logger.warn(
-          s"We have multiple competing blockchains: ${(chain +: rest).map(_.tip.hashBE.hex).mkString(", ")}")
-        chain.tip
+      val bestHeader: BlockHeaderDb = chains match {
+        case Vector() =>
+          // This should never happen
+          val errMsg = s"Did not find blockchain with work $maxWork"
+          logger.error(errMsg)
+          throw new RuntimeException(errMsg)
+        case chain +: Vector() =>
+          chain.tip
+        case chain +: rest =>
+          logger.warn(
+            s"We have multiple competing blockchains: ${(chain +: rest).map(_.tip.hashBE.hex).mkString(", ")}")
+          chain.tip
+      }
+      bestHeader
     }
-    Future.successful(bestHeader)
   }
 
   /** @inheritdoc */
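
Note (not part of the diff): the selection logic is unchanged, only wrapped in Future so the grouping work runs on the execution context rather than the caller's thread. The tip chosen is the one whose chain has the most accumulated proof of work; a simplified, self-contained sketch with stand-in types:

    // Stand-ins for the chain module's Blockchain/BlockHeaderDb types.
    final case class Tip(hashBE: String, chainWork: BigInt)
    final case class Chain(tip: Tip)

    val blockchains: Vector[Chain] = Vector(
      Chain(Tip("aaaa", BigInt(100))),
      Chain(Tip("bbbb", BigInt(250))), // most accumulated work
      Chain(Tip("cccc", BigInt(250)))  // a competing tip with equal work
    )

    val groupedChains = blockchains.groupBy(_.tip.chainWork)
    val maxWork = groupedChains.keys.max
    val chains = groupedChains(maxWork)
    val bestTip = chains.head.tip // with several equal-work tips, the first one wins
    assert(bestTip.chainWork == BigInt(250))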
@@ -5,6 +5,7 @@ import java.nio.file.Path
 import com.typesafe.config.{Config, ConfigException}
 import org.bitcoins.chain.db.ChainDbManagement
 import org.bitcoins.chain.models.{BlockHeaderDAO, BlockHeaderDbHelper}
+import org.bitcoins.chain.pow.Pow
 import org.bitcoins.core.util.FutureUtil
 import org.bitcoins.db._
 
@@ -68,10 +69,11 @@ case class ChainAppConfig(
       FutureUtil.unit
     } else {
       val genesisHeader =
-        BlockHeaderDbHelper.fromBlockHeader(height = 0,
-                                            chainWork = BigInt(0),
-                                            bh =
-                                              chain.genesisBlock.blockHeader)
+        BlockHeaderDbHelper.fromBlockHeader(
+          height = 0,
+          chainWork = Pow.getBlockProof(chain.genesisBlock.blockHeader),
+          bh = chain.genesisBlock.blockHeader)
 
       val blockHeaderDAO = BlockHeaderDAO()(ec, appConfig)
       val bhCreatedF = blockHeaderDAO.create(genesisHeader)
       bhCreatedF.flatMap { _ =>
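
Note (not part of the diff): Pow.getBlockProof replaces the BigInt(0) placeholder so the genesis row is seeded with its real work. In Bitcoin, the proof contributed by a block is 2^256 / (target + 1), where the target comes from the header's compact nBits field. A hedged sketch of that computation (compact decoding simplified, edge cases ignored):

    // Decode the compact nBits encoding into the 256-bit target.
    def targetFromBits(nBits: Long): BigInt = {
      val exponent = (nBits >> 24).toInt
      val mantissa = BigInt(nBits & 0xffffffL)
      mantissa << (8 * (exponent - 3))
    }

    // work = 2^256 / (target + 1): the expected number of hashes to meet the target.
    def blockProof(nBits: Long): BigInt =
      (BigInt(1) << 256) / (targetFromBits(nBits) + 1)

    // The mainnet genesis header has nBits = 0x1d00ffff, giving 0x100010001 of work
    // instead of the 0 the old code stored.
    assert(blockProof(0x1d00ffffL) == BigInt("4295032833"))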
@@ -75,7 +75,11 @@ class DbCommonsColumnMappers(val profile: JdbcProfile) {
     MappedColumnType
       .base[BigInt, Array[Byte]](
         bi => ByteVector(bi.toByteArray).padLeft(33).toArray,
-        BigInt(1, _))
+        { arr =>
+          val bytes = arr.dropWhile(_ == 0x00)
+          BigInt(1, bytes)
+        }
+      )
 
   implicit val bigIntPostgresMapper: BaseColumnType[BigInt] =
     MappedColumnType
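
Note (not part of the diff): this mapper (a separate Postgres variant follows) stores chain work as a fixed-width 33-byte, left-zero-padded big-endian array; reading it back now strips the padding before rebuilding the value as a positive (signum = 1) BigInt. A simplified round-trip sketch using plain arrays instead of scodec's ByteVector, assuming the value fits in 33 bytes:

    // Serialize: unsigned big-endian magnitude, left-padded with zeros to 33 bytes.
    def encode(work: BigInt): Array[Byte] = {
      val magnitude = work.toByteArray.dropWhile(_ == 0x00) // drop BigInteger's optional sign byte
      Array.fill[Byte](33 - magnitude.length)(0x00) ++ magnitude
    }

    // Deserialize: drop the zero padding, rebuild as a positive BigInt,
    // mirroring the dropWhile / BigInt(1, _) pair in the diff above.
    def decode(arr: Array[Byte]): BigInt =
      BigInt(1, arr.dropWhile(_ == 0x00))

    val work = BigInt("4295032833") // e.g. the genesis block proof
    assert(encode(work).length == 33)
    assert(decode(encode(work)) == work)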
@@ -148,7 +148,7 @@ trait Node extends NodeApi with ChainQueryApi with P2PLogger {
       filterHeaderCount <- chainApi.getFilterHeaderCount
     } yield {
       logger.info(
-        s"Started node, best block hash ${bestHash.hex} at height $bestHeight, with $filterHeaderCount filter headers and $filterCount filers")
+        s"Started node, best block hash ${bestHash.hex} at height $bestHeight, with $filterHeaderCount filter headers and $filterCount filters")
       node
     }
   }
@@ -190,15 +190,11 @@ trait Node extends NodeApi with ChainQueryApi with P2PLogger {
   def sync(): Future[Unit] = {
     for {
       chainApi <- chainApiFromDb()
-      hash <- chainApi.getBestBlockHash()
-      header <-
-        chainApi
-          .getHeader(hash)
-          .map(_.get) // .get is safe since this is an internal call
-
+      header <- chainApi.getBestBlockHeader()
     } yield {
-      peerMsgSenderF.map(_.sendGetHeadersMessage(hash.flip))
-      logger.info(s"Starting sync node, height=${header.height} hash=$hash")
+      peerMsgSenderF.map(_.sendGetHeadersMessage(header.hashBE.flip))
+      logger.info(
+        s"Starting sync node, height=${header.height} hash=${header.hashBE}")
     }
   }
 
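
Note (not part of the diff): sync() now fetches the best header in one call instead of best-hash-then-lookup, and flips the header's big-endian display hash into the reversed byte order used inside P2P messages before sending getheaders. A standalone sketch of that byte-order flip on hex strings (the real code calls .flip on the header's hashBE):

    // Hashes are displayed big-endian but serialized with reversed bytes on the wire.
    def flip(hexBE: String): String =
      hexBE.grouped(2).toSeq.reverse.mkString

    // Mainnet genesis block hash, big-endian as printed in logs and explorers.
    val genesisHashBE =
      "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"
    val genesisHashLE = flip(genesisHashBE)
    assert(flip(genesisHashLE) == genesisHashBE) // flipping twice round-trips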