Mirror of https://github.com/bitcoin-s/bitcoin-s.git, synced 2024-11-19 01:40:55 +01:00

Turn on -Xlint (#5728)

Get entire codebase compiling
Fix DLCDAO
Revert .jvmopts
Finish rebase

This commit is contained in:
parent e419b18d9c
commit 29f10d046c
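As background for the build change at the end of this diff (the CommonSettings hunk), here is a minimal sketch of what consolidating the individual lint flags into the umbrella scalac option can look like in an sbt build. This is illustrative only, not the project's actual build definition; -Xfatal-warnings and -Xlint are standard scalac options.

    // build.sbt (illustrative sketch only)
    ThisBuild / scalacOptions ++= Seq(
      "-Xfatal-warnings", // escalate warnings, including lint warnings, to errors
      "-Xlint"            // enable the whole -Xlint warning family instead of listing each check
    )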
@@ -152,7 +152,7 @@ sealed trait DLCWalletLoaderApi
resetStateCallbackF.failed.foreach {
case RescanState.RescanTerminatedEarly =>
rescanStateOpt = None
- case scala.util.control.NonFatal(exn) =>
+ case exn: Throwable =>
logger.error(
s"Failed to reset rescanState in wallet loader. Resetting rescan state",
exn
@@ -40,6 +40,8 @@ case class NodeRoutes(nodeApi: NodeApi)(implicit system: ActorSystem)
system.scheduler.scheduleOnce(7.seconds)(sys.exit())
nodeStopping
}
+ case n: NodeApi =>
+ sys.error(s"Unsupported NodeApi type=$n")
}
}
}
@@ -195,8 +195,8 @@ object BitcoindInstanceLocal
def fromConfFile(
file: File = BitcoindConfig.DEFAULT_CONF_FILE,
binary: File = DEFAULT_BITCOIND_LOCATION match {
- case Some(file) => file
- case None => bitcoindLocationFromConfigFile
+ case Some(f) => f
+ case None => bitcoindLocationFromConfigFile
}
)(implicit system: ActorSystem): BitcoindInstanceLocal = {
require(file.exists, s"${file.getPath} does not exist!")

@@ -213,8 +213,8 @@ object BitcoindInstanceLocal
fromConfFile(
file,
DEFAULT_BITCOIND_LOCATION match {
- case Some(file) => file
- case None => bitcoindLocationFromConfigFile
+ case Some(f) => f
+ case None => bitcoindLocationFromConfigFile
}
)
}
@@ -24,10 +24,8 @@ class DLCTestVectorTest extends BitcoinSJvmTest {
val vecResult = DLCTestVectorGen.readFromDefaultTestFile()
assert(vecResult.isSuccess)

- val vecF = vecResult.get.map(runTest(_))
- Future
- .sequence(vecF)
- .map(_ => succeed)
+ val vecF = Future.traverse(vecResult.get)(runTest)
+ vecF.map(_ => succeed)
}

private def runTest(testVec: TestVector): Future[Assertion] = {

@@ -41,6 +39,7 @@ class DLCTestVectorTest extends BitcoinSJvmTest {
case Success(regenerated) => assert(regenerated == testVec)
case Failure(err) => fail(err)
}
+ case t: TestVector => sys.error(s"Incorrect test vector type=$t")
}
}
}
@@ -154,7 +154,7 @@ object TaprootTestCase {
sys.error(s"Expected string for prevouts, got=$x")
}
val index = obj("index").num.toInt
- val success = obj("success") match {
+ val successPath = obj("success") match {
case success: ujson.Obj =>
val scriptSig = ScriptSignature.fromAsmHex(success("scriptSig").str)

@@ -212,7 +212,7 @@ object TaprootTestCase {
tx = transaction,
prevouts = prevouts,
index = index,
- success = success,
+ success = successPath,
failure = failure,
flags = flags,
`final` = finals,
@@ -110,6 +110,8 @@ class HDPathTest extends BitcoinSUnitTest {
assert(value == path.asInstanceOf[SegWitHDPath])
case value: NestedSegWitHDPath =>
assert(value == path.asInstanceOf[NestedSegWitHDPath])
+ case h: HDPath =>
+ sys.error(s"Unsupported HDPath=$h")
}
resultOpt.getOrElse(
fail(s"$path did not have toString/fromString symmetry")
@@ -24,7 +24,8 @@ class MerkleBlockMessageTest extends BitcoinSUnitTest {

assert(merkle.hashes.length == 4)

- val Seq(first, second, third, fourth) = merkle.hashes
+ val h = merkle.hashes
+ val (first, second, third, fourth) = (h.head, h(1), h(2), h(3))
val expectedFirst =
DoubleSha256Digest.fromHex(
"3612262624047ee87660be1a707519a443b1c1ce3d248cbfc6c15870f6c5daa2"
@@ -76,6 +76,7 @@ object CoreTransactionTestCase {
case index
if index >= UInt32.min.toLong && index <= UInt32.max.toLong =>
UInt32(index)
+ case l: Long => sys.error(s"Out of bounds=$l")
}

val amount =
@@ -143,5 +143,6 @@ object AddressDbHelper {
case legacy: LegacyHDPath => getLegacyAddress(pub, legacy, np)
case nested: NestedSegWitHDPath => getNestedSegwitAddress(pub, nested, np)
case segwit: SegWitHDPath => getSegwitAddress(pub, segwit, np)
+ case x: HDPath => sys.error(s"Unknown HDPath type, got=$x")
}
}
@@ -235,16 +235,18 @@ sealed abstract class BloomFilter extends NetworkElement {
txId: DoubleSha256Digest): BloomFilter = {
@tailrec
def loop(
- constantsWithIndex: Seq[(ScriptToken, Int)],
- accumFilter: BloomFilter): BloomFilter =
+ constantsWithIndex: List[(ScriptToken, Int)],
+ accumFilter: BloomFilter): BloomFilter = {
constantsWithIndex match {
- case h +: t if accumFilter.contains(h._1.bytes) =>
+ case h :: t if accumFilter.contains(h._1.bytes) =>
val filter =
accumFilter.insert(TransactionOutPoint(txId, UInt32(h._2)))
loop(t, filter)
- case _ +: t => loop(t, accumFilter)
+ case _ :: t => loop(t, accumFilter)
case Nil => accumFilter
}
+ }
+
val p2pkOrMultiSigScriptPubKeys: Seq[(ScriptPubKey, Int)] =
scriptPubKeysWithIndex.filter { case (s, _) =>
s.isInstanceOf[P2PKScriptPubKey] ||

@@ -257,7 +259,7 @@ sealed abstract class BloomFilter extends NetworkElement {
case (token, _) => token.isInstanceOf[ScriptConstant]
}
}
- loop(scriptConstantsWithOutputIndex, this)
+ loop(scriptConstantsWithOutputIndex.toList, this)
}

/** Performs the [[scala.util.hashing.MurmurHash3 MurmurHash3]] on the given
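A recurring pattern in this commit, visible in the BloomFilter change above and again in InputUtil, TxUtil and BitcoinScriptUtil below, is converting @tailrec helpers from Seq with +: patterns to List with :: patterns, with callers converting via .toList at the boundary. Presumably the stricter checks enabled by -Xlint are happier with List, whose ::/Nil cases the compiler can verify as exhaustive, than with an open-ended Seq match. A minimal standalone sketch of the List style, with illustrative names that are not from the repository:

    import scala.annotation.tailrec

    // Tail-recursive fold over a List; the :: / Nil cases cover every possible List.
    @tailrec
    def sumAll(xs: List[Int], acc: Int = 0): Int = xs match {
      case h :: t => sumAll(t, acc + h)
      case Nil    => acc
    }

    // Callers holding a Seq convert at the boundary, as the diff does with .toList:
    val total = sumAll(Vector(1, 2, 3).toList)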
@@ -28,12 +28,6 @@ trait Merkle {
def computeBlockMerkleRoot(block: Block): DoubleSha256Digest =
computeMerkleRoot(block.transactions)

- /** Computes the merkle root for the given sequence of transactions
- * @param transactions
- * the list of transactions whose merkle root needs to be computed
- * @return
- * the merkle root for the sequence of transactions
- */
def computeMerkleRoot(
transactions: Vector[Transaction]): DoubleSha256Digest = {
val result = if (transactions.isEmpty) {
@@ -208,10 +208,10 @@ trait GetHeadersMessage extends DataPayload with ExpectsResponse {
override def toString(): String = {
val count = hashCount.toInt
// only display first hash, otherwise this gets really long
- val hashesStr = hashes match {
- case Nil => "empty"
- case head +: Nil => head.toString
- case head +: _ => s"$head, ..."
+ val hashesStr = {
+ if (hashes.isEmpty) "empty"
+ else if (hashes.length == 1) hashes.head
+ else s"${hashes.head}, ..."
}
s"GetHeadersMessage($version, hashCount=$count, hashes=$hashesStr, stop=$hashStop)"
}
@@ -198,9 +198,9 @@ object Bech32Address extends AddressFactory[Bech32Address] {
* [[org.bitcoins.core.protocol.BtcHumanReadablePart HumanReadablePart]] &
* data part
*/
- override def fromString(bech32: String): Bech32Address = {
+ override def fromString(bech32Str: String): Bech32Address = {
val bech32T = for {
- (hrp, data) <- Bech32.splitToHrpAndData(bech32, Bech32Encoding.Bech32)
+ (hrp, data) <- Bech32.splitToHrpAndData(bech32Str, Bech32Encoding.Bech32)
network = BtcHumanReadablePart.fromString(hrp).network
} yield Bech32Address(network, data)
@@ -346,9 +346,10 @@ object Bech32mAddress extends AddressFactory[Bech32mAddress] {
* [[org.bitcoins.core.protocol.BtcHumanReadablePart HumanReadablePart]] &
* data part
*/
- override def fromString(bech32m: String): Bech32mAddress = {
+ override def fromString(bech32mStr: String): Bech32mAddress = {
val bech32T = for {
- (hrp, data) <- Bech32.splitToHrpAndData(bech32m, Bech32Encoding.Bech32m)
+ (hrp, data) <- Bech32.splitToHrpAndData(bech32mStr,
+ Bech32Encoding.Bech32m)
network = BtcHumanReadablePart.fromString(hrp).network
} yield Bech32mAddress(network, data)
@@ -1620,7 +1620,7 @@ object WitnessCommitment extends ScriptFactory[WitnessCommitment] {
else {
val minCommitmentSize = 38
val asmBytes = BytesUtil.toByteVector(asm)
- val Seq(opReturn, pushOp, constant) = asm.take(3)
+ val (opReturn, pushOp, constant) = (asm.head, asm(1), asm(2))
opReturn == OP_RETURN && pushOp == BytesToPushOntoStack(36) &&
constant.hex.take(
8) == commitmentHeader && asmBytes.size >= minCommitmentSize
@@ -51,8 +51,8 @@ sealed abstract class P2WPKHWitnessV0 extends ScriptWitnessV0 {

def signature: ECDigitalSignature =
stack(1) match {
- case ByteVector.empty => EmptyDigitalSignature
- case bytes: ByteVector => ECDigitalSignature(bytes)
+ case ByteVector.empty => EmptyDigitalSignature
+ case nonEmpty: ByteVector => ECDigitalSignature(nonEmpty)
}

override def toString =
@@ -479,8 +479,8 @@ object TaprootScriptPath extends Factory[TaprootScriptPath] {
annexOpt: Option[ByteVector],
spk: RawScriptPubKey): TaprootScriptPath = {
annexOpt match {
- case Some(annex) =>
- fromStack(Vector(annex, controlBlock.bytes, spk.asmBytes))
+ case Some(annexBytes) =>
+ fromStack(Vector(annexBytes, controlBlock.bytes, spk.asmBytes))
case None =>
fromStack(Vector(controlBlock.bytes, spk.asmBytes))
}
@@ -44,11 +44,11 @@ object InputUtil {
defaultSequence: UInt32 = Policy.sequence): Seq[TransactionInput] = {
@tailrec
def loop(
- remaining: Seq[InputInfo],
- accum: Seq[TransactionInput]): Seq[TransactionInput] =
+ remaining: List[InputInfo],
+ accum: List[TransactionInput]): Seq[TransactionInput] =
remaining match {
case Nil => accum.reverse
- case spendingInfo +: newRemaining =>
+ case spendingInfo :: newRemaining =>
spendingInfo match {
case lockTime: LockTimeInputInfo =>
val sequence = lockTime.scriptPubKey match {

@@ -92,7 +92,7 @@ object InputUtil {
}
}

- loop(utxos, Nil)
+ loop(utxos.toList, Nil).toVector
}

/** This helper function calculates the appropriate sequence number for each
@@ -67,12 +67,12 @@ object TxUtil {
def calcLockTimeForInfos(utxos: Seq[InputInfo]): Try[UInt32] = {
@tailrec
def loop(
- remaining: Seq[InputInfo],
+ remaining: List[InputInfo],
currentLockTimeOpt: Option[UInt32]): Try[UInt32] =
remaining match {
case Nil =>
Success(currentLockTimeOpt.getOrElse(TransactionConstants.lockTime))
- case spendingInfo +: newRemaining =>
+ case spendingInfo :: newRemaining =>
spendingInfo match {
case lockTime: LockTimeInputInfo =>
lockTime.scriptPubKey match {

@@ -117,7 +117,7 @@ object TxUtil {
}
}

- loop(utxos, None)
+ loop(utxos.toList, None)
}

/** This helper function calculates the appropriate locktime for a
@@ -540,9 +540,9 @@ case class InputPSBTMap(elements: Vector[InputPSBTRecord])
*/
def addLeaves(rawSPK: RawScriptPubKey, path: Vector[Boolean]): Unit = {
rawSPK match {
- case conditional: ConditionalScriptPubKey =>
- addLeaves(conditional.trueSPK, path :+ true)
- addLeaves(conditional.falseSPK, path :+ false)
+ case conditionalSPK: ConditionalScriptPubKey =>
+ addLeaves(conditionalSPK.trueSPK, path :+ true)
+ addLeaves(conditionalSPK.falseSPK, path :+ false)
case p2pkWithTimeout: P2PKWithTimeoutScriptPubKey =>
addLeaves(P2PKScriptPubKey.fromP2PKWithTimeout(p2pkWithTimeout,
timeoutBranch =
@@ -66,7 +66,7 @@ sealed abstract class ScriptParser
BytesUtil.decodeHex(BytesUtil.flipEndianness(b))
}

- val bytesToPushOntoStack: Vector[ScriptToken] =
+ val bytesToPushOntoStack: Vector[ScriptToken] = {
(bytes.size > 75) match {
case true =>
val scriptNumber = ScriptNumber(

@@ -79,9 +79,13 @@ sealed abstract class ScriptParser
Vector(scriptNumber, OP_PUSHDATA2)
case size if size < Int.MaxValue =>
Vector(scriptNumber, OP_PUSHDATA4)
+ case size =>
+ sys.error(
+ s"Cannot have size large than Int.MaxValue (${Int.MaxValue}), got=$size")
}
case false => Vector(BytesToPushOntoStack(bytes.size.toInt))
}
+ }

val pushOpBytes: ByteVector =
bytesToPushOntoStack.foldLeft(ByteVector.empty)(_ ++ _.bytes)
@@ -194,9 +194,9 @@ trait BitcoinScriptUtil {
*/
def isPushOnly(script: Seq[ScriptToken]): Boolean = {
@tailrec
- def loop(tokens: Seq[ScriptToken]): Boolean =
+ def loop(tokens: List[ScriptToken]): Boolean =
tokens match {
- case h +: t =>
+ case h :: t =>
h match {
case scriptOp: ScriptOperation =>
if (scriptOp.opCode < OP_16.opCode) {

@@ -209,7 +209,7 @@ trait BitcoinScriptUtil {
}
case Nil => true
}
- loop(script)
+ loop(script.toList)
}

/** Determines if the token being pushed onto the stack is being pushed by the

@@ -489,7 +489,7 @@ trait BitcoinScriptUtil {
def calculateScriptForSigning(
spendingTransaction: Transaction,
signingInfo: InputSigningInfo[InputInfo],
- script: Seq[ScriptToken]): Seq[ScriptToken] = {
+ asm: Seq[ScriptToken]): Seq[ScriptToken] = {

val idx = TxUtil.inputIndex(signingInfo.inputInfo, spendingTransaction)

@@ -528,7 +528,7 @@ trait BitcoinScriptUtil {
_: ConditionalScriptPubKey | _: NonStandardScriptPubKey |
_: CLTVScriptPubKey | _: CSVScriptPubKey | _: WitnessCommitment |
EmptyScriptPubKey =>
- script
+ asm
}
}

@@ -558,16 +558,16 @@ trait BitcoinScriptUtil {
script: Seq[ScriptToken]): Seq[ScriptToken] = {
@tailrec
def loop(
- remainingSigs: Seq[ECDigitalSignature],
+ remainingSigs: List[ECDigitalSignature],
scriptTokens: Seq[ScriptToken]): Seq[ScriptToken] = {
remainingSigs match {
case Nil => scriptTokens
- case h +: t =>
+ case h :: t =>
val newScriptTokens = removeSignatureFromScript(h, scriptTokens)
loop(t, newScriptTokens)
}
}
- loop(sigs, script)
+ loop(sigs.toList, script).toVector
}

/** Removes the
@@ -50,7 +50,7 @@ object RescanState {
case RescanTerminatedEarly =>
recursiveRescanP.failure(RescanTerminatedEarly)
_isCompletedEarly.set(true)
- case scala.util.control.NonFatal(_) => // do nothing
+ case _: Throwable => // do nothing
}

/** Useful for determining if the rescan was completed externally by the
@@ -50,6 +50,8 @@ sealed trait InputSigningInfo[+InputType <: InputInfo] {
SigVersionWitnessV0
case _: P2SHNonSegwitInputInfo | _: RawInputInfo =>
SigVersionBase
+ case i: InputInfo =>
+ sys.error(s"Cannot determine SigVersion for unsupported inputInfo=$i")
}
}
@@ -92,11 +92,15 @@ object InternalAddressTag {
StorageLocationTag.DeepColdStorage
case unknownName: UnknownAddressTagName =>
UnknownAddressTag(unknownName, StorageLocationTagType)
+ case a: AddressTagName =>
+ UnknownAddressTag(a.name, StorageLocationTagType)
}
case AddressLabelTagType =>
AddressLabelTag(tagName.name)
case unknownType: UnknownAddressTagType =>
UnknownAddressTag(tagName, unknownType)
+ case a: AddressTagType =>
+ UnknownAddressTag(a.typeName, a)
}
}
}
@@ -80,8 +80,8 @@ trait BitcoinSCryptoAsyncTest
.to(generatorDrivenConfig.minSize)
.map(_ => (genA.sample, genB.sample))
.toVector
- .collect { case (Some(a), Some(b)) =>
- (a, b)
+ .collect { case (Some(x), Some(y)) =>
+ (x, y)
}

val testRunsF = Future.traverse(samples)(x => func(x._1, x._2))
@@ -11,7 +11,7 @@ import scodec.bits.ByteVector
/** Represents an ordered set of MuSig signers and their tweaks. This is the
* data required to (non-interactively) compute the aggPubKey.
*/
- trait KeySet {
+ sealed trait KeySet {
def keys: Vector[SchnorrPublicKey]

def tweaks: Vector[MuSigTweak]
@@ -82,7 +82,7 @@ case class DLCContactDAO()(implicit

def memo: Rep[String] = column("memo")

- def * : ProvenShape[DLCContactDb] =
- (alias, address, memo) <> (DLCContactDb.tupled, DLCContactDb.unapply)
+ override def * : ProvenShape[DLCContactDb] =
+ (alias, address, memo).<>(DLCContactDb.tupled, DLCContactDb.unapply)
}
}
@@ -79,16 +79,7 @@ case class DLCDAO()(implicit
): Future[Option[DLCDb]] = {
val q = table.filter(_.tempContractId === tempContractId)

- safeDatabase.run(q.result).map {
- case h +: Vector() =>
- Some(h)
- case Vector() =>
- None
- case dlcs: Vector[DLCDb] =>
- throw new RuntimeException(
- s"More than one DLC per tempContractId (${tempContractId.hex}), got: $dlcs"
- )
- }
+ safeDatabase.run(q.result).map(_.headOption)
}

def findByTempContractId(

@@ -99,16 +90,7 @@ case class DLCDAO()(implicit
def findByContractId(contractId: ByteVector): Future[Option[DLCDb]] = {
val q = table.filter(_.contractId === contractId)

- safeDatabase.run(q.result).map {
- case h +: Vector() =>
- Some(h)
- case Vector() =>
- None
- case dlcs: Vector[DLCDb] =>
- throw new RuntimeException(
- s"More than one DLC per contractId (${contractId.toHex}), got: $dlcs"
- )
- }
+ safeDatabase.run(q.result).map(_.headOption)
}

def findByFundingOutPoint(
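The commit message's "Fix DLCDAO" refers to the lookups above: an explicit Vector match that threw a RuntimeException when more than one row came back is replaced by _.headOption, which keeps the first row (or None) and silently ignores any extras; the AccountDAO and TxDAO hunks later in this diff make the same change. A minimal sketch of the behavioural difference, with illustrative names that are not from the codebase:

    // Old style: fail loudly if the query unexpectedly returns more than one row.
    def strictHead(rows: Vector[Int]): Option[Int] = rows match {
      case h +: Vector() => Some(h)
      case Vector()      => None
      case other         => throw new RuntimeException(s"Expected at most one row, got: $other")
    }

    // New style: take the first row if present, ignoring duplicates.
    def lenientHead(rows: Vector[Int]): Option[Int] = rows.headOption

    strictHead(Vector(1))     // Some(1)
    lenientHead(Vector(1, 2)) // Some(1); strictHead would throw here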
|
@ -70,6 +70,6 @@ case class IncomingDLCOfferDAO()(implicit
|
||||
peer,
|
||||
message,
|
||||
offerTLV
|
||||
) <> (IncomingDLCOfferDb.tupled, IncomingDLCOfferDb.unapply)
|
||||
).<>(IncomingDLCOfferDb.tupled, IncomingDLCOfferDb.unapply)
|
||||
}
|
||||
}
|
||||
|
@@ -76,7 +76,7 @@ object MempoolSpaceProvider extends FeeProviderFactory[MempoolSpaceProvider] {
}
}

- abstract class MempoolSpaceTarget
+ sealed abstract class MempoolSpaceTarget

object MempoolSpaceTarget {
@@ -63,9 +63,9 @@ class BIP39KeyManagerApiTest extends KeyManagerApiUnitTest {
WalletStorage.decryptSeedFromDisk(seedPath, aesPasswordOpt)

decryptedE match {
- case Right(mnemonic: DecryptedMnemonic) =>
+ case Right(m: DecryptedMnemonic) =>
assert(
- mnemonic.mnemonicCode.toEntropy == entropy,
+ m.mnemonicCode.toEntropy == entropy,
s"We did not read the same entropy that we wrote!"
)
case Right(xprv: DecryptedExtPrivKey) =>
@@ -27,7 +27,7 @@ class BIP39LockedKeyManagerApiTest extends KeyManagerApiUnitTest {
)

val unlockedKm = unlockedE match {
- case Right(km) => km
+ case Right(k) => k
case Left(err) => fail(s"Failed to unlock key manager ${err}")
}
@@ -524,8 +524,8 @@ case class PeerManager(
initState: NodeState
): Sink[NodeStreamMessage, Future[NodeState]] = {
Sink.foldAsync(initState) {
- case (state, s: StartSync) =>
- val nodeStateOptF: Future[Option[NodeState]] = s.peerOpt match {
+ case (state, startSync: StartSync) =>
+ val nodeStateOptF: Future[Option[NodeState]] = startSync.peerOpt match {
case Some(p) =>
state match {
case s: SyncNodeState if !s.waitingForDisconnection.contains(p) =>
@@ -59,39 +59,36 @@ case class DataMessageHandler(
): Future[DataMessageHandler] = {
state match {
case syncState: SyncNodeState =>
- syncState match {
- case state @ (_: HeaderSync | _: FilterHeaderSync | _: FilterSync) =>
- val syncPeer = state.syncPeer
- val isQueryTimedOut = state.isQueryTimedOut(appConfig.queryWaitTime)
- if (peerData.peer != syncPeer && !isQueryTimedOut) {
- // ignore message from peers that we aren't syncing with during IBD
- logger.debug(
- s"Ignoring message ${payload.commandName} from peer=${peerData.peer} in state=$state because we are syncing with this peer currently. syncPeer=$syncPeer"
- )
- Future.successful(this)
- } else {
- val dmh = if (isQueryTimedOut) {
- // if query is timed out, we need to transition back to DoneSyncing
- // to avoid getting stuck in a state when a peer does not respond to us
- // see: https://github.com/bitcoin-s/bitcoin-s/issues/5429
- logger.info(
- s"Query timed out with in state=$state, received payload=${payload.commandName}")
- copy(state = state.toDoneSyncing)
- } else {
- this
- }
- val resultF =
- dmh.handleDataPayloadValidState(payload, peerData)
- resultF.failed.foreach { err =>
- logger.error(
- s"Failed to handle data payload=${payload} from peer=${peerData.peer} in state=$state errMsg=${err.getMessage}",
- err
- )
- }
- resultF.recoverWith { case NonFatal(_) =>
- Future.successful(this)
- }
- }
+ val syncPeer = syncState.syncPeer
+ val isQueryTimedOut = syncState.isQueryTimedOut(appConfig.queryWaitTime)
+ if (peerData.peer != syncPeer && !isQueryTimedOut) {
+ // ignore message from peers that we aren't syncing with during IBD
+ logger.debug(
+ s"Ignoring message ${payload.commandName} from peer=${peerData.peer} in state=$state because we are syncing with this peer currently. syncPeer=$syncPeer"
+ )
+ Future.successful(this)
+ } else {
+ val dmh = if (isQueryTimedOut) {
+ // if query is timed out, we need to transition back to DoneSyncing
+ // to avoid getting stuck in a state when a peer does not respond to us
+ // see: https://github.com/bitcoin-s/bitcoin-s/issues/5429
+ logger.info(
+ s"Query timed out with in state=$state, received payload=${payload.commandName}")
+ copy(state = state.toDoneSyncing)
+ } else {
+ this
+ }
+ val resultF =
+ dmh.handleDataPayloadValidState(payload, peerData)
+ resultF.failed.foreach { err =>
+ logger.error(
+ s"Failed to handle data payload=${payload} from peer=${peerData.peer} in state=$state errMsg=${err.getMessage}",
+ err
+ )
+ }
+ resultF.recoverWith { case NonFatal(_) =>
+ Future.successful(this)
+ }
}
case _: DoneSyncing =>
val resultF = handleDataPayloadValidState(payload, peerData)

@@ -964,8 +961,8 @@ case class DataMessageHandler(
syncIfHeadersAhead(filterSyncState, peerMessageSenderApi)
} else {
val res = filterHeaderSyncStateOpt match {
- case Some(filterSyncState) =>
- filterSyncState.copy(filterBatchCache = newBatch)
+ case Some(f) =>
+ f.copy(filterBatchCache = newBatch)
case None =>
val d = filterSyncState.toDoneSyncing
d
@@ -146,26 +146,7 @@ object CommonSettings {
private val scala2_13CompilerLinting = {
Seq(
"-Xfatal-warnings",
- "-Xlint:unused",
- "-Xlint:adapted-args",
- "-Xlint:nullary-unit",
- "-Xlint:inaccessible",
- "-Xlint:infer-any",
- "-Xlint:missing-interpolator",
- "-Xlint:eta-zero",
- "-Xlint:eta-sam",
- "-Xlint:doc-detached",
- "-Xlint:private-shadow",
- "-Xlint:type-parameter-shadow",
- "-Xlint:poly-implicit-overload",
- "-Xlint:option-implicit",
- "-Xlint:delayedinit-select",
- "-Xlint:package-object-classes",
- "-Xlint:stars-align",
- "-Xlint:constant",
- "-Xlint:nonlocal-return",
- "-Xlint:implicit-not-found",
- "-Xlint:serial"
+ "-Xlint"
)
}
@@ -137,10 +137,13 @@ object HDGenerators {
def hdPathWithConstructor: Gen[(HDPath, HDPathConstructor)] =
for {
path <- hdPath
- } yield path match {
- case legacy: LegacyHDPath => (legacy, LegacyHDPath(_))
- case nested: NestedSegWitHDPath => (nested, NestedSegWitHDPath(_))
- case segwit: SegWitHDPath => (segwit, SegWitHDPath(_))
+ } yield {
+ path match {
+ case legacy: LegacyHDPath => (legacy, LegacyHDPath(_))
+ case nested: NestedSegWitHDPath => (nested, NestedSegWitHDPath(_))
+ case segwit: SegWitHDPath => (segwit, SegWitHDPath(_))
+ case h: HDPath => sys.error(s"Unsupported hdPath=$h")
+ }
}

/** Generates a pair of paths that can be diffed.
@@ -27,7 +27,7 @@ import org.bitcoins.crypto.{
DoubleSha256DigestBE,
ECPublicKey
}
- import org.bitcoins.rpc.client.common.BitcoindVersion.*
+ import org.bitcoins.rpc.client.common.BitcoindVersion.{V26, V27, V28, Unknown}
import org.bitcoins.rpc.client.common.{BitcoindRpcClient, BitcoindVersion}
import org.bitcoins.rpc.client.v26.BitcoindV26RpcClient
import org.bitcoins.rpc.client.v27.BitcoindV27RpcClient
@@ -65,12 +65,12 @@ class TorController(
context.become {
case data: ByteString =>
connection ! Write(data)
- case c @ CommandFailed(_: Write) =>
+ case cmdFailed @ CommandFailed(_: Write) =>
// O/S buffer was full
protocolHandler ! SendFailed
log.error(
"Tor command failed",
- c.cause.getOrElse(new RuntimeException("Unknown error"))
+ cmdFailed.cause.getOrElse(new RuntimeException("Unknown error"))
)
case Received(data) =>
protocolHandler ! data
@@ -149,6 +149,8 @@ class TorProtocolHandler(
override def unhandled(message: Any): Unit = message match {
case GetOnionAddress =>
sender() ! address
+ case a: Any =>
+ log.warning(s"Unhandled TorProtocolHandler message=$a")
}

private def processOnionResponse(res: Map[String, String]): String = {
@@ -474,6 +474,8 @@ case class AccountHandling(
nestedPath,
networkParameters
)
+ case h: HDPath =>
+ sys.error(s"Unsupported HDPath type=$h for calculating addresses")
}
}
}
@@ -64,17 +64,7 @@ case class AccountDAO()(implicit
.filter(_.purpose === account.purpose)
.filter(_.index === account.index)

- q.result.map {
- case h +: Vector() =>
- Some(h)
- case Vector() =>
- None
- case accounts: Vector[AccountDb] =>
- // yikes, we should not have more the one account per coin type/purpose
- throw new RuntimeException(
- s"More than one account per account=${account}, got=${accounts}"
- )
- }
+ q.result.map(_.headOption)
}

def findByAccount(account: HDAccount): Future[Option[AccountDb]] = {
@@ -81,17 +81,7 @@ trait TxDAO[DbEntryType <: TxDB]
table
.filter(_.txIdBE === txIdBE)
.result
- .map {
- case h +: Vector() =>
- Some(h)
- case Vector() =>
- None
- case txs: Vector[DbEntryType] =>
- // yikes, we should not have more the one transaction per id
- throw new RuntimeException(
- s"More than one transaction per id=${txIdBE.hex}, got=$txs"
- )
- }
+ .map(_.headOption)
}

def findByTxId(txIdBE: DoubleSha256DigestBE): Future[Option[DbEntryType]] = {