Mirror of https://github.com/bitcoin-s/bitcoin-s.git
2020 04 08 multi db dao (#1355)
* Implement Slick's multi-database support via our DAOs: work on the chain project, make JdbcProfileComponent so we have a common JdbcProfile, clean up a few things, get CRUDAutoInc compiling, get closer to everything compiling in the chain project, begin moving database code into JdbcProfileComponent, get the source compiling, get dbCommonsTest/ test cases passing, get the rest of the tests compiling and passing
* Rebase onto master, fix conflicts
* Run scalafmt
* Run scalafmt again
* Clean up a few compiler warnings by adding type annotations to things
* Explicitly cast tables where they are needed in the wallet project, where we need TableQuery for foreign key / joins in Slick
* Redo definition of 'table' to use the profile.api.TableQuery in scope
* Rename all *Table.scala files to *Db.scala
* First crack at addressing Ben's code review
* Fix docs compile issue
* Fixed casting (#30): fixed AppConfig casting, fixed TableQuery casting in DbManagement subtypes, fixed casts in DAOs relating to internal types
* Address Nadav's code review, move JdbcProfileComponent into its own file
* Remove unused listTables() method

Co-authored-by: Nadav Kohen <nadavk25@gmail.com>
Parent: 3f14add2d0
Commit: 4c3f6f8e67
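The heart of this change is replacing the hard-coded `slick.jdbc.SQLiteProfile.api._` imports with a profile component that every DAO and `DbManagement` trait mixes in. Below is a minimal, self-contained sketch of that idea, assuming only Slick's `DatabaseConfig`/`JdbcProfile` API; the `My*` names are stand-ins for this sketch, not the real bitcoin-s classes (the actual `JdbcProfileComponent` appears as a new file later in this diff).

```scala
import com.typesafe.config.Config
import slick.basic.DatabaseConfig
import slick.jdbc.JdbcProfile

// Stand-in for org.bitcoins.db.AppConfig, reduced to what this sketch needs.
trait MyAppConfig {
  def moduleName: String // e.g. "chain", "node" or "wallet"
  def config: Config     // the resolved typesafe config for this module
}

// Sketch of the profile-component pattern: the Slick profile is resolved from
// configuration at runtime instead of being hard-coded to SQLiteProfile.
trait MyJdbcProfileComponent[C <: MyAppConfig] {
  def appConfig: C

  // Reads "<moduleName>.profile" and "<moduleName>.db" via Slick's DatabaseConfig.
  lazy val dbConfig: DatabaseConfig[JdbcProfile] =
    DatabaseConfig.forConfig[JdbcProfile](appConfig.moduleName, appConfig.config)

  lazy val profile: JdbcProfile = dbConfig.profile

  // Every Slick type (Table, TableQuery, Database, ...) now comes from this
  // path-dependent import, so the same DAO code can run on SQLite, Postgres, etc.
  import profile.api._
  lazy val database: Database = dbConfig.db
}
```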
@@ -21,7 +21,7 @@ import org.bitcoins.db.AppConfig
*/
case class BitcoinSAppConfig(
private val directory: Path,
private val confs: Config*) {
private val confs: Config*)(implicit ec: ExecutionContext) {
val walletConf = WalletAppConfig(directory, confs: _*)
val nodeConf = NodeAppConfig(directory, confs: _*)
val chainConf = ChainAppConfig(directory, confs: _*)
@@ -55,7 +55,8 @@ object BitcoinSAppConfig {
/** Constructs an app configuration from the default Bitcoin-S
* data directory and given list of configuration overrides.
*/
def fromDefaultDatadir(confs: Config*): BitcoinSAppConfig =
def fromDefaultDatadir(confs: Config*)(
implicit ec: ExecutionContext): BitcoinSAppConfig =
BitcoinSAppConfig(AppConfig.DEFAULT_BITCOIN_S_DATADIR, confs: _*)

import scala.language.implicitConversions
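Because the new implicit parameter list propagates to the companion constructors, callers need an `ExecutionContext` in scope before building any config. A hypothetical call site, assuming `BitcoinSAppConfig` lives in `org.bitcoins.server`:

```scala
import org.bitcoins.server.BitcoinSAppConfig // package assumed for this sketch
import scala.concurrent.ExecutionContext.Implicits.global

// fromDefaultDatadir(confs: Config*) now has a second, implicit parameter list,
// so this only compiles with an ExecutionContext in implicit scope.
val conf: BitcoinSAppConfig = BitcoinSAppConfig.fromDefaultDatadir()
val walletConf = conf.walletConf // the per-module configs are built eagerly
```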
@@ -21,9 +21,12 @@ import org.bitcoins.wallet.models.AccountDAO
import org.bitcoins.wallet.{LockedWallet, Wallet}

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.concurrent.{Await, ExecutionContext, Future}

object Main extends App {
implicit val system = ActorSystem("bitcoin-s")
implicit val ec: ExecutionContext = system.dispatcher

implicit val conf = {
val dataDirIndexOpt = args.zipWithIndex
.find(_._1.toLowerCase == "--datadir")
@@ -45,9 +48,6 @@ object Main extends App {
"Either Neutrino or SPV mode should be enabled")
implicit val chainConf: ChainAppConfig = conf.chainConf

implicit val system = ActorSystem("bitcoin-s")
import system.dispatcher

val peerSocket =
parseInetSocketAddress(nodeConf.peers.head, nodeConf.network.port)
val peer = Peer.fromSocket(peerSocket)
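The hunk above stops right after the `--datadir` lookup; the real continuation is outside the hunk, so the sketch below is only a hypothetical completion of that pattern, reading the argument that follows the flag:

```scala
import java.nio.file.{Path, Paths}

// Hypothetical completion of the --datadir handling: take the argument that
// immediately follows the flag, if any.
def parseDatadir(args: Array[String]): Option[Path] = {
  val dataDirIndexOpt = args.zipWithIndex.find(_._1.toLowerCase == "--datadir")
  dataDirIndexOpt.flatMap { case (_, idx) => args.lift(idx + 1) }.map(Paths.get(_))
}

// parseDatadir(Array("--datadir", "/tmp/bitcoin-s")) == Some(Paths.get("/tmp/bitcoin-s"))
```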
@@ -16,25 +16,28 @@ import scala.concurrent.{ExecutionContext, Future}
*/
case class ChainAppConfig(
private val directory: Path,
private val confs: Config*)
extends AppConfig {
private val confs: Config*)(implicit override val ec: ExecutionContext)
extends AppConfig
with ChainDbManagement
with JdbcProfileComponent[ChainAppConfig] {

override protected[bitcoins] def configOverrides: List[Config] = confs.toList
override protected[bitcoins] val moduleName: String = "chain"
override protected[bitcoins] def moduleName: String = "chain"
override protected[bitcoins] type ConfigType = ChainAppConfig
override protected[bitcoins] def newConfigOfType(
configs: Seq[Config]): ChainAppConfig =
ChainAppConfig(directory, configs: _*)

protected[bitcoins] def baseDatadir: Path = directory

override def appConfig: ChainAppConfig = this

/**
* Checks whether or not the chain project is initialized by
* trying to read the genesis block header from our block
* header table
*/
def isInitialized()(implicit ec: ExecutionContext): Future[Boolean] = {
val bhDAO =
BlockHeaderDAO()(ec = implicitly[ExecutionContext], appConfig = this)
val bhDAO = BlockHeaderDAO()(ec, appConfig)
val isDefinedOptF = {
bhDAO.read(chain.genesisBlock.blockHeader.hashBE).map(_.isDefined)
}
@@ -53,7 +56,7 @@ case class ChainAppConfig(
* and inserts preliminary data like the genesis block header
* */
override def initialize()(implicit ec: ExecutionContext): Future[Unit] = {
val numMigrations = ChainDbManagement.migrate(this)
val numMigrations = migrate()

logger.info(s"Applied ${numMigrations} to chain project")

@@ -66,8 +69,7 @@ case class ChainAppConfig(
BlockHeaderDbHelper.fromBlockHeader(height = 0,
bh =
chain.genesisBlock.blockHeader)
val blockHeaderDAO =
BlockHeaderDAO()(ec = implicitly[ExecutionContext], appConfig = this)
val blockHeaderDAO = BlockHeaderDAO()(ec, appConfig)
val bhCreatedF = blockHeaderDAO.create(genesisHeader)
bhCreatedF.flatMap { _ =>
logger.info(s"Inserted genesis block header into DB")
@@ -97,6 +99,7 @@ object ChainAppConfig {
/** Constructs a chain verification configuration from the default Bitcoin-S
* data directory and given list of configuration overrides.
*/
def fromDefaultDatadir(confs: Config*): ChainAppConfig =
def fromDefaultDatadir(confs: Config*)(
implicit ec: ExecutionContext): ChainAppConfig =
ChainAppConfig(AppConfig.DEFAULT_BITCOIN_S_DATADIR, confs: _*)
}
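`ChainAppConfig` now mixes the management and profile traits into the config itself and points `appConfig` back at `this`. The shape of that composition, reduced to plain Scala so the wiring is visible (the `Demo*` names are illustrative, not the real classes):

```scala
// A component that exposes the configuration it was built from.
trait DemoProfileComponent[C] {
  def appConfig: C
}

// Management code that can only be mixed into something that is also a
// DemoProfileComponent for the right config type (the self-type below).
trait DemoDbManagement { self: DemoProfileComponent[DemoChainConfig] =>
  def migrate(): Int = {
    // the real code would run Flyway against appConfig's database here
    println(s"migrating module ${appConfig.moduleName}")
    1
  }
}

// The config class closes the loop: it is its own profile component.
final case class DemoChainConfig(moduleName: String)
    extends DemoProfileComponent[DemoChainConfig]
    with DemoDbManagement {
  override def appConfig: DemoChainConfig = this
}

// DemoChainConfig("chain").migrate() // usable directly, no separate singleton needed
```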
@@ -1,9 +1,12 @@
package org.bitcoins.chain.db

import org.bitcoins.chain.config.ChainAppConfig
import org.bitcoins.chain.models.{BlockHeaderTable, CompactFilterHeaderTable, CompactFilterTable}
import org.bitcoins.db.DbManagement
import slick.lifted.TableQuery
import org.bitcoins.chain.models.{
BlockHeaderDAO,
CompactFilterDAO,
CompactFilterHeaderDAO
}
import org.bitcoins.db.{DbManagement, JdbcProfileComponent}

import scala.concurrent.{ExecutionContext, Future}

@@ -11,25 +14,28 @@ import scala.concurrent.{ExecutionContext, Future}
* Responsible for creating and destroying database
* tables inside of the Chain project.
*/
sealed abstract class ChainDbManagement extends DbManagement {
trait ChainDbManagement extends DbManagement {
_: JdbcProfileComponent[ChainAppConfig] =>
import profile.api._

private val chainTable: TableQuery[BlockHeaderTable] =
TableQuery[BlockHeaderTable]
def ec: ExecutionContext

private val filterHeaderTable: TableQuery[CompactFilterHeaderTable] =
TableQuery[CompactFilterHeaderTable]
private lazy val chainTable: TableQuery[Table[_]] =
BlockHeaderDAO()(ec, appConfig).table

private val filterTable: TableQuery[CompactFilterTable] =
TableQuery[CompactFilterTable]
private lazy val filterHeaderTable: TableQuery[Table[_]] = {
CompactFilterHeaderDAO()(ec, appConfig).table
}

override val allTables = List(chainTable, filterHeaderTable, filterTable)
private lazy val filterTable: TableQuery[Table[_]] = {
CompactFilterDAO()(ec, appConfig).table
}

def createHeaderTable(createIfNotExists: Boolean = true)(
implicit config: ChainAppConfig,
ec: ExecutionContext): Future[Unit] = {
createTable(chainTable, createIfNotExists)
override lazy val allTables: List[TableQuery[Table[_]]] =
List(chainTable, filterHeaderTable, filterTable)

def createHeaderTable(createIfNotExists: Boolean = true): Future[Unit] = {
createTable(chainTable, createIfNotExists)(ec)
}

}

object ChainDbManagement extends ChainDbManagement
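Since `ChainDbManagement` is now a trait with a self-type rather than a standalone object, code that only has a config in hand (tests, for instance) builds an instance on the fly, as the `DbManagementTest` changes later in this diff do. A sketch of that construction, assuming an `ExecutionContext` is available and using the hypothetical helper name `chainDbManagement`:

```scala
import org.bitcoins.chain.config.ChainAppConfig
import org.bitcoins.chain.db.ChainDbManagement
import org.bitcoins.db.JdbcProfileComponent
import scala.concurrent.ExecutionContext

// Sketch: mix the management trait with the profile component for a given config.
def chainDbManagement(chainAppConfig: ChainAppConfig)(
    implicit executionContext: ExecutionContext): ChainDbManagement =
  new ChainDbManagement with JdbcProfileComponent[ChainAppConfig] {
    override val ec: ExecutionContext = executionContext
    override def appConfig: ChainAppConfig = chainAppConfig
  }
```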
@ -3,9 +3,8 @@ package org.bitcoins.chain.models
|
||||
import org.bitcoins.chain.blockchain.Blockchain
|
||||
import org.bitcoins.chain.config.ChainAppConfig
|
||||
import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.core.number.{Int32, UInt32}
|
||||
import org.bitcoins.db._
|
||||
import slick.jdbc.SQLiteProfile
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
@ -17,34 +16,36 @@ import scala.concurrent.{ExecutionContext, Future}
|
||||
*/
|
||||
case class BlockHeaderDAO()(
|
||||
implicit ec: ExecutionContext,
|
||||
appConfig: ChainAppConfig)
|
||||
extends CRUD[BlockHeaderDb, DoubleSha256DigestBE] {
|
||||
override val appConfig: ChainAppConfig)
|
||||
extends CRUD[BlockHeaderDb, DoubleSha256DigestBE]
|
||||
with SlickUtil[BlockHeaderDb, DoubleSha256DigestBE] {
|
||||
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
import profile.api._
|
||||
|
||||
override val table: TableQuery[BlockHeaderTable] =
|
||||
TableQuery[BlockHeaderTable]
|
||||
override val table =
|
||||
profile.api.TableQuery[BlockHeaderTable]
|
||||
|
||||
/** Creates all of the given [[BlockHeaderDb]] in the database */
|
||||
override def createAll(
|
||||
headers: Vector[BlockHeaderDb]): Future[Vector[BlockHeaderDb]] = {
|
||||
SlickUtil.createAllNoAutoInc(ts = headers,
|
||||
database = database,
|
||||
table = table)
|
||||
createAllNoAutoInc(ts = headers, database = safeDatabase)
|
||||
}
|
||||
|
||||
override protected def findAll(
|
||||
ts: Vector[BlockHeaderDb]): Query[Table[_], BlockHeaderDb, Seq] = {
|
||||
override protected def findAll(ts: Vector[BlockHeaderDb]): Query[
|
||||
BlockHeaderTable,
|
||||
BlockHeaderDb,
|
||||
Seq] = {
|
||||
findByPrimaryKeys(ts.map(_.hashBE))
|
||||
}
|
||||
|
||||
def findByHash(hash: DoubleSha256DigestBE): Future[Option[BlockHeaderDb]] = {
|
||||
val query = findByPrimaryKey(hash).result
|
||||
database.runVec(query).map(_.headOption)
|
||||
safeDatabase.runVec(query).map(_.headOption)
|
||||
}
|
||||
|
||||
override def findByPrimaryKeys(hashes: Vector[DoubleSha256DigestBE]): Query[
|
||||
Table[_],
|
||||
BlockHeaderTable,
|
||||
BlockHeaderDb,
|
||||
Seq] = {
|
||||
table.filter(_.hash.inSet(hashes))
|
||||
@ -114,10 +115,10 @@ case class BlockHeaderDAO()(
|
||||
/** Retrieves a [[BlockHeaderDb]] at the given height */
|
||||
def getAtHeight(height: Int): Future[Vector[BlockHeaderDb]] = {
|
||||
val query = getAtHeightQuery(height)
|
||||
database.runVec(query)
|
||||
safeDatabase.runVec(query)
|
||||
}
|
||||
|
||||
def getAtHeightQuery(height: Int): SQLiteProfile.StreamingProfileAction[
|
||||
def getAtHeightQuery(height: Int): profile.StreamingProfileAction[
|
||||
Seq[BlockHeaderDb],
|
||||
BlockHeaderDb,
|
||||
Effect.Read] = {
|
||||
@ -172,12 +173,12 @@ case class BlockHeaderDAO()(
|
||||
/** Gets Block Headers between (inclusive) from and to, could be out of order */
|
||||
def getBetweenHeights(from: Int, to: Int): Future[Vector[BlockHeaderDb]] = {
|
||||
val query = getBetweenHeightsQuery(from, to)
|
||||
database.runVec(query)
|
||||
safeDatabase.runVec(query)
|
||||
}
|
||||
|
||||
def getBetweenHeightsQuery(
|
||||
from: Int,
|
||||
to: Int): SQLiteProfile.StreamingProfileAction[
|
||||
to: Int): profile.StreamingProfileAction[
|
||||
Seq[BlockHeaderDb],
|
||||
BlockHeaderDb,
|
||||
Effect.Read] = {
|
||||
@ -191,7 +192,7 @@ case class BlockHeaderDAO()(
|
||||
result
|
||||
}
|
||||
|
||||
private val maxHeightQuery: SQLiteProfile.ProfileAction[
|
||||
private val maxHeightQuery: profile.ProfileAction[
|
||||
Int,
|
||||
NoStream,
|
||||
Effect.Read] = {
|
||||
@ -202,7 +203,7 @@ case class BlockHeaderDAO()(
|
||||
/** Returns the chainTips in our database. This can be multiple headers if we have
|
||||
* competing blockchains (fork) */
|
||||
def chainTips: Future[Vector[BlockHeaderDb]] = {
|
||||
logger.debug(s"Getting chaintips from: ${database.config.dbConfig.config}")
|
||||
logger.debug(s"Getting chaintips from: ${dbConfig.config}")
|
||||
val aggregate = {
|
||||
maxHeightQuery.flatMap { height =>
|
||||
logger.debug(s"Max block height: $height")
|
||||
@ -214,7 +215,7 @@ case class BlockHeaderDAO()(
|
||||
}
|
||||
}
|
||||
|
||||
database.runVec(aggregate)
|
||||
safeDatabase.runVec(aggregate)
|
||||
}
|
||||
|
||||
/** Returns competing blockchains that are contained in our BlockHeaderDAO
|
||||
@ -264,4 +265,45 @@ case class BlockHeaderDAO()(
|
||||
}
|
||||
}
|
||||
|
||||
/** A table that stores block headers related to a blockchain */
|
||||
class BlockHeaderTable(tag: Tag)
|
||||
extends Table[BlockHeaderDb](tag, "block_headers") {
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def height = column[Int]("height")
|
||||
|
||||
def hash = column[DoubleSha256DigestBE]("hash", O.PrimaryKey)
|
||||
|
||||
def version = column[Int32]("version")
|
||||
|
||||
def previousBlockHash = column[DoubleSha256DigestBE]("previous_block_hash")
|
||||
|
||||
def merkleRootHash = column[DoubleSha256DigestBE]("merkle_root_hash")
|
||||
|
||||
def time = column[UInt32]("time")
|
||||
|
||||
def nBits = column[UInt32]("n_bits")
|
||||
|
||||
def nonce = column[UInt32]("nonce")
|
||||
|
||||
def hex = column[String]("hex")
|
||||
|
||||
/** The sql index for searching based on [[height]] */
|
||||
def heightIndex = index("block_headers_height_index", height)
|
||||
|
||||
def hashIndex = index("block_headers_hash_index", hash)
|
||||
|
||||
def * = {
|
||||
(height,
|
||||
hash,
|
||||
version,
|
||||
previousBlockHash,
|
||||
merkleRootHash,
|
||||
time,
|
||||
nBits,
|
||||
nonce,
|
||||
hex).<>(BlockHeaderDb.tupled, BlockHeaderDb.unapply)
|
||||
}
|
||||
|
||||
}
|
||||
}
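With the table owned by the DAO and all queries routed through `safeDatabase`, a typical interaction with `BlockHeaderDAO` looks roughly like the sketch below; it mirrors the genesis-header wiring in `ChainAppConfig.initialize()` above, and the helper name and implicit wiring are assumptions:

```scala
import org.bitcoins.chain.config.ChainAppConfig
import org.bitcoins.chain.models.{BlockHeaderDAO, BlockHeaderDb, BlockHeaderDbHelper}
import scala.concurrent.{ExecutionContext, Future}

// Sketch: store the genesis header and read it back by hash.
def storeGenesis()(
    implicit ec: ExecutionContext,
    chainConfig: ChainAppConfig): Future[Option[BlockHeaderDb]] = {
  val bhDAO = BlockHeaderDAO() // profile and database come from chainConfig
  val genesisHeader =
    BlockHeaderDbHelper.fromBlockHeader(
      height = 0,
      bh = chainConfig.chain.genesisBlock.blockHeader)
  for {
    created <- bhDAO.create(genesisHeader)
    readBack <- bhDAO.findByHash(created.hashBE)
  } yield readBack
}
```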
|
||||
|
@ -3,7 +3,6 @@ package org.bitcoins.chain.models
|
||||
import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.core.number.{Int32, UInt32}
|
||||
import org.bitcoins.core.protocol.blockchain.BlockHeader
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
|
||||
case class BlockHeaderDb(
|
||||
height: Int,
|
||||
@ -45,45 +44,3 @@ object BlockHeaderDbHelper {
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** A table that stores block headers related to a blockchain */
|
||||
class BlockHeaderTable(tag: Tag)
|
||||
extends Table[BlockHeaderDb](tag, "block_headers") {
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def height = column[Int]("height")
|
||||
|
||||
def hash = column[DoubleSha256DigestBE]("hash", O.PrimaryKey)
|
||||
|
||||
def version = column[Int32]("version")
|
||||
|
||||
def previousBlockHash = column[DoubleSha256DigestBE]("previous_block_hash")
|
||||
|
||||
def merkleRootHash = column[DoubleSha256DigestBE]("merkle_root_hash")
|
||||
|
||||
def time = column[UInt32]("time")
|
||||
|
||||
def nBits = column[UInt32]("n_bits")
|
||||
|
||||
def nonce = column[UInt32]("nonce")
|
||||
|
||||
def hex = column[String]("hex")
|
||||
|
||||
/** The sql index for searching based on [[height]] */
|
||||
def heightIndex = index("block_headers_height_index", height)
|
||||
|
||||
def hashIndex = index("block_headers_hash_index", hash)
|
||||
|
||||
def * = {
|
||||
(height,
|
||||
hash,
|
||||
version,
|
||||
previousBlockHash,
|
||||
merkleRootHash,
|
||||
time,
|
||||
nBits,
|
||||
nonce,
|
||||
hex).<>(BlockHeaderDb.tupled, BlockHeaderDb.unapply)
|
||||
}
|
||||
|
||||
}
|
@ -2,32 +2,56 @@ package org.bitcoins.chain.models
|
||||
|
||||
import org.bitcoins.chain.config.ChainAppConfig
|
||||
import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.core.gcs.FilterType
|
||||
import org.bitcoins.db.{CRUD, SlickUtil}
|
||||
import slick.jdbc.SQLiteProfile
|
||||
import slick.lifted.TableQuery
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
import scodec.bits.ByteVector
|
||||
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
|
||||
case class CompactFilterDAO()(
|
||||
implicit ec: ExecutionContext,
|
||||
appConfig: ChainAppConfig)
|
||||
extends CRUD[CompactFilterDb, DoubleSha256DigestBE] {
|
||||
override val appConfig: ChainAppConfig)
|
||||
extends CRUD[CompactFilterDb, DoubleSha256DigestBE]
|
||||
with SlickUtil[CompactFilterDb, DoubleSha256DigestBE] {
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
import profile.api._
|
||||
|
||||
override val table = TableQuery[CompactFilterTable]
|
||||
class CompactFilterTable(tag: Tag)
|
||||
extends Table[CompactFilterDb](tag, "cfilters") {
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def hash = column[DoubleSha256DigestBE]("hash")
|
||||
|
||||
def filterType = column[FilterType]("filter_type")
|
||||
|
||||
def bytes = column[ByteVector]("bytes")
|
||||
|
||||
def height = column[Int]("height")
|
||||
|
||||
def blockHash = column[DoubleSha256DigestBE]("block_hash", O.PrimaryKey)
|
||||
|
||||
def heightIndex = index("cfilters_height_index", height)
|
||||
|
||||
def hashIndex = index("cfilters_hash_index", hash)
|
||||
|
||||
override def * = {
|
||||
(hash, filterType, bytes, height, blockHash) <> (CompactFilterDb.tupled, CompactFilterDb.unapply)
|
||||
}
|
||||
}
|
||||
|
||||
override val table: profile.api.TableQuery[CompactFilterTable] = {
|
||||
TableQuery[CompactFilterTable]
|
||||
}
|
||||
|
||||
override def createAll(
|
||||
filters: Vector[CompactFilterDb]): Future[Vector[CompactFilterDb]] = {
|
||||
SlickUtil.createAllNoAutoInc(ts = filters,
|
||||
database = database,
|
||||
table = table)
|
||||
createAllNoAutoInc(ts = filters, database = safeDatabase)
|
||||
}
|
||||
|
||||
/** Finds the rows that correlate to the given primary keys */
|
||||
override protected def findByPrimaryKeys(
|
||||
ids: Vector[DoubleSha256DigestBE]): Query[
|
||||
Table[_],
|
||||
Table[CompactFilterDb],
|
||||
CompactFilterDb,
|
||||
Seq] = {
|
||||
table.filter(_.blockHash.inSet(ids))
|
||||
@ -46,11 +70,10 @@ case class CompactFilterDAO()(
|
||||
/** Retrieves a [[CompactFilterDb]] at the given height */
|
||||
def getAtHeight(height: Int): Future[Vector[CompactFilterDb]] = {
|
||||
val query = getAtHeightQuery(height)
|
||||
database.runVec(query)
|
||||
safeDatabase.runVec(query)
|
||||
}
|
||||
|
||||
private def getAtHeightQuery(
|
||||
height: Int): SQLiteProfile.StreamingProfileAction[
|
||||
private def getAtHeightQuery(height: Int): profile.StreamingProfileAction[
|
||||
Seq[CompactFilterDb],
|
||||
CompactFilterDb,
|
||||
Effect.Read] = {
|
||||
@ -60,11 +83,11 @@ case class CompactFilterDAO()(
|
||||
/** Returns the maximum block height from our database */
|
||||
def maxHeight: Future[Int] = {
|
||||
val query = maxHeightQuery
|
||||
val result = database.run(query)
|
||||
val result = safeDatabase.run(query)
|
||||
result
|
||||
}
|
||||
|
||||
private val maxHeightQuery: SQLiteProfile.ProfileAction[
|
||||
private val maxHeightQuery: profile.ProfileAction[
|
||||
Int,
|
||||
NoStream,
|
||||
Effect.Read] = {
|
||||
@ -75,16 +98,15 @@ case class CompactFilterDAO()(
|
||||
/** Gets filters between (inclusive) from and to, could be out of order */
|
||||
def getBetweenHeights(from: Int, to: Int): Future[Vector[CompactFilterDb]] = {
|
||||
val query = getBetweenHeightsQuery(from, to)
|
||||
database.runVec(query)
|
||||
safeDatabase.runVec(query)
|
||||
}
|
||||
|
||||
private def getBetweenHeightsQuery(
|
||||
from: Int,
|
||||
to: Int): SQLiteProfile.StreamingProfileAction[
|
||||
to: Int): profile.StreamingProfileAction[
|
||||
Seq[CompactFilterDb],
|
||||
CompactFilterDb,
|
||||
Effect.Read] = {
|
||||
table.filter(header => header.height >= from && header.height <= to).result
|
||||
}
|
||||
|
||||
}
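The table classes move inside the DAOs because `Table`, `Tag` and `column` are path-dependent on the profile, so they must be defined where `profile.api._` is in scope. A self-contained sketch of that structure with an illustrative `Widget` row (not a bitcoin-s type):

```scala
import slick.jdbc.JdbcProfile

case class Widget(name: String, id: Long)

// Sketch of the nested-table pattern used by the DAOs in this diff.
trait WidgetDAOLike {
  val profile: JdbcProfile
  import profile.api._

  // The table class can only be written here, where the profile's Table/Tag exist.
  class WidgetTable(tag: Tag) extends Table[Widget](tag, "widgets") {
    def id = column[Long]("id", O.PrimaryKey)
    def name = column[String]("name")
    def * = (name, id) <> (Widget.tupled, Widget.unapply)
  }

  val table: TableQuery[WidgetTable] = TableQuery[WidgetTable]
}
```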
|
||||
|
@ -4,8 +4,6 @@ import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.core.gcs.{BlockFilter, FilterType, GolombFilter}
|
||||
import org.bitcoins.core.util.CryptoUtil
|
||||
import scodec.bits.ByteVector
|
||||
import slick.lifted.Tag
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
|
||||
case class CompactFilterDb(
|
||||
hashBE: DoubleSha256DigestBE,
|
||||
@ -44,26 +42,3 @@ object CompactFilterDbHelper {
|
||||
height,
|
||||
blockHash)
|
||||
}
|
||||
|
||||
class CompactFilterTable(tag: Tag)
|
||||
extends Table[CompactFilterDb](tag, "cfilters") {
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def hash = column[DoubleSha256DigestBE]("hash")
|
||||
|
||||
def filterType = column[FilterType]("filter_type")
|
||||
|
||||
def bytes = column[ByteVector]("bytes")
|
||||
|
||||
def height = column[Int]("height")
|
||||
|
||||
def blockHash = column[DoubleSha256DigestBE]("block_hash", O.PrimaryKey)
|
||||
|
||||
def heightIndex = index("cfilters_height_index", height)
|
||||
|
||||
def hashIndex = index("cfilters_hash_index", hash)
|
||||
|
||||
override def * = {
|
||||
(hash, filterType, bytes, height, blockHash) <> (CompactFilterDb.tupled, CompactFilterDb.unapply)
|
||||
}
|
||||
}
|
@ -3,31 +3,53 @@ package org.bitcoins.chain.models
|
||||
import org.bitcoins.chain.config.ChainAppConfig
|
||||
import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.db.{CRUD, SlickUtil}
|
||||
import slick.jdbc.SQLiteProfile
|
||||
import slick.lifted.TableQuery
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
|
||||
case class CompactFilterHeaderDAO()(
|
||||
implicit ec: ExecutionContext,
|
||||
appConfig: ChainAppConfig)
|
||||
extends CRUD[CompactFilterHeaderDb, DoubleSha256DigestBE] {
|
||||
override val appConfig: ChainAppConfig)
|
||||
extends CRUD[CompactFilterHeaderDb, DoubleSha256DigestBE]
|
||||
with SlickUtil[CompactFilterHeaderDb, DoubleSha256DigestBE] {
|
||||
import profile.api._
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
override val table = TableQuery[CompactFilterHeaderTable]
|
||||
class CompactFilterHeaderTable(tag: Tag)
|
||||
extends Table[CompactFilterHeaderDb](tag, "cfheaders") {
|
||||
|
||||
def hash = column[DoubleSha256DigestBE]("hash", O.PrimaryKey)
|
||||
|
||||
def filterHash = column[DoubleSha256DigestBE]("filter_hash")
|
||||
|
||||
def previousFilterHeader =
|
||||
column[DoubleSha256DigestBE]("previous_filter_header")
|
||||
|
||||
def blockHash = column[DoubleSha256DigestBE]("block_hash")
|
||||
|
||||
def height = column[Int]("height")
|
||||
|
||||
def heightIndex = index("cfheaders_height_index", height)
|
||||
|
||||
def blockHashIndex = index("cfheaders_block_hash_index", blockHash)
|
||||
|
||||
override def * = {
|
||||
(hash, filterHash, previousFilterHeader, blockHash, height) <> (CompactFilterHeaderDb.tupled, CompactFilterHeaderDb.unapply)
|
||||
}
|
||||
}
|
||||
|
||||
override val table: profile.api.TableQuery[CompactFilterHeaderTable] = {
|
||||
TableQuery[CompactFilterHeaderTable]
|
||||
}
|
||||
|
||||
override def createAll(filterHeaders: Vector[CompactFilterHeaderDb]): Future[
|
||||
Vector[CompactFilterHeaderDb]] = {
|
||||
SlickUtil.createAllNoAutoInc(ts = filterHeaders,
|
||||
database = database,
|
||||
table = table)
|
||||
createAllNoAutoInc(ts = filterHeaders, database = safeDatabase)
|
||||
}
|
||||
|
||||
/** Finds the rows that correlate to the given primary keys */
|
||||
override protected def findByPrimaryKeys(
|
||||
ids: Vector[DoubleSha256DigestBE]): Query[
|
||||
Table[_],
|
||||
Table[CompactFilterHeaderDb],
|
||||
CompactFilterHeaderDb,
|
||||
Seq] =
|
||||
table.filter(_.hash.inSet(ids))
|
||||
@ -46,23 +68,22 @@ case class CompactFilterHeaderDAO()(
|
||||
def findByBlockHash(
|
||||
hash: DoubleSha256DigestBE): Future[Option[CompactFilterHeaderDb]] = {
|
||||
val query = table.filter(_.blockHash === hash).take(1)
|
||||
database.runVec(query.result).map(_.headOption)
|
||||
safeDatabase.runVec(query.result).map(_.headOption)
|
||||
}
|
||||
|
||||
def findAllByBlockHashes(hashes: Vector[DoubleSha256DigestBE]): Future[
|
||||
Vector[CompactFilterHeaderDb]] = {
|
||||
val query = table.filter(_.blockHash.inSet(hashes))
|
||||
database.runVec(query.result)
|
||||
safeDatabase.runVec(query.result)
|
||||
}
|
||||
|
||||
/** Retrieves a [[CompactFilterHeaderDb]] at the given height */
|
||||
def getAtHeight(height: Int): Future[Vector[CompactFilterHeaderDb]] = {
|
||||
val query = getAtHeightQuery(height)
|
||||
database.runVec(query)
|
||||
safeDatabase.runVec(query)
|
||||
}
|
||||
|
||||
private def getAtHeightQuery(
|
||||
height: Int): SQLiteProfile.StreamingProfileAction[
|
||||
private def getAtHeightQuery(height: Int): slick.sql.FixedSqlStreamingAction[
|
||||
Seq[CompactFilterHeaderDb],
|
||||
CompactFilterHeaderDb,
|
||||
Effect.Read] = {
|
||||
@ -76,11 +97,12 @@ case class CompactFilterHeaderDAO()(
|
||||
result
|
||||
}
|
||||
|
||||
private val maxHeightQuery: SQLiteProfile.ProfileAction[
|
||||
private val maxHeightQuery: profile.ProfileAction[
|
||||
Int,
|
||||
NoStream,
|
||||
Effect.Read] = {
|
||||
val query = table.map(_.height).max.getOrElse(0).result
|
||||
query
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -2,8 +2,6 @@ package org.bitcoins.chain.models
|
||||
|
||||
import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.core.gcs.FilterHeader
|
||||
import slick.lifted.Tag
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
|
||||
case class CompactFilterHeaderDb(
|
||||
hashBE: DoubleSha256DigestBE,
|
||||
@ -34,27 +32,3 @@ object CompactFilterHeaderDbHelper {
|
||||
height = height
|
||||
)
|
||||
}
|
||||
|
||||
class CompactFilterHeaderTable(tag: Tag)
|
||||
extends Table[CompactFilterHeaderDb](tag, "cfheaders") {
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def hash = column[DoubleSha256DigestBE]("hash", O.PrimaryKey)
|
||||
|
||||
def filterHash = column[DoubleSha256DigestBE]("filter_hash")
|
||||
|
||||
def previousFilterHeader =
|
||||
column[DoubleSha256DigestBE]("previous_filter_header")
|
||||
|
||||
def blockHash = column[DoubleSha256DigestBE]("block_hash")
|
||||
|
||||
def height = column[Int]("height")
|
||||
|
||||
def heightIndex = index("cfheaders_height_index", height)
|
||||
|
||||
def blockHashIndex = index("cfheaders_block_hash_index", blockHash)
|
||||
|
||||
override def * = {
|
||||
(hash, filterHash, previousFilterHeader, blockHash, height) <> (CompactFilterHeaderDb.tupled, CompactFilterHeaderDb.unapply)
|
||||
}
|
||||
}
|
@ -7,33 +7,62 @@ import org.bitcoins.node.config.NodeAppConfig
|
||||
import org.bitcoins.node.db.NodeDbManagement
|
||||
import org.bitcoins.testkit.BitcoinSTestAppConfig
|
||||
import org.bitcoins.testkit.BitcoinSTestAppConfig.ProjectType
|
||||
import org.bitcoins.testkit.util.BitcoinSUnitTest
|
||||
import org.bitcoins.testkit.util.{BitcoinSAsyncTest, BitcoinSUnitTest}
|
||||
import org.bitcoins.wallet.config.WalletAppConfig
|
||||
import org.bitcoins.wallet.db.WalletDbManagement
|
||||
|
||||
class DbManagementTest extends BitcoinSUnitTest {
|
||||
import scala.concurrent.ExecutionContext
|
||||
|
||||
class DbManagementTest extends BitcoinSAsyncTest {
|
||||
|
||||
def dbConfig(project: ProjectType): Config = {
|
||||
BitcoinSTestAppConfig.configWithMemoryDb(Some(project))
|
||||
}
|
||||
|
||||
def createChainDbManagement(
|
||||
chainAppConfig: ChainAppConfig): ChainDbManagement =
|
||||
new ChainDbManagement with JdbcProfileComponent[ChainAppConfig] {
|
||||
override val ec: ExecutionContext = system.dispatcher
|
||||
|
||||
override def appConfig: ChainAppConfig = chainAppConfig
|
||||
}
|
||||
|
||||
def createWalletDbManagement(
|
||||
walletAppConfig: WalletAppConfig): WalletDbManagement =
|
||||
new WalletDbManagement with JdbcProfileComponent[WalletAppConfig] {
|
||||
override val ec: ExecutionContext = system.dispatcher
|
||||
|
||||
override def appConfig: WalletAppConfig = walletAppConfig
|
||||
}
|
||||
|
||||
def createNodeDbManagement(nodeAppConfig: NodeAppConfig): NodeDbManagement =
|
||||
new NodeDbManagement with JdbcProfileComponent[NodeAppConfig] {
|
||||
override val ec: ExecutionContext = system.dispatcher
|
||||
|
||||
override def appConfig: NodeAppConfig = nodeAppConfig
|
||||
}
|
||||
|
||||
it must "run migrations for chain db" in {
|
||||
val chainAppConfig = ChainAppConfig(BitcoinSTestAppConfig.tmpDir(),
|
||||
dbConfig(ProjectType.Chain))
|
||||
val result = ChainDbManagement.migrate(chainAppConfig)
|
||||
val chainDbManagement = createChainDbManagement(chainAppConfig)
|
||||
val result = chainDbManagement.migrate()
|
||||
assert(result == 1)
|
||||
}
|
||||
|
||||
it must "run migrations for wallet db" in {
|
||||
val walletAppConfig = WalletAppConfig(BitcoinSTestAppConfig.tmpDir(),
|
||||
dbConfig(ProjectType.Wallet))
|
||||
val result = WalletDbManagement.migrate(walletAppConfig)
|
||||
val walletDbManagement = createWalletDbManagement(walletAppConfig)
|
||||
val result = walletDbManagement.migrate()
|
||||
assert(result == 4)
|
||||
}
|
||||
|
||||
it must "run migrations for node db" in {
|
||||
val nodeAppConfig =
|
||||
NodeAppConfig(BitcoinSTestAppConfig.tmpDir(), dbConfig(ProjectType.Node))
|
||||
val result = NodeDbManagement.migrate(nodeAppConfig)
|
||||
val nodeDbManagement = createNodeDbManagement(nodeAppConfig)
|
||||
val result = nodeDbManagement.migrate()
|
||||
assert(result == 1)
|
||||
}
|
||||
}
|
||||
|
@ -10,13 +10,7 @@ import org.bitcoins.core.config.TestNet3
|
||||
import org.bitcoins.core.config.RegTest
|
||||
import com.typesafe.config._
|
||||
import org.bitcoins.core.util.BitcoinSLogger
|
||||
import slick.jdbc.SQLiteProfile
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
|
||||
import scala.util.Try
|
||||
import scala.util.Success
|
||||
import scala.util.Failure
|
||||
import slick.basic.DatabaseConfig
|
||||
import org.bitcoins.core.protocol.blockchain.MainNetChainParams
|
||||
import org.bitcoins.core.protocol.blockchain.TestNetChainParams
|
||||
import org.bitcoins.core.protocol.blockchain.RegTestNetChainParams
|
||||
@ -35,7 +29,9 @@ import ch.qos.logback.classic.Level
|
||||
* @see [[https://github.com/bitcoin-s/bitcoin-s-core/blob/master/doc/configuration.md `configuration.md`]]
|
||||
* for more information.
|
||||
*/
|
||||
abstract class AppConfig extends BitcoinSLogger {
|
||||
abstract class AppConfig {
|
||||
|
||||
private val logger = BitcoinSLogger.logger
|
||||
|
||||
/**
|
||||
* Initializes this project.
|
||||
@ -125,72 +121,6 @@ abstract class AppConfig extends BitcoinSLogger {
|
||||
*/
|
||||
protected[bitcoins] def moduleName: String
|
||||
|
||||
lazy val jdbcUrl: String = {
|
||||
dbConfig.config.getString("db.url")
|
||||
}
|
||||
|
||||
/**
|
||||
* The configuration details for connecting/using the database for our projects
|
||||
* that require datbase connections
|
||||
*/
|
||||
lazy val dbConfig: DatabaseConfig[SQLiteProfile] = {
|
||||
val dbConfig = {
|
||||
Try {
|
||||
DatabaseConfig.forConfig[SQLiteProfile](path = moduleName, config)
|
||||
} match {
|
||||
case Success(value) =>
|
||||
value
|
||||
case Failure(exception) =>
|
||||
logger.error(s"Error when loading database from config: $exception")
|
||||
logger.error(s"Configuration: ${config.asReadableJson}")
|
||||
throw exception
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(s"Resolved DB config: ${dbConfig.config}")
|
||||
|
||||
val _ = createDbFileIfDNE()
|
||||
|
||||
dbConfig
|
||||
}
|
||||
|
||||
/** The database we are connecting to */
|
||||
lazy val database: Database = {
|
||||
dbConfig.db
|
||||
}
|
||||
|
||||
/** The path where our DB is located */
|
||||
// todo: what happens to this if we
|
||||
// dont use SQLite?
|
||||
lazy val dbPath: Path = {
|
||||
val pathStr = config.getString(s"$moduleName.db.path")
|
||||
val path = Paths.get(pathStr)
|
||||
logger.debug(s"DB path: $path")
|
||||
path
|
||||
}
|
||||
|
||||
/** The name of our database */
|
||||
// todo: what happens to this if we
|
||||
// dont use SQLite?
|
||||
lazy val dbName: String = {
|
||||
config.getString(s"$moduleName.db.name")
|
||||
}
|
||||
|
||||
private def createDbFileIfDNE(): Unit = {
|
||||
//should add a check in here that we are using sqlite
|
||||
if (!Files.exists(dbPath)) {
|
||||
val _ = {
|
||||
logger.debug(s"Creating database directory=$dbPath")
|
||||
Files.createDirectories(dbPath)
|
||||
val dbFilePath = dbPath.resolve(dbName)
|
||||
logger.debug(s"Creating database file=$dbFilePath")
|
||||
Files.createFile(dbFilePath)
|
||||
}
|
||||
|
||||
()
|
||||
}
|
||||
}
|
||||
|
||||
/** Chain parameters for the blockchain we're on */
|
||||
lazy val chain: ChainParams = {
|
||||
val networkStr = config.getString("network")
|
||||
|
@ -2,8 +2,8 @@ package org.bitcoins.db
|
||||
|
||||
import java.sql.SQLException
|
||||
|
||||
import org.bitcoins.core.config.MainNet
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
import org.bitcoins.core.util.BitcoinSLogger
|
||||
import slick.dbio.{DBIOAction, NoStream}
|
||||
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
|
||||
@ -16,15 +16,37 @@ import scala.concurrent.{ExecutionContext, Future}
|
||||
* the table and the database you are connecting to.
|
||||
*/
|
||||
abstract class CRUD[T, PrimaryKeyType](
|
||||
implicit private val config: AppConfig,
|
||||
private val ec: ExecutionContext)
|
||||
extends DatabaseLogger {
|
||||
implicit private val ec: ExecutionContext,
|
||||
override val appConfig: AppConfig)
|
||||
extends JdbcProfileComponent[AppConfig] {
|
||||
|
||||
import profile.api._
|
||||
|
||||
import scala.language.implicitConversions
|
||||
|
||||
/** We need to cast from TableQuery's of internal types (e.g. AddressDAO#AddressTable) to external
|
||||
* versions of them (e.g. AddressDAO().table). You'll notice that although the latter is a subtype
|
||||
* of the first, this requires a cast since TableQuery is not covariant in its type parameter.
|
||||
*
|
||||
* However, since Query is covariant in its first type parameter, I believe the cast from
|
||||
* TableQuery[T1] to TableQuery[T2] will always be safe so long as T1 is a subtype of T2
|
||||
* AND T1#TableElementType is equal to T2#TableElementType.
|
||||
*
|
||||
* The above conditions are always the case when this is called within DAOs as it is only
|
||||
* ever used for things of the form TableQuery[XDAO().table] -> TableQuery[XDAO#XTable].
|
||||
*/
|
||||
implicit protected def tableQuerySafeSubtypeCast[
|
||||
SpecificT <: slick.lifted.AbstractTable[_],
|
||||
SomeT <: SpecificT](
|
||||
tableQuery: TableQuery[SomeT]): TableQuery[SpecificT] = {
|
||||
tableQuery.asInstanceOf[TableQuery[SpecificT]]
|
||||
}
|
||||
|
||||
/** The table inside our database we are inserting into */
|
||||
val table: TableQuery[_ <: Table[T]]
|
||||
val table: profile.api.TableQuery[_ <: profile.api.Table[T]]
|
||||
|
||||
/** Binding to the actual database itself, this is what is used to run querys */
|
||||
def database: SafeDatabase = SafeDatabase(config)
|
||||
def safeDatabase: SafeDatabase = SafeDatabase(this)
|
||||
|
||||
/**
|
||||
* create a record in the database
|
||||
@ -33,7 +55,7 @@ abstract class CRUD[T, PrimaryKeyType](
|
||||
* @return the inserted record
|
||||
*/
|
||||
def create(t: T): Future[T] = {
|
||||
logger.trace(s"Writing $t to DB with config: ${config.config}")
|
||||
logger.trace(s"Writing $t to DB with config: ${appConfig.config}")
|
||||
createAll(Vector(t)).map(_.head)
|
||||
}
|
||||
|
||||
@ -46,9 +68,9 @@ abstract class CRUD[T, PrimaryKeyType](
|
||||
* @return Option[T] - the record if found, else none
|
||||
*/
|
||||
def read(id: PrimaryKeyType): Future[Option[T]] = {
|
||||
logger.trace(s"Reading from DB with config: ${config.config}")
|
||||
logger.trace(s"Reading from DB with config: ${appConfig.config}")
|
||||
val query = findByPrimaryKey(id)
|
||||
val rows: Future[Seq[T]] = database.run(query.result)
|
||||
val rows: Future[Seq[T]] = safeDatabase.run(query.result)
|
||||
rows.map(_.headOption)
|
||||
}
|
||||
|
||||
@ -66,10 +88,11 @@ abstract class CRUD[T, PrimaryKeyType](
|
||||
def updateAll(ts: Vector[T]): Future[Vector[T]] = {
|
||||
val query = findAll(ts)
|
||||
val actions = ts.map(t => query.update(t))
|
||||
val affectedRows: Future[Vector[Int]] = database.run(DBIO.sequence(actions))
|
||||
val affectedRows: Future[Vector[Int]] =
|
||||
safeDatabase.run(DBIO.sequence(actions))
|
||||
val updatedTs = findAll(ts)
|
||||
affectedRows.flatMap { _ =>
|
||||
database.runVec(updatedTs.result)
|
||||
safeDatabase.runVec(updatedTs.result)
|
||||
}
|
||||
}
|
||||
|
||||
@ -82,14 +105,14 @@ abstract class CRUD[T, PrimaryKeyType](
|
||||
def delete(t: T): Future[Int] = {
|
||||
logger.debug("Deleting record: " + t)
|
||||
val query: Query[Table[_], T, Seq] = find(t)
|
||||
database.run(query.delete)
|
||||
safeDatabase.run(query.delete)
|
||||
}
|
||||
|
||||
/**
|
||||
* delete all records from the table
|
||||
*/
|
||||
def deleteAll(): Future[Int] =
|
||||
database.run(table.delete)
|
||||
safeDatabase.run(table.delete)
|
||||
|
||||
/**
|
||||
* insert the record if it does not exist, update it if it does
|
||||
@ -102,9 +125,9 @@ abstract class CRUD[T, PrimaryKeyType](
|
||||
/** Upserts all of the given ts in the database, then returns the upserted values */
|
||||
def upsertAll(ts: Vector[T]): Future[Vector[T]] = {
|
||||
val actions = ts.map(t => table.insertOrUpdate(t))
|
||||
val result: Future[Vector[Int]] = database.run(DBIO.sequence(actions))
|
||||
val result: Future[Vector[Int]] = safeDatabase.run(DBIO.sequence(actions))
|
||||
val findQueryFuture = result.map(_ => findAll(ts).result)
|
||||
findQueryFuture.flatMap(database.runVec(_))
|
||||
findQueryFuture.flatMap(safeDatabase.runVec(_))
|
||||
}
|
||||
|
||||
/**
|
||||
@ -118,7 +141,7 @@ abstract class CRUD[T, PrimaryKeyType](
|
||||
|
||||
/** Finds the rows that correlate to the given primary keys */
|
||||
protected def findByPrimaryKeys(
|
||||
ids: Vector[PrimaryKeyType]): Query[Table[_], T, Seq]
|
||||
ids: Vector[PrimaryKeyType]): Query[Table[T], T, Seq]
|
||||
|
||||
/**
|
||||
* return the row that corresponds with this record
|
||||
@ -132,16 +155,17 @@ abstract class CRUD[T, PrimaryKeyType](
|
||||
|
||||
/** Finds all elements in the table */
|
||||
def findAll(): Future[Vector[T]] =
|
||||
database.run(table.result).map(_.toVector)
|
||||
safeDatabase.run(table.result).map(_.toVector)
|
||||
|
||||
/** Returns number of rows in the table */
|
||||
def count(): Future[Int] = database.run(table.length.result)
|
||||
def count(): Future[Int] = safeDatabase.run(table.length.result)
|
||||
}
|
||||
|
||||
case class SafeDatabase(config: AppConfig) extends DatabaseLogger {
|
||||
implicit private val conf: AppConfig = config
|
||||
case class SafeDatabase(jdbcProfile: JdbcProfileComponent[AppConfig])
|
||||
extends BitcoinSLogger {
|
||||
|
||||
import config.database
|
||||
import jdbcProfile.database
|
||||
import jdbcProfile.profile.api.actionBasedSQLInterpolation
|
||||
|
||||
/**
|
||||
* SQLite does not enable foreign keys by default. This query is
|
||||
@ -154,11 +178,9 @@ case class SafeDatabase(config: AppConfig) extends DatabaseLogger {
|
||||
private def logAndThrowError(
|
||||
action: DBIOAction[_, NoStream, _]): PartialFunction[Throwable, Nothing] = {
|
||||
case err: SQLException =>
|
||||
if (config.network != MainNet) {
|
||||
logger.error(
|
||||
s"Error when executing query ${action.getDumpInfo.getNamePlusMainInfo}")
|
||||
logger.error(s"$err")
|
||||
}
|
||||
logger.error(
|
||||
s"Error when executing query ${action.getDumpInfo.getNamePlusMainInfo}")
|
||||
logger.error(s"$err")
|
||||
throw err
|
||||
}
|
||||
|
||||
|
@ -1,30 +1,33 @@
|
||||
package org.bitcoins.db
|
||||
|
||||
import slick.dbio.Effect.Write
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
|
||||
import scala.concurrent.Future
|
||||
import scala.concurrent.ExecutionContext
|
||||
|
||||
abstract class CRUDAutoInc[T <: DbRowAutoInc[T]](
|
||||
implicit config: AppConfig,
|
||||
ec: ExecutionContext)
|
||||
extends CRUD[T, Long] {
|
||||
implicit ec: ExecutionContext,
|
||||
override val appConfig: AppConfig)
|
||||
extends CRUD[T, Long]()(ec, appConfig)
|
||||
with TableAutoIncComponent[T] {
|
||||
import profile.api._
|
||||
|
||||
/** The table inside our database we are inserting into */
|
||||
override val table: TableQuery[_ <: TableAutoInc[T]]
|
||||
override val table: profile.api.TableQuery[_ <: TableAutoInc[T]]
|
||||
|
||||
override def createAll(ts: Vector[T]): Future[Vector[T]] = {
|
||||
val query = table
|
||||
.returning(table.map(_.id))
|
||||
.into((t, id) => t.copyWithId(id = id))
|
||||
val actions: DBIOAction[query.MultiInsertResult, NoStream, Write] =
|
||||
query.++=(ts)
|
||||
database.runVec(actions)
|
||||
val idQuery = table.map(_.id)
|
||||
val idAutoInc = table.returning(idQuery)
|
||||
val query = {
|
||||
idAutoInc.into((t, id) => t.copyWithId(id = id))
|
||||
}
|
||||
val actions = query.++=(ts)
|
||||
safeDatabase.runVec(actions)
|
||||
}
|
||||
|
||||
override def findByPrimaryKeys(ids: Vector[Long]): Query[Table[_], T, Seq] = {
|
||||
table.filter(_.id.inSet(ids))
|
||||
override def findByPrimaryKeys(
|
||||
ids: Vector[Long]): Query[TableAutoInc[T], T, Seq] = {
|
||||
table.filter { t =>
|
||||
t.id.inSet(ids)
|
||||
}
|
||||
}
|
||||
|
||||
override def findAll(ts: Vector[T]): Query[Table[_], T, Seq] = {
|
||||
@ -32,3 +35,19 @@ abstract class CRUDAutoInc[T <: DbRowAutoInc[T]](
|
||||
findByPrimaryKeys(ids)
|
||||
}
|
||||
}
|
||||
|
||||
/** Defines a table that has an auto incremented fields that is named id.
|
||||
* This is useful for things we want to store that don't have an
|
||||
* inherent id such as a hash.
|
||||
* @param tag
|
||||
* @param tableName
|
||||
* @tparam T
|
||||
*/
|
||||
trait TableAutoIncComponent[T <: DbRowAutoInc[T]] { self: CRUDAutoInc[T] =>
|
||||
import profile.api._
|
||||
|
||||
abstract class TableAutoInc[T](tag: profile.api.Tag, tableName: String)
|
||||
extends profile.api.Table[T](tag, tableName) {
|
||||
def id: Rep[Long] = column[Long]("id", O.PrimaryKey, O.AutoInc)
|
||||
}
|
||||
}
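The auto-increment machinery relies on rows that can be copied with the id the database assigns. A minimal sketch of that contract, using the `copyWithId` idea from this diff but with illustrative `Demo*`/`Note` names:

```scala
// The contract CRUDAutoInc relies on: a row that may not have an id yet and
// that can be copied with the id the database assigns on insert.
trait DemoDbRowAutoInc[T] {
  def id: Option[Long]
  def copyWithId(id: Long): T
}

final case class Note(text: String, id: Option[Long] = None)
    extends DemoDbRowAutoInc[Note] {
  override def copyWithId(id: Long): Note = copy(id = Some(id))
}

// createAll inserts the rows, asks the database for the generated ids via
// table.returning(table.map(_.id)), and hands each id back through copyWithId.
val saved = Note("hello").copyWithId(42L) // Note("hello", Some(42))
```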
|
||||
|
@ -1,34 +1,40 @@
|
||||
package org.bitcoins.db
|
||||
|
||||
import org.bitcoins.core.util.BitcoinSLogger
|
||||
import org.flywaydb.core.Flyway
|
||||
import org.flywaydb.core.api.FlywayException
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
|
||||
abstract class DbManagement extends DatabaseLogger {
|
||||
def allTables: List[TableQuery[_ <: Table[_]]]
|
||||
trait DbManagement extends BitcoinSLogger {
|
||||
_: JdbcProfileComponent[AppConfig] =>
|
||||
import profile.api._
|
||||
|
||||
/** Lists all tables in the given database */
|
||||
def listTables(db: Database): Future[Vector[SQLiteTableInfo]] = {
|
||||
import DbCommonsColumnMappers._
|
||||
val query = sql"SELECT * FROM sqlite_master where type='table'"
|
||||
.as[SQLiteTableInfo]
|
||||
db.run(query)
|
||||
import scala.language.implicitConversions
|
||||
|
||||
/** Internally, slick defines the schema member as
|
||||
*
|
||||
* def schema: SchemaDescription = buildTableSchemaDescription(q.shaped.value.asInstanceOf[Table[_]])
|
||||
*
|
||||
* we need to cast between TableQuery's of specific table types to the more generic TableQuery[Table[_]]
|
||||
* to get methods in this trait working as they require schema (which essentially does this cast anyway)
|
||||
*
|
||||
* This cast is needed because TableQuery is not covariant in its type parameter. However, since Query
|
||||
* is covariant in its first type parameter, I believe the cast from TableQuery[T1] to TableQuery[T2] will
|
||||
* always be safe so long as T1 is a subtype of T2 AND T1#TableElementType is equal to T2#TableElementType.
|
||||
*
|
||||
* The above conditions are always the case when this is called in the current code base and will
|
||||
* stay that way so long as no one tries anything too fancy.
|
||||
*/
|
||||
implicit protected def tableQueryToWithSchema(
|
||||
tableQuery: TableQuery[_]): TableQuery[Table[_]] = {
|
||||
tableQuery.asInstanceOf[TableQuery[Table[_]]]
|
||||
}
|
||||
|
||||
/** Lists all tables in the given database */
|
||||
def listTables(db: SafeDatabase): Future[Vector[SQLiteTableInfo]] =
|
||||
listTables(db.config.database)
|
||||
def allTables: List[TableQuery[Table[_]]]
|
||||
|
||||
/** Creates all tables in our table list, in one SQL transaction */
|
||||
def createAll()(
|
||||
implicit config: AppConfig,
|
||||
ec: ExecutionContext
|
||||
): Future[Unit] = {
|
||||
val tables = allTables.map(_.baseTableRow.tableName).mkString(", ")
|
||||
logger.debug(s"Creating tables: $tables")
|
||||
|
||||
def createAll()(implicit ec: ExecutionContext): Future[Unit] = {
|
||||
val query = {
|
||||
val querySeq =
|
||||
allTables
|
||||
@ -42,13 +48,10 @@ abstract class DbManagement extends DatabaseLogger {
|
||||
DBIO.seq(querySeq: _*).transactionally
|
||||
}
|
||||
|
||||
import config.database
|
||||
database.run(query).map(_ => logger.debug(s"Created tables: $tables"))
|
||||
database.run(query).map(_ => logger.debug(s"Created tables"))
|
||||
}
|
||||
|
||||
def dropAll()(
|
||||
implicit config: AppConfig,
|
||||
ec: ExecutionContext): Future[Unit] = {
|
||||
def dropAll()(implicit ec: ExecutionContext): Future[Unit] = {
|
||||
Future.sequence(allTables.reverse.map(dropTable(_))).map(_ => ())
|
||||
}
|
||||
|
||||
@ -67,21 +70,18 @@ abstract class DbManagement extends DatabaseLogger {
|
||||
def createTable(
|
||||
table: TableQuery[_ <: Table[_]],
|
||||
createIfNotExists: Boolean = true)(
|
||||
implicit config: AppConfig,
|
||||
ec: ExecutionContext): Future[Unit] = {
|
||||
implicit ec: ExecutionContext): Future[Unit] = {
|
||||
val tableName = table.baseTableRow.tableName
|
||||
logger.debug(
|
||||
s"Creating table $tableName with DB config: ${config.dbConfig.config} ")
|
||||
s"Creating table $tableName with DB config: ${appConfig.config} ")
|
||||
|
||||
import config.database
|
||||
val query = createTableQuery(table, createIfNotExists)
|
||||
database.run(query).map(_ => logger.debug(s"Created table $tableName"))
|
||||
}
|
||||
|
||||
def dropTable(
|
||||
table: TableQuery[_ <: Table[_]]
|
||||
)(implicit config: AppConfig): Future[Unit] = {
|
||||
import config.database
|
||||
table: TableQuery[Table[_]]
|
||||
): Future[Unit] = {
|
||||
val result = database.run(table.schema.dropIfExists)
|
||||
result
|
||||
}
|
||||
@ -89,21 +89,21 @@ abstract class DbManagement extends DatabaseLogger {
|
||||
/** Executes migrations related to this database
|
||||
*
|
||||
* @see [[https://flywaydb.org/documentation/api/#programmatic-configuration-java]] */
|
||||
def migrate(appConfig: AppConfig): Int = {
|
||||
val url = appConfig.jdbcUrl
|
||||
def migrate(): Int = {
|
||||
val url = jdbcUrl
|
||||
val username = ""
|
||||
val password = ""
|
||||
//appConfig.dbName is for the format 'walletdb.sqlite' or 'nodedb.sqlite' etc
|
||||
//we need to remove the '.sqlite' suffix
|
||||
val dbName = appConfig.dbName.split('.').head.mkString
|
||||
val config = Flyway.configure().locations(s"classpath:${dbName}/migration/")
|
||||
val name = dbName.split('.').head.mkString
|
||||
val config = Flyway.configure().locations(s"classpath:${name}/migration/")
|
||||
val flyway = config.dataSource(url, username, password).load
|
||||
|
||||
try {
|
||||
flyway.migrate()
|
||||
} catch {
|
||||
case err: FlywayException =>
|
||||
logger(appConfig).warn(
|
||||
logger.warn(
|
||||
s"Failed to apply first round of migrations, attempting baseline and re-apply",
|
||||
err)
|
||||
//maybe we have an existing database, so attempt to baseline the existing
|
||||
|
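For reference, the Flyway flow used by `migrate()` above, pulled into a standalone sketch; the JDBC URL and the `walletdb` name are placeholders, while bitcoin-s derives them from the module's db config (empty username and password for SQLite):

```scala
import org.flywaydb.core.Flyway

// dbName arrives as e.g. "walletdb.sqlite"; only the prefix names the
// migration folder on the classpath.
val dbName = "walletdb.sqlite".split('.').head // "walletdb"

val flyway = Flyway
  .configure()
  .locations(s"classpath:$dbName/migration/")
  .dataSource("jdbc:sqlite:/tmp/bitcoin-s/walletdb.sqlite", "", "")
  .load()

val applied: Int = flyway.migrate() // number of migrations applied in this run
```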
@ -0,0 +1,82 @@
|
||||
package org.bitcoins.db
|
||||
|
||||
import java.nio.file.{Files, Path, Paths}
|
||||
|
||||
import org.bitcoins.core.util.BitcoinSLogger
|
||||
import slick.basic.DatabaseConfig
|
||||
import slick.jdbc.JdbcProfile
|
||||
|
||||
import scala.util.{Failure, Success, Try}
|
||||
|
||||
trait JdbcProfileComponent[+ConfigType <: AppConfig] extends BitcoinSLogger {
|
||||
|
||||
def appConfig: ConfigType
|
||||
|
||||
/**
|
||||
* The configuration details for connecting/using the database for our projects
|
||||
* that require datbase connections
|
||||
*/
|
||||
val dbConfig: DatabaseConfig[JdbcProfile] = {
|
||||
val slickDbConfig = {
|
||||
Try {
|
||||
DatabaseConfig.forConfig[JdbcProfile](path = appConfig.moduleName,
|
||||
config = appConfig.config)
|
||||
} match {
|
||||
case Success(value) =>
|
||||
value
|
||||
case Failure(exception) =>
|
||||
logger.error(s"Error when loading database from config: $exception")
|
||||
logger.error(s"Configuration: ${appConfig.config.asReadableJson}")
|
||||
throw exception
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(s"Resolved DB config: ${slickDbConfig.config.asReadableJson}")
|
||||
|
||||
val _ = createDbFileIfDNE()
|
||||
|
||||
slickDbConfig
|
||||
}
|
||||
|
||||
val profile: JdbcProfile = dbConfig.profile
|
||||
import profile.api._
|
||||
|
||||
lazy val jdbcUrl: String = {
|
||||
dbConfig.config.getString("db.url")
|
||||
}
|
||||
|
||||
/** The database we are connecting to */
|
||||
lazy val database: Database = {
|
||||
dbConfig.db
|
||||
}
|
||||
|
||||
/** The path where our DB is located */
|
||||
lazy val dbPath: Path = {
|
||||
val pathStr = appConfig.config.getString(s"${appConfig.moduleName}.db.path")
|
||||
val path = Paths.get(pathStr)
|
||||
logger.debug(s"DB path: $path")
|
||||
path
|
||||
}
|
||||
|
||||
/** The name of our database */
|
||||
// todo: what happens to this if we
|
||||
// dont use SQLite?
|
||||
lazy val dbName: String = {
|
||||
appConfig.config.getString(s"${appConfig.moduleName}.db.name")
|
||||
}
|
||||
|
||||
private def createDbFileIfDNE(): Unit = {
|
||||
//should add a check in here that we are using sqlite
|
||||
if (!Files.exists(dbPath)) {
|
||||
val _ = {
|
||||
logger.debug(s"Creating database directory=$dbPath")
|
||||
Files.createDirectories(dbPath)
|
||||
val dbFilePath = dbPath.resolve(dbName)
|
||||
logger.debug(s"Creating database file=$dbFilePath")
|
||||
Files.createFile(dbFilePath)
|
||||
}
|
||||
|
||||
()
|
||||
}
|
||||
}
|
||||
}
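What the profile component buys in practice is that the Slick profile and connection details become plain per-module configuration. A hedged sketch of such a block, following Slick's `DatabaseConfig` conventions; the exact keys bitcoin-s ships with may differ, so treat the values as illustrative:

```scala
import com.typesafe.config.{Config, ConfigFactory}

// Illustrative per-module Slick config; "chain" matches the moduleName above.
val chainDbConfig: Config = ConfigFactory.parseString(
  """
    |chain {
    |  profile = "slick.jdbc.SQLiteProfile$"   # e.g. "slick.jdbc.PostgresProfile$" for Postgres
    |  db {
    |    url = "jdbc:sqlite:/tmp/bitcoin-s/chaindb.sqlite"
    |    driver = "org.sqlite.JDBC"
    |    connectionPool = disabled
    |  }
    |}
  """.stripMargin)
```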
|
@ -1,21 +1,20 @@
|
||||
package org.bitcoins.db
|
||||
|
||||
import slick.jdbc.JdbcProfile
|
||||
|
||||
import scala.concurrent.Future
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
import scala.concurrent.ExecutionContext
|
||||
|
||||
sealed abstract class SlickUtil {
|
||||
trait SlickUtil[T, PrimaryKeyType] { _: CRUD[T, PrimaryKeyType] =>
|
||||
def profile: JdbcProfile
|
||||
|
||||
import profile.api._
|
||||
|
||||
/** Creates rows in a database that are not auto incremented */
|
||||
def createAllNoAutoInc[T, U <: Table[T]](
|
||||
ts: Vector[T],
|
||||
database: SafeDatabase,
|
||||
table: TableQuery[U])(
|
||||
def createAllNoAutoInc(ts: Vector[T], database: SafeDatabase)(
|
||||
implicit ec: ExecutionContext): Future[Vector[T]] = {
|
||||
val actions = (table ++= ts).andThen(DBIO.successful(ts)).transactionally
|
||||
val result = database.run(actions)
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
object SlickUtil extends SlickUtil
|
||||
|
@ -25,6 +25,7 @@ import org.bitcoins.wallet.config.WalletAppConfig
|
||||
import com.typesafe.config.ConfigFactory
|
||||
import java.nio.file.Paths
|
||||
import scala.util.Properties
|
||||
import scala.concurrent.ExecutionContext.Implicits.global
|
||||
|
||||
// reads $HOME/.bitcoin-s/
|
||||
val defaultConfig = WalletAppConfig.fromDefaultDatadir()
|
||||
|
@ -14,7 +14,6 @@ import org.bitcoins.testkit.node.{NodeTestUtil, NodeUnitTest}
|
||||
import org.bitcoins.testkit.wallet.BitcoinSWalletTest
|
||||
import org.bitcoins.wallet.api.UnlockedWalletApi
|
||||
import org.bitcoins.wallet.config.WalletAppConfig
|
||||
import org.bitcoins.wallet.models.SpendingInfoTable
|
||||
import org.scalatest.{DoNotDiscover, FutureOutcome}
|
||||
|
||||
import scala.concurrent.{Future, Promise}
|
||||
@ -146,14 +145,6 @@ class NeutrinoNodeWithWalletTest extends NodeUnitTest {
|
||||
|
||||
walletP.success(wallet)
|
||||
|
||||
def clearSpendingInfoTable(): Future[Int] = {
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
|
||||
val conf: WalletAppConfig = wallet.walletConfig
|
||||
val table = TableQuery[SpendingInfoTable]
|
||||
conf.database.run(table.delete)
|
||||
}
|
||||
|
||||
def condition(): Future[Boolean] = {
|
||||
for {
|
||||
balance <- wallet.getConfirmedBalance()
|
||||
@ -187,7 +178,7 @@ class NeutrinoNodeWithWalletTest extends NodeUnitTest {
|
||||
_ = assert(addresses.size == 2)
|
||||
_ = assert(utxos.size == 1)
|
||||
|
||||
_ <- clearSpendingInfoTable()
|
||||
_ <- wallet.clearUtxosAndAddresses()
|
||||
|
||||
addresses <- wallet.listAddresses()
|
||||
utxos <- wallet.listUtxos()
|
||||
|
@ -1,6 +1,6 @@
|
||||
package org.bitcoins.node
|
||||
|
||||
import org.bitcoins.testkit.util.BitcoinSUnitTest
|
||||
import org.bitcoins.testkit.util.{BitcoinSAsyncTest, BitcoinSUnitTest}
|
||||
import org.bitcoins.node.config.NodeAppConfig
|
||||
import org.bitcoins.core.config.TestNet3
|
||||
import com.typesafe.config.Config
|
||||
@ -10,7 +10,7 @@ import org.bitcoins.core.config.MainNet
|
||||
import ch.qos.logback.classic.Level
|
||||
import java.nio.file.Files
|
||||
|
||||
class NodeAppConfigTest extends BitcoinSUnitTest {
|
||||
class NodeAppConfigTest extends BitcoinSAsyncTest {
|
||||
val tempDir = Files.createTempDirectory("bitcoin-s")
|
||||
val config = NodeAppConfig(directory = tempDir)
|
||||
|
||||
|
@ -119,14 +119,14 @@ class P2PClientTest extends BitcoindRpcTest {
|
||||
override def beforeAll(): Unit = {
|
||||
implicit val chainConf = config.chainConf
|
||||
for {
|
||||
_ <- ChainDbManagement.createAll()
|
||||
_ <- chainConf.createAll()
|
||||
} yield ()
|
||||
}
|
||||
|
||||
override def afterAll(): Unit = {
|
||||
implicit val chainConf = config.chainConf
|
||||
for {
|
||||
_ <- ChainDbManagement.dropAll()
|
||||
_ <- chainConf.dropAll()
|
||||
} yield ()
|
||||
super.afterAll()
|
||||
}
|
||||
|
@ -25,7 +25,6 @@ import org.bitcoins.node.networking.peer.{
|
||||
}
|
||||
import org.bitcoins.node.util.BitcoinSNodeUtil.Mutable
|
||||
import org.bitcoins.rpc.util.AsyncUtil
|
||||
import slick.jdbc.SQLiteProfile
|
||||
|
||||
import scala.concurrent.duration.DurationInt
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
@ -55,7 +54,7 @@ trait Node extends NodeApi with ChainQueryApi with P2PLogger {
|
||||
this
|
||||
}
|
||||
|
||||
lazy val txDAO = BroadcastAbleTransactionDAO(SQLiteProfile)
|
||||
lazy val txDAO = BroadcastAbleTransactionDAO()
|
||||
|
||||
/** This is constructing a chain api from disk every time we call this method
|
||||
* This involves database calls which can be slow and expensive to construct
|
||||
|
@ -4,7 +4,7 @@ import java.nio.file.Path
|
||||
|
||||
import com.typesafe.config.Config
|
||||
import org.bitcoins.core.util.FutureUtil
|
||||
import org.bitcoins.db.AppConfig
|
||||
import org.bitcoins.db.{AppConfig, JdbcProfileComponent}
|
||||
import org.bitcoins.node.db.NodeDbManagement
|
||||
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
@ -15,10 +15,12 @@ import scala.concurrent.{ExecutionContext, Future}
|
||||
*/
|
||||
case class NodeAppConfig(
|
||||
private val directory: Path,
|
||||
private val confs: Config*)
|
||||
extends AppConfig {
|
||||
private val confs: Config*)(implicit override val ec: ExecutionContext)
|
||||
extends AppConfig
|
||||
with NodeDbManagement
|
||||
with JdbcProfileComponent[NodeAppConfig] {
|
||||
override protected[bitcoins] def configOverrides: List[Config] = confs.toList
|
||||
override protected[bitcoins] val moduleName: String = "node"
|
||||
override protected[bitcoins] def moduleName: String = "node"
|
||||
override protected[bitcoins] type ConfigType = NodeAppConfig
|
||||
override protected[bitcoins] def newConfigOfType(
|
||||
configs: Seq[Config]): NodeAppConfig =
|
||||
@ -26,13 +28,15 @@ case class NodeAppConfig(
|
||||
|
||||
protected[bitcoins] def baseDatadir: Path = directory
|
||||
|
||||
override def appConfig: NodeAppConfig = this
|
||||
|
||||
/**
|
||||
* Ensures correct tables and other required information is in
|
||||
* place for our node.
|
||||
*/
|
||||
override def initialize()(implicit ec: ExecutionContext): Future[Unit] = {
|
||||
logger.debug(s"Initializing node setup")
|
||||
val numMigrations = NodeDbManagement.migrate(this)
|
||||
val numMigrations = migrate()
|
||||
|
||||
logger.info(s"Applied $numMigrations migrations fro the node project")
|
||||
|
||||
@ -70,7 +74,8 @@ object NodeAppConfig {
|
||||
/** Constructs a node configuration from the default Bitcoin-S
|
||||
* data directory and given list of configuration overrides.
|
||||
*/
|
||||
def fromDefaultDatadir(confs: Config*): NodeAppConfig =
|
||||
def fromDefaultDatadir(confs: Config*)(
|
||||
implicit ec: ExecutionContext): NodeAppConfig =
|
||||
NodeAppConfig(AppConfig.DEFAULT_BITCOIN_S_DATADIR, confs: _*)
|
||||
|
||||
}
|
||||
|
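A minimal usage sketch of the reshaped NodeAppConfig (not part of the diff; values are illustrative). Constructing the config now requires an implicit ExecutionContext, and the table-management calls that used to live on the NodeDbManagement object are made on the config itself.

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import org.bitcoins.node.config.NodeAppConfig

// fromDefaultDatadir now takes an implicit ExecutionContext (see the hunk above)
val nodeConf: NodeAppConfig = NodeAppConfig.fromDefaultDatadir()

// createAll/dropAll/migrate come from NodeDbManagement mixed into the config
val setupF: Future[Unit] = nodeConf.createAll()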
@ -1,13 +1,22 @@
|
||||
package org.bitcoins.node.db
|
||||
|
||||
import org.bitcoins.db.DbManagement
|
||||
import org.bitcoins.node.models.BroadcastAbleTransactionTable
|
||||
import slick.lifted.TableQuery
|
||||
import org.bitcoins.db.{DbManagement, JdbcProfileComponent}
|
||||
import org.bitcoins.node.config.NodeAppConfig
|
||||
import org.bitcoins.node.models.BroadcastAbleTransactionDAO
|
||||
|
||||
object NodeDbManagement extends DbManagement {
|
||||
import scala.concurrent.ExecutionContext
|
||||
|
||||
private val txTable = TableQuery[BroadcastAbleTransactionTable]
|
||||
trait NodeDbManagement extends DbManagement {
|
||||
_: JdbcProfileComponent[NodeAppConfig] =>
|
||||
|
||||
override val allTables = List(txTable)
|
||||
import profile.api._
|
||||
|
||||
def ec: ExecutionContext
|
||||
|
||||
private lazy val txTable: TableQuery[Table[_]] = {
|
||||
BroadcastAbleTransactionDAO()(appConfig, ec).table
|
||||
}
|
||||
|
||||
override val allTables: List[TableQuery[Table[_]]] = List(txTable)
|
||||
|
||||
}
|
||||
|
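NodeDbManagement is now a trait constrained by a self-type rather than a standalone object. A generic, self-contained sketch of that pattern follows; the names Component, Management and Config are invented for illustration only.

// A trait with a self-type can only be mixed into classes that also provide the
// required component, so it can use the component's members without extending it.
trait Component { def profileName: String }

trait Management { _: Component =>
  def describe: String = s"managing tables for $profileName"
}

// Mirrors how NodeAppConfig mixes NodeDbManagement with JdbcProfileComponent[NodeAppConfig]
class Config extends Component with Management { val profileName = "sqlite" }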
@ -0,0 +1,12 @@
|
||||
package org.bitcoins.node.models
|
||||
|
||||
import org.bitcoins.core.protocol.transaction.Transaction
|
||||
import org.bitcoins.db.DbRowAutoInc
|
||||
|
||||
/** TXs we can broadcast over the P2P network */
|
||||
final case class BroadcastAbleTransaction(
|
||||
transaction: Transaction,
|
||||
id: Option[Long] = None)
|
||||
extends DbRowAutoInc[BroadcastAbleTransaction] {
|
||||
def copyWithId(id: Long): BroadcastAbleTransaction = copy(id = Some(id))
|
||||
}
|
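A small sketch of the DbRowAutoInc contract this case class satisfies, assuming a tx: Transaction value is already in scope; the id of 1L is illustrative.

val unsaved = BroadcastAbleTransaction(tx) // id defaults to None before insertion
val saved = unsaved.copyWithId(1L)         // how CRUDAutoInc attaches the generated id
assert(saved.id.contains(1L))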
@ -1,21 +1,25 @@
|
||||
package org.bitcoins.node.models
|
||||
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
import slick.jdbc.JdbcProfile
|
||||
import org.bitcoins.db.CRUDAutoInc
|
||||
import org.bitcoins.node.config.NodeAppConfig
|
||||
import scala.concurrent.ExecutionContext
|
||||
import slick.lifted.TableQuery
|
||||
import scala.concurrent.Future
|
||||
import org.bitcoins.core.crypto.DoubleSha256Digest
|
||||
|
||||
final case class BroadcastAbleTransactionDAO(profile: JdbcProfile)(
|
||||
implicit val appConfig: NodeAppConfig,
|
||||
import scala.concurrent.ExecutionContext
|
||||
import slick.lifted.ProvenShape
|
||||
|
||||
import scala.concurrent.Future
|
||||
import org.bitcoins.core.crypto.{DoubleSha256Digest, DoubleSha256DigestBE}
|
||||
import org.bitcoins.core.protocol.transaction.Transaction
|
||||
import scodec.bits.ByteVector
|
||||
|
||||
final case class BroadcastAbleTransactionDAO()(
|
||||
implicit override val appConfig: NodeAppConfig,
|
||||
val ec: ExecutionContext)
|
||||
extends CRUDAutoInc[BroadcastAbleTransaction] {
|
||||
|
||||
val table: TableQuery[BroadcastAbleTransactionTable] =
|
||||
TableQuery[BroadcastAbleTransactionTable]
|
||||
import profile.api._
|
||||
|
||||
override val table: profile.api.TableQuery[BroadcastAbleTransactionTable] =
|
||||
profile.api.TableQuery[BroadcastAbleTransactionTable]
|
||||
|
||||
/** Searches for a TX by its TXID */
|
||||
def findByHash(
|
||||
@ -26,4 +30,29 @@ final case class BroadcastAbleTransactionDAO(profile: JdbcProfile)(
|
||||
database.run(query.result).map(_.headOption)
|
||||
}
|
||||
|
||||
/** Table over TXs we can broadcast over the P2P network */
|
||||
class BroadcastAbleTransactionTable(tag: Tag)
|
||||
extends TableAutoInc[BroadcastAbleTransaction](tag, "broadcast_elements") {
|
||||
private type Tuple = (DoubleSha256DigestBE, ByteVector, Option[Long])
|
||||
|
||||
private val fromTuple: (Tuple => BroadcastAbleTransaction) = {
|
||||
case (txid, bytes, id) =>
|
||||
val tx = Transaction.fromBytes(bytes)
|
||||
require(tx.txId == txid.flip)
|
||||
BroadcastAbleTransaction(tx, id)
|
||||
}
|
||||
|
||||
private val toTuple: BroadcastAbleTransaction => Option[Tuple] = tx =>
|
||||
Some((tx.transaction.txId.flip, tx.transaction.bytes, tx.id))
|
||||
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def txid: Rep[DoubleSha256DigestBE] = column("txid", O.Unique)
|
||||
def bytes: Rep[ByteVector] = column("tx_bytes")
|
||||
|
||||
def * : ProvenShape[BroadcastAbleTransaction] =
|
||||
(txid, bytes, id.?) <>
|
||||
(fromTuple, toTuple)
|
||||
}
|
||||
|
||||
}
|
||||
|
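A hedged construction sketch for the updated DAO: the explicit SQLiteProfile argument is gone, and the Slick profile now comes from the implicit NodeAppConfig via JdbcProfileComponent. The default-datadir config below is illustrative.

import scala.concurrent.ExecutionContext.Implicits.global
import org.bitcoins.node.config.NodeAppConfig
import org.bitcoins.node.models.BroadcastAbleTransactionDAO

implicit val nodeConf: NodeAppConfig = NodeAppConfig.fromDefaultDatadir()
val txDAO = BroadcastAbleTransactionDAO() // previously BroadcastAbleTransactionDAO(SQLiteProfile)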
@ -206,7 +206,9 @@ case class P2PClientActor(
|
||||
logger.trace(s"Unaligned bytes: ${newUnalignedBytes.toHex}")
|
||||
}
|
||||
|
||||
val f: (PeerMessageReceiver, NetworkMessage) => Future[PeerMessageReceiver] = {
|
||||
val f: (
|
||||
PeerMessageReceiver,
|
||||
NetworkMessage) => Future[PeerMessageReceiver] = {
|
||||
case (peerMsgRecv: PeerMessageReceiver, m: NetworkMessage) =>
|
||||
logger.trace(s"Processing message=${m}")
|
||||
val msg = NetworkMessageReceived(m, P2PClient(self, peer))
|
||||
@ -214,7 +216,9 @@ case class P2PClientActor(
|
||||
}
|
||||
|
||||
logger.trace(s"About to process ${messages.length} messages")
|
||||
val newMsgReceiverF = FutureUtil.foldLeftAsync(currentPeerMsgHandlerRecv,messages)(f)(context.dispatcher)
|
||||
val newMsgReceiverF =
|
||||
FutureUtil.foldLeftAsync(currentPeerMsgHandlerRecv, messages)(f)(
|
||||
context.dispatcher)
|
||||
|
||||
val newMsgReceiver = Await.result(newMsgReceiverF, timeout)
|
||||
currentPeerMsgHandlerRecv = newMsgReceiver
|
||||
|
@ -11,7 +11,6 @@ import org.bitcoins.core.util.FutureUtil
|
||||
import org.bitcoins.node.config.NodeAppConfig
|
||||
import org.bitcoins.node.models.BroadcastAbleTransactionDAO
|
||||
import org.bitcoins.node.{NodeCallbacks, P2PLogger}
|
||||
import slick.jdbc.SQLiteProfile
|
||||
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
|
||||
@ -29,7 +28,7 @@ case class DataMessageHandler(
|
||||
chainConfig: ChainAppConfig)
|
||||
extends P2PLogger {
|
||||
|
||||
private val txDAO = BroadcastAbleTransactionDAO(SQLiteProfile)
|
||||
private val txDAO = BroadcastAbleTransactionDAO()
|
||||
|
||||
def handleDataPayload(
|
||||
payload: DataPayload,
|
||||
|
@ -6,6 +6,8 @@ import com.typesafe.config._
|
||||
import org.bitcoins.server.BitcoinSAppConfig
|
||||
import org.bitcoins.testkit.util.FileUtil
|
||||
|
||||
import scala.concurrent.ExecutionContext
|
||||
|
||||
object BitcoinSTestAppConfig {
|
||||
|
||||
/** Generates a temp directory with the prefix 'bitcoin-s-' */
|
||||
@ -17,7 +19,8 @@ object BitcoinSTestAppConfig {
|
||||
* 1) Data directory is set to user temp directory
|
||||
* 2) Logging is turned down to WARN
|
||||
*/
|
||||
def getSpvTestConfig(config: Config*): BitcoinSAppConfig = {
|
||||
def getSpvTestConfig(config: Config*)(
|
||||
implicit ec: ExecutionContext): BitcoinSAppConfig = {
|
||||
val overrideConf = ConfigFactory.parseString {
|
||||
"""
|
||||
|bitcoin-s {
|
||||
@ -30,7 +33,8 @@ object BitcoinSTestAppConfig {
|
||||
BitcoinSAppConfig(tmpDir(), (overrideConf +: config): _*)
|
||||
}
|
||||
|
||||
def getNeutrinoTestConfig(config: Config*): BitcoinSAppConfig = {
|
||||
def getNeutrinoTestConfig(config: Config*)(
|
||||
implicit ec: ExecutionContext): BitcoinSAppConfig = {
|
||||
val overrideConf = ConfigFactory.parseString {
|
||||
"""
|
||||
|bitcoin-s {
|
||||
|
@ -9,7 +9,6 @@ import org.bitcoins.chain.api.ChainApi
|
||||
import org.bitcoins.chain.blockchain.ChainHandler
|
||||
import org.bitcoins.chain.blockchain.sync.ChainSync
|
||||
import org.bitcoins.chain.config.ChainAppConfig
|
||||
import org.bitcoins.chain.db.ChainDbManagement
|
||||
import org.bitcoins.chain.models._
|
||||
import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.core.protocol.blockchain.{Block, BlockHeader}
|
||||
@ -456,21 +455,20 @@ object ChainUnitTest extends ChainVerificationLogger {
|
||||
|
||||
/** Creates the [[org.bitcoins.chain.models.BlockHeaderTable]] */
|
||||
private def setupHeaderTable()(
|
||||
implicit appConfig: ChainAppConfig,
|
||||
ec: ExecutionContext): Future[Unit] = {
|
||||
ChainDbManagement.createHeaderTable(createIfNotExists = true)
|
||||
implicit appConfig: ChainAppConfig): Future[Unit] = {
|
||||
appConfig.createHeaderTable(createIfNotExists = true)
|
||||
}
|
||||
|
||||
def setupAllTables()(
|
||||
implicit appConfig: ChainAppConfig,
|
||||
ec: ExecutionContext): Future[Unit] = {
|
||||
ChainDbManagement.createAll()
|
||||
appConfig.createAll()
|
||||
}
|
||||
|
||||
def destroyAllTables()(
|
||||
implicit appConfig: ChainAppConfig,
|
||||
ec: ExecutionContext): Future[Unit] = {
|
||||
ChainDbManagement.dropAll()
|
||||
appConfig.dropAll()
|
||||
}
|
||||
|
||||
/** Creates the [[org.bitcoins.chain.models.BlockHeaderTable]] and inserts the genesis header */
|
||||
|
@ -1,17 +1,15 @@
|
||||
package org.bitcoins.testkit.fixtures
|
||||
|
||||
import org.bitcoins.node.db.NodeDbManagement
|
||||
import org.bitcoins.node.models.BroadcastAbleTransactionDAO
|
||||
import org.bitcoins.testkit.node.NodeUnitTest
|
||||
import org.scalatest._
|
||||
import slick.jdbc.SQLiteProfile
|
||||
|
||||
case class NodeDAOs(txDAO: BroadcastAbleTransactionDAO)
|
||||
|
||||
/** Provides a fixture with all the DAOs used by the node project */
|
||||
trait NodeDAOFixture extends NodeUnitTest {
|
||||
private lazy val daos = {
|
||||
val tx = BroadcastAbleTransactionDAO(SQLiteProfile)
|
||||
val tx = BroadcastAbleTransactionDAO()
|
||||
NodeDAOs(tx)
|
||||
}
|
||||
|
||||
@ -19,10 +17,10 @@ trait NodeDAOFixture extends NodeUnitTest {
|
||||
|
||||
def withFixture(test: OneArgAsyncTest): FutureOutcome =
|
||||
makeFixture(build = () =>
|
||||
NodeDbManagement
|
||||
.createAll()(nodeConfig, executionContext)
|
||||
nodeConfig
|
||||
.createAll()(executionContext)
|
||||
.map(_ => daos),
|
||||
destroy = () =>
|
||||
NodeDbManagement
|
||||
.dropAll()(nodeConfig, executionContext))(test)
|
||||
nodeConfig
|
||||
.dropAll()(executionContext))(test)
|
||||
}
|
||||
|
@ -1,11 +1,10 @@
|
||||
package org.bitcoins.testkit.fixtures
|
||||
|
||||
import org.bitcoins.testkit.wallet.BitcoinSWalletTest
|
||||
import org.bitcoins.wallet.config.WalletAppConfig
|
||||
import org.bitcoins.wallet.models._
|
||||
import org.scalatest._
|
||||
import org.scalatest.flatspec.FixtureAsyncFlatSpec
|
||||
import org.bitcoins.wallet.models._
|
||||
import org.bitcoins.wallet.config.WalletAppConfig
|
||||
import org.bitcoins.wallet.db.WalletDbManagement
|
||||
|
||||
case class WalletDAOs(
|
||||
accountDAO: AccountDAO,
|
||||
@ -32,6 +31,6 @@ trait WalletDAOFixture extends FixtureAsyncFlatSpec with BitcoinSWalletTest {
|
||||
implicit private val walletConfig: WalletAppConfig = config
|
||||
|
||||
def withFixture(test: OneArgAsyncTest): FutureOutcome =
|
||||
makeFixture(build = () => WalletDbManagement.createAll().map(_ => daos),
|
||||
destroy = () => WalletDbManagement.dropAll())(test)
|
||||
makeFixture(build = () => walletConfig.createAll().map(_ => daos),
|
||||
destroy = () => walletConfig.dropAll())(test)
|
||||
}
|
||||
|
@ -24,7 +24,6 @@ import org.bitcoins.testkit.util.FileUtil
|
||||
import org.bitcoins.testkit.wallet.FundWalletUtil.FundedWallet
|
||||
import org.bitcoins.wallet.api.{LockedWalletApi, UnlockedWalletApi}
|
||||
import org.bitcoins.wallet.config.WalletAppConfig
|
||||
import org.bitcoins.wallet.db.WalletDbManagement
|
||||
import org.bitcoins.wallet.{Wallet, WalletLogger}
|
||||
import org.scalatest._
|
||||
|
||||
@ -553,16 +552,16 @@ object BitcoinSWalletTest extends WalletLogger {
|
||||
} yield ()
|
||||
}
|
||||
|
||||
def destroyWallet(wallet: UnlockedWalletApi)(
|
||||
implicit ec: ExecutionContext): Future[Unit] = {
|
||||
def destroyWallet(wallet: UnlockedWalletApi): Future[Unit] = {
|
||||
destroyWallet(wallet.lock())
|
||||
}
|
||||
|
||||
def destroyWallet(wallet: LockedWalletApi)(
|
||||
implicit ec: ExecutionContext): Future[Unit] = {
|
||||
val destroyWalletF = WalletDbManagement
|
||||
.dropAll()(config = wallet.walletConfig, ec = ec)
|
||||
.map(_ => ())
|
||||
def destroyWallet(wallet: LockedWalletApi): Future[Unit] = {
|
||||
import wallet.walletConfig.ec
|
||||
val destroyWalletF =
|
||||
wallet.walletConfig
|
||||
.dropAll()
|
||||
.map(_ => ())
|
||||
destroyWalletF
|
||||
}
|
||||
|
||||
|
@ -1,37 +1,21 @@
|
||||
package org.bitcoins.testkit.db
|
||||
|
||||
import org.bitcoins.testkit.util.BitcoinSUnitTest
|
||||
import org.bitcoins.testkit.Implicits._
|
||||
import org.bitcoins.server.BitcoinSAppConfig
|
||||
import org.bitcoins.server.BitcoinSAppConfig._
|
||||
import com.typesafe.config.ConfigFactory
|
||||
import org.bitcoins.core.config.TestNet3
|
||||
import org.bitcoins.chain.models.BlockHeaderDAO
|
||||
import akka.actor.ActorSystem
|
||||
import scala.concurrent.ExecutionContext
|
||||
import org.bitcoins.wallet.models.AccountDAO
|
||||
import org.bitcoins.testkit.chain.ChainTestUtil
|
||||
import org.bitcoins.chain.models.BlockHeaderDb
|
||||
import org.bitcoins.chain.models.BlockHeaderDbHelper
|
||||
import org.bitcoins.wallet.models.AccountDb
|
||||
import org.bitcoins.core.hd.HDAccount
|
||||
import org.bitcoins.core.hd.HDCoin
|
||||
import org.bitcoins.core.hd.HDPurposes
|
||||
import org.bitcoins.core.hd.HDCoinType
|
||||
import org.bitcoins.testkit.core.gen.CryptoGenerators
|
||||
import org.bitcoins.node.db.NodeDbManagement
|
||||
import org.bitcoins.db.DbManagement
|
||||
import org.bitcoins.wallet.db.WalletDbManagement
|
||||
import org.bitcoins.db.SQLiteTableInfo
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
import org.bitcoins.db.CRUD
|
||||
import java.nio.file.Files
|
||||
|
||||
import com.typesafe.config.ConfigFactory
|
||||
import org.bitcoins.chain.models.BlockHeaderDAO
|
||||
import org.bitcoins.core.config.TestNet3
|
||||
import org.bitcoins.core.hd.{HDAccount, HDCoin, HDCoinType, HDPurposes}
|
||||
import org.bitcoins.db.{CRUD, SQLiteTableInfo}
|
||||
import org.bitcoins.server.BitcoinSAppConfig._
|
||||
import org.bitcoins.testkit.BitcoinSTestAppConfig
|
||||
import org.bitcoins.testkit.Implicits._
|
||||
import org.bitcoins.testkit.chain.ChainTestUtil
|
||||
import org.bitcoins.testkit.core.gen.CryptoGenerators
|
||||
import org.bitcoins.testkit.util.BitcoinSAsyncTest
|
||||
import org.bitcoins.wallet.models.{AccountDAO, AccountDb}
|
||||
|
||||
class AppConfigTest extends BitcoinSUnitTest {
|
||||
|
||||
val system = ActorSystem()
|
||||
implicit val ec: ExecutionContext = system.dispatcher
|
||||
class AppConfigTest extends BitcoinSAsyncTest {
|
||||
|
||||
behavior of "BitcoinSAppConfig"
|
||||
|
||||
@ -70,58 +54,4 @@ class AppConfigTest extends BitcoinSUnitTest {
|
||||
assert(walletConf.dbName != nodeConf.dbName)
|
||||
}
|
||||
|
||||
it must "be able to write to distinct databases" in {
|
||||
implicit val config = BitcoinSTestAppConfig.getSpvTestConfig()
|
||||
val chainConf = config.chainConf
|
||||
val walletConf = config.walletConf
|
||||
val nodeConf = config.nodeConf
|
||||
val allConfs = List(chainConf, walletConf, nodeConf)
|
||||
|
||||
val bhDAO = BlockHeaderDAO()
|
||||
val accountDAO = AccountDAO()
|
||||
|
||||
allConfs.foreach { conf =>
|
||||
val fullDbPath = conf.dbPath.resolve(conf.dbName)
|
||||
assert(!Files.exists(fullDbPath))
|
||||
}
|
||||
|
||||
val writeF = {
|
||||
for {
|
||||
_ <- config.initialize()
|
||||
_ = {
|
||||
allConfs.foreach { conf =>
|
||||
val fullDbPath = conf.dbPath.resolve(conf.dbName)
|
||||
assert(Files.isRegularFile(fullDbPath))
|
||||
}
|
||||
}
|
||||
_ <- {
|
||||
bhDAO.create(ChainTestUtil.regTestGenesisHeaderDb)
|
||||
}
|
||||
_ <- {
|
||||
val hdAccount =
|
||||
HDAccount(HDCoin(HDPurposes.Legacy, HDCoinType.Bitcoin), 0)
|
||||
val xpub = CryptoGenerators.extPublicKey.sampleSome
|
||||
val account = AccountDb(xpub, hdAccount)
|
||||
accountDAO.create(account)
|
||||
}
|
||||
} yield ()
|
||||
}
|
||||
|
||||
for {
|
||||
_ <- writeF
|
||||
nodeTables <- NodeDbManagement.listTables(bhDAO.database)
|
||||
walletTables <- WalletDbManagement.listTables(accountDAO.database)
|
||||
} yield {
|
||||
def hasTable(tables: Seq[SQLiteTableInfo], dao: CRUD[_, _]): Boolean =
|
||||
tables.exists(_.name == dao.table.baseTableRow.tableName)
|
||||
|
||||
assert(hasTable(walletTables, accountDAO))
|
||||
assert(!hasTable(walletTables, bhDAO))
|
||||
|
||||
assert(hasTable(nodeTables, bhDAO))
|
||||
assert(!hasTable(nodeTables, accountDAO))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
package org.bitcoins.wallet
|
||||
|
||||
import org.bitcoins.testkit.util.BitcoinSUnitTest
|
||||
import org.bitcoins.testkit.util.{BitcoinSAsyncTest, BitcoinSUnitTest}
|
||||
import org.bitcoins.node.config.NodeAppConfig
|
||||
import org.bitcoins.core.config.TestNet3
|
||||
import com.typesafe.config.Config
|
||||
@ -9,13 +9,16 @@ import org.bitcoins.core.config.RegTest
|
||||
import org.bitcoins.core.config.MainNet
|
||||
import org.bitcoins.wallet.config.WalletAppConfig
|
||||
import java.nio.file.Paths
|
||||
|
||||
import org.bitcoins.core.hd.HDPurposes
|
||||
import java.nio.file.Files
|
||||
|
||||
import ch.qos.logback.classic.Level
|
||||
import java.nio.file.Path
|
||||
|
||||
import scala.util.Properties
|
||||
|
||||
class WalletAppConfigTest extends BitcoinSUnitTest {
|
||||
class WalletAppConfigTest extends BitcoinSAsyncTest {
|
||||
|
||||
val tempDir = Files.createTempDirectory("bitcoin-s")
|
||||
val config = WalletAppConfig(directory = tempDir)
|
||||
|
@ -6,7 +6,7 @@ import java.util.concurrent.TimeUnit
|
||||
import com.typesafe.config.Config
|
||||
import org.bitcoins.core.hd._
|
||||
import org.bitcoins.core.util.FutureUtil
|
||||
import org.bitcoins.db.AppConfig
|
||||
import org.bitcoins.db.{AppConfig, JdbcProfileComponent}
|
||||
import org.bitcoins.keymanager.{KeyManagerParams, WalletStorage}
|
||||
import org.bitcoins.wallet.db.WalletDbManagement
|
||||
|
||||
@ -19,8 +19,10 @@ import scala.concurrent.{ExecutionContext, Future}
|
||||
*/
|
||||
case class WalletAppConfig(
|
||||
private val directory: Path,
|
||||
private val conf: Config*)
|
||||
extends AppConfig {
|
||||
private val conf: Config*)(implicit override val ec: ExecutionContext)
|
||||
extends AppConfig
|
||||
with WalletDbManagement
|
||||
with JdbcProfileComponent[WalletAppConfig] {
|
||||
override protected[bitcoins] def configOverrides: List[Config] = conf.toList
|
||||
override protected[bitcoins] def moduleName: String = "wallet"
|
||||
override protected[bitcoins] type ConfigType = WalletAppConfig
|
||||
@ -30,6 +32,8 @@ case class WalletAppConfig(
|
||||
|
||||
protected[bitcoins] def baseDatadir: Path = directory
|
||||
|
||||
override def appConfig: WalletAppConfig = this
|
||||
|
||||
lazy val defaultAccountKind: HDPurpose =
|
||||
config.getString("wallet.defaultAccountType") match {
|
||||
case "legacy" => HDPurposes.Legacy
|
||||
@ -79,7 +83,7 @@ case class WalletAppConfig(
|
||||
}
|
||||
|
||||
val numMigrations = {
|
||||
WalletDbManagement.migrate(this)
|
||||
migrate()
|
||||
}
|
||||
|
||||
logger.info(s"Applied $numMigrations migrations to the wallet project")
|
||||
@ -127,6 +131,7 @@ object WalletAppConfig {
|
||||
/** Constructs a wallet configuration from the default Bitcoin-S
|
||||
* data directory and given list of configuration overrides.
|
||||
*/
|
||||
def fromDefaultDatadir(confs: Config*): WalletAppConfig =
|
||||
def fromDefaultDatadir(confs: Config*)(
|
||||
implicit ec: ExecutionContext): WalletAppConfig =
|
||||
WalletAppConfig(AppConfig.DEFAULT_BITCOIN_S_DATADIR, confs: _*)
|
||||
}
|
||||
|
@ -1,25 +1,51 @@
|
||||
package org.bitcoins.wallet.db
|
||||
|
||||
import org.bitcoins.db.DbManagement
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
import org.bitcoins.wallet.models._
|
||||
import org.bitcoins.db.{DbManagement, JdbcProfileComponent}
|
||||
import org.bitcoins.wallet.config.WalletAppConfig
|
||||
import org.bitcoins.wallet.models.{
|
||||
AccountDAO,
|
||||
AddressDAO,
|
||||
IncomingTransactionDAO,
|
||||
OutgoingTransactionDAO,
|
||||
SpendingInfoDAO,
|
||||
TransactionDAO
|
||||
}
|
||||
|
||||
sealed abstract class WalletDbManagement extends DbManagement {
|
||||
private val accountTable = TableQuery[AccountTable]
|
||||
private val addressTable = TableQuery[AddressTable]
|
||||
private val utxoTable = TableQuery[SpendingInfoTable]
|
||||
private val txTable = TableQuery[TransactionTable]
|
||||
private val incomingTxTable = TableQuery[IncomingTransactionTable]
|
||||
private val outgoingTxTable = TableQuery[OutgoingTransactionTable]
|
||||
import scala.concurrent.ExecutionContext
|
||||
|
||||
override val allTables: List[TableQuery[_ <: Table[_]]] =
|
||||
trait WalletDbManagement extends DbManagement {
|
||||
_: JdbcProfileComponent[WalletAppConfig] =>
|
||||
|
||||
import profile.api._
|
||||
|
||||
def ec: ExecutionContext
|
||||
|
||||
private lazy val accountTable: TableQuery[Table[_]] = {
|
||||
AccountDAO()(ec, appConfig).table
|
||||
}
|
||||
private lazy val addressTable: TableQuery[Table[_]] = {
|
||||
AddressDAO()(ec, appConfig).table
|
||||
}
|
||||
private lazy val utxoTable: TableQuery[Table[_]] = {
|
||||
SpendingInfoDAO()(ec, appConfig).table
|
||||
}
|
||||
private lazy val txTable: TableQuery[Table[_]] = {
|
||||
TransactionDAO()(ec, appConfig).table
|
||||
}
|
||||
private lazy val incomingTxTable: TableQuery[Table[_]] = {
|
||||
IncomingTransactionDAO()(ec, appConfig).table
|
||||
}
|
||||
private lazy val outgoingTxTable: TableQuery[Table[_]] = {
|
||||
OutgoingTransactionDAO()(ec, appConfig).table
|
||||
}
|
||||
|
||||
override lazy val allTables: List[TableQuery[Table[_]]] = {
|
||||
List(accountTable,
|
||||
addressTable,
|
||||
utxoTable,
|
||||
txTable,
|
||||
incomingTxTable,
|
||||
outgoingTxTable)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
object WalletDbManagement extends WalletDbManagement
|
||||
|
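A minimal usage sketch, assuming a default data directory: with WalletDbManagement mixed into WalletAppConfig, table creation and teardown are called on the config instance, which is how the WalletDAOFixture shown earlier drives its build and destroy steps.

import scala.concurrent.ExecutionContext.Implicits.global
import org.bitcoins.wallet.config.WalletAppConfig

val walletConf: WalletAppConfig = WalletAppConfig.fromDefaultDatadir()
val buildF = walletConf.createAll()   // was WalletDbManagement.createAll()
val destroyF = walletConf.dropAll()   // was WalletDbManagement.dropAll()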
@ -1,29 +1,31 @@
|
||||
package org.bitcoins.wallet.models
|
||||
|
||||
import org.bitcoins.core.crypto.ExtPublicKey
|
||||
import org.bitcoins.core.hd._
|
||||
import org.bitcoins.db.{CRUD, SlickUtil}
|
||||
import org.bitcoins.wallet.config._
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
import slick.lifted.{PrimaryKey, ProvenShape}
|
||||
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
|
||||
case class AccountDAO()(
|
||||
implicit val ec: ExecutionContext,
|
||||
val appConfig: WalletAppConfig)
|
||||
extends CRUD[AccountDb, (HDCoin, Int)] {
|
||||
|
||||
override val appConfig: WalletAppConfig)
|
||||
extends CRUD[AccountDb, (HDCoin, Int)]
|
||||
with SlickUtil[AccountDb, (HDCoin, Int)] {
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
import profile.api._
|
||||
|
||||
override val table: TableQuery[AccountTable] = TableQuery[AccountTable]
|
||||
|
||||
override def createAll(ts: Vector[AccountDb]): Future[Vector[AccountDb]] =
|
||||
SlickUtil.createAllNoAutoInc(ts, database, table)
|
||||
createAllNoAutoInc(ts, safeDatabase)
|
||||
|
||||
override protected def findByPrimaryKeys(
|
||||
ids: Vector[(HDCoin, Int)]): Query[Table[_], AccountDb, Seq] = ???
|
||||
ids: Vector[(HDCoin, Int)]): Query[AccountTable, AccountDb, Seq] = ???
|
||||
|
||||
override def findByPrimaryKey(
|
||||
id: (HDCoin, Int)): Query[Table[_], AccountDb, Seq] = {
|
||||
id: (HDCoin, Int)): Query[AccountTable, AccountDb, Seq] = {
|
||||
val (coin, index) = id
|
||||
table
|
||||
.filter(_.coinType === coin.coinType)
|
||||
@ -32,7 +34,7 @@ case class AccountDAO()(
|
||||
}
|
||||
|
||||
override def findAll(
|
||||
accounts: Vector[AccountDb]): Query[Table[_], AccountDb, Seq] =
|
||||
accounts: Vector[AccountDb]): Query[AccountTable, AccountDb, Seq] =
|
||||
findByPrimaryKeys(
|
||||
accounts.map(acc => (acc.hdAccount.coin, acc.hdAccount.index)))
|
||||
|
||||
@ -53,4 +55,39 @@ case class AccountDAO()(
|
||||
s"More than one account per account=${account}, got=${accounts}")
|
||||
}
|
||||
}
|
||||
|
||||
class AccountTable(tag: Tag)
|
||||
extends Table[AccountDb](tag, "wallet_accounts") {
|
||||
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def xpub: Rep[ExtPublicKey] = column[ExtPublicKey]("xpub")
|
||||
|
||||
def purpose: Rep[HDPurpose] = column[HDPurpose]("hd_purpose")
|
||||
|
||||
def coinType: Rep[HDCoinType] = column[HDCoinType]("coin")
|
||||
|
||||
def index: Rep[Int] = column[Int]("account_index")
|
||||
|
||||
private type AccountTuple = (HDPurpose, ExtPublicKey, HDCoinType, Int)
|
||||
|
||||
private val fromTuple: AccountTuple => AccountDb = {
|
||||
case (purpose, pub, coin, index) =>
|
||||
AccountDb(pub, HDAccount(HDCoin(purpose, coin), index))
|
||||
}
|
||||
|
||||
private val toTuple: AccountDb => Option[AccountTuple] = account =>
|
||||
Some(
|
||||
(account.hdAccount.purpose,
|
||||
account.xpub,
|
||||
account.hdAccount.coin.coinType,
|
||||
account.hdAccount.index))
|
||||
|
||||
def * : ProvenShape[AccountDb] =
|
||||
(purpose, xpub, coinType, index) <> (fromTuple, toTuple)
|
||||
|
||||
def primaryKey: PrimaryKey =
|
||||
primaryKey("pk_account", sourceColumns = (purpose, coinType, index))
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,16 @@
|
||||
package org.bitcoins.wallet.models
|
||||
|
||||
import org.bitcoins.core.crypto._
|
||||
import org.bitcoins.core.hd._
|
||||
import org.bitcoins.keymanager.util.HDUtil
|
||||
|
||||
/** Represents the xpub at the account level, NOT the root xpub.
  * The root xpub, in conjunction with the path specified in hdAccount,
  * can be used to derive this account-level xpub. */
|
||||
case class AccountDb(xpub: ExtPublicKey, hdAccount: HDAccount) {
|
||||
def xpubVersion: ExtKeyPubVersion = xpub.version
|
||||
|
||||
def xprivVersion: ExtKeyPrivVersion =
|
||||
HDUtil.getMatchingExtKeyVersion(xpubVersion)
|
||||
|
||||
}
|
@ -1,36 +1,60 @@
|
||||
package org.bitcoins.wallet.models
|
||||
|
||||
import org.bitcoins.core.crypto.ECPublicKey
|
||||
import org.bitcoins.core.hd.{HDAccount, HDChainType}
|
||||
import org.bitcoins.core.protocol.BitcoinAddress
|
||||
import org.bitcoins.core.protocol.script.ScriptPubKey
|
||||
import org.bitcoins.core.crypto.{ECPublicKey, Sha256Hash160Digest}
|
||||
import org.bitcoins.core.hd.{
|
||||
HDAccount,
|
||||
HDChainType,
|
||||
HDCoinType,
|
||||
HDPurpose,
|
||||
HDPurposes,
|
||||
LegacyHDPath,
|
||||
NestedSegWitHDPath,
|
||||
SegWitHDPath
|
||||
}
|
||||
import org.bitcoins.core.protocol.{
|
||||
Bech32Address,
|
||||
BitcoinAddress,
|
||||
P2PKHAddress,
|
||||
P2SHAddress
|
||||
}
|
||||
import org.bitcoins.core.protocol.script.{ScriptPubKey, ScriptWitness}
|
||||
import org.bitcoins.core.script.ScriptType
|
||||
import org.bitcoins.db.{CRUD, SlickUtil}
|
||||
import org.bitcoins.wallet.config.WalletAppConfig
|
||||
import slick.dbio.Effect
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
import slick.lifted.TableQuery
|
||||
import slick.sql.SqlAction
|
||||
import slick.lifted.{ForeignKeyQuery, ProvenShape}
|
||||
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
|
||||
case class AddressDAO()(
|
||||
implicit ec: ExecutionContext,
|
||||
config: WalletAppConfig
|
||||
) extends CRUD[AddressDb, BitcoinAddress] {
|
||||
) extends CRUD[AddressDb, BitcoinAddress]
|
||||
with SlickUtil[AddressDb, BitcoinAddress] {
|
||||
import profile.api._
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
override val table: TableQuery[AddressTable] = TableQuery[AddressTable]
|
||||
private val spendingInfoTable = TableQuery[SpendingInfoTable]
|
||||
override val table: profile.api.TableQuery[AddressTable] =
|
||||
TableQuery[AddressTable]
|
||||
private lazy val spendingInfoTable: profile.api.TableQuery[
|
||||
SpendingInfoDAO#SpendingInfoTable] = {
|
||||
SpendingInfoDAO().table
|
||||
}
|
||||
|
||||
private lazy val accountTable: slick.lifted.TableQuery[
|
||||
AccountDAO#AccountTable] = {
|
||||
AccountDAO().table
|
||||
}
|
||||
|
||||
override def createAll(ts: Vector[AddressDb]): Future[Vector[AddressDb]] =
|
||||
SlickUtil.createAllNoAutoInc(ts, database, table)
|
||||
createAllNoAutoInc(ts, safeDatabase)
|
||||
|
||||
/** Finds the rows that correlate to the given primary keys */
|
||||
override def findByPrimaryKeys(
|
||||
addresses: Vector[BitcoinAddress]): Query[Table[_], AddressDb, Seq] =
|
||||
addresses: Vector[BitcoinAddress]): Query[AddressTable, AddressDb, Seq] =
|
||||
table.filter(_.address.inSet(addresses))
|
||||
|
||||
override def findAll(ts: Vector[AddressDb]): Query[Table[_], AddressDb, Seq] =
|
||||
override def findAll(
|
||||
ts: Vector[AddressDb]): Query[AddressTable, AddressDb, Seq] =
|
||||
findByPrimaryKeys(ts.map(_.address))
|
||||
|
||||
def findAddress(addr: BitcoinAddress): Future[Option[AddressDb]] = {
|
||||
@ -57,29 +81,34 @@ case class AddressDAO()(
|
||||
val query =
|
||||
findMostRecentForChain(hdAccount, HDChainType.Change)
|
||||
|
||||
database.run(query)
|
||||
safeDatabase.run(query)
|
||||
}
|
||||
|
||||
/** Finds all public keys in the wallet */
|
||||
def findAllPubkeys(): Future[Vector[ECPublicKey]] = {
|
||||
val query = table.map(_.ecPublicKey).distinct
|
||||
database.run(query.result).map(_.toVector)
|
||||
safeDatabase.run(query.result).map(_.toVector)
|
||||
}
|
||||
|
||||
/** Finds all SPKs in the wallet */
|
||||
def findAllSPKs(): Future[Vector[ScriptPubKey]] = {
|
||||
val query = table.map(_.scriptPubKey).distinct
|
||||
database.run(query.result).map(_.toVector)
|
||||
safeDatabase.run(query.result).map(_.toVector)
|
||||
}
|
||||
|
||||
def getUnusedAddresses: Future[Vector[AddressDb]] = {
|
||||
val query = {
|
||||
val query: slick.lifted.Query[
|
||||
(AddressTable, _),
|
||||
(AddressTable#TableElementType, Option[SpendingInfoDb]),
|
||||
Seq] = {
|
||||
val joined =
|
||||
table.joinLeft(spendingInfoTable).on(_.scriptPubKey === _.scriptPubKey)
|
||||
table
|
||||
.joinLeft(spendingInfoTable)
|
||||
.on(_.scriptPubKey === _.scriptPubKey)
|
||||
joined.filter(_._2.isEmpty)
|
||||
}
|
||||
|
||||
database.runVec(query.result).map(_.map(_._1))
|
||||
safeDatabase.runVec(query.result).map(_.map(_._1))
|
||||
}
|
||||
|
||||
def getUnusedAddresses(hdAccount: HDAccount): Future[Vector[AddressDb]] = {
|
||||
@ -88,7 +117,10 @@ case class AddressDAO()(
|
||||
|
||||
private def findMostRecentForChain(
|
||||
account: HDAccount,
|
||||
chain: HDChainType): SqlAction[Option[AddressDb], NoStream, Effect.Read] = {
|
||||
chain: HDChainType): slick.sql.SqlAction[
|
||||
Option[AddressDb],
|
||||
NoStream,
|
||||
Effect.Read] = {
|
||||
addressesForAccountQuery(account.index)
|
||||
.filter(_.purpose === account.purpose)
|
||||
.filter(_.accountCoin === account.coin.coinType)
|
||||
@ -106,6 +138,185 @@ case class AddressDAO()(
|
||||
hdAccount: HDAccount): Future[Option[AddressDb]] = {
|
||||
val query =
|
||||
findMostRecentForChain(hdAccount, HDChainType.External)
|
||||
database.run(query)
|
||||
safeDatabase.run(query)
|
||||
}
|
||||
|
||||
/**
|
||||
* todo: this needs design rework.
|
||||
* todo: https://github.com/bitcoin-s/bitcoin-s-core/pull/391#discussion_r274188334
|
||||
*/
|
||||
class AddressTable(tag: Tag) extends Table[AddressDb](tag, "addresses") {
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def purpose: Rep[HDPurpose] = column("hd_purpose")
|
||||
|
||||
def accountCoin: Rep[HDCoinType] = column("hd_coin")
|
||||
|
||||
def accountIndex: Rep[Int] = column("account_index")
|
||||
|
||||
def accountChainType: Rep[HDChainType] = column("hd_chain_type")
|
||||
|
||||
def addressIndex: Rep[Int] = column("address_index")
|
||||
|
||||
def address: Rep[BitcoinAddress] = column("address", O.PrimaryKey)
|
||||
|
||||
def ecPublicKey: Rep[ECPublicKey] = column("pubkey")
|
||||
|
||||
def hashedPubKey: Rep[Sha256Hash160Digest] = column("hashed_pubkey")
|
||||
|
||||
def scriptType: Rep[ScriptType] = column("script_type")
|
||||
|
||||
def scriptPubKey: Rep[ScriptPubKey] = column("script_pub_key", O.Unique)
|
||||
|
||||
def scriptWitness: Rep[Option[ScriptWitness]] = column("script_witness")
|
||||
|
||||
private type AddressTuple = (
|
||||
HDPurpose,
|
||||
Int,
|
||||
HDCoinType,
|
||||
HDChainType,
|
||||
BitcoinAddress,
|
||||
Option[ScriptWitness],
|
||||
ScriptPubKey,
|
||||
Int,
|
||||
ECPublicKey,
|
||||
Sha256Hash160Digest,
|
||||
ScriptType)
|
||||
|
||||
private val fromTuple: AddressTuple => AddressDb = {
|
||||
case (
|
||||
purpose,
|
||||
accountIndex,
|
||||
accountCoin,
|
||||
accountChain,
|
||||
address,
|
||||
scriptWitnessOpt,
|
||||
scriptPubKey,
|
||||
addressIndex,
|
||||
pubKey,
|
||||
hashedPubKey,
|
||||
scriptType @ _ // what should we do about this? scriptType is inferrable from purpose
|
||||
) =>
|
||||
(purpose, address, scriptWitnessOpt) match {
|
||||
case (HDPurposes.SegWit,
|
||||
bechAddr: Bech32Address,
|
||||
Some(scriptWitness)) =>
|
||||
val path =
|
||||
SegWitHDPath(coinType = accountCoin,
|
||||
accountIndex = accountIndex,
|
||||
chainType = accountChain,
|
||||
addressIndex = addressIndex)
|
||||
|
||||
SegWitAddressDb(path,
|
||||
ecPublicKey = pubKey,
|
||||
hashedPubKey = hashedPubKey,
|
||||
address = bechAddr,
|
||||
witnessScript = scriptWitness,
|
||||
scriptPubKey = scriptPubKey)
|
||||
|
||||
case (HDPurposes.Legacy, legacyAddr: P2PKHAddress, None) =>
|
||||
val path = LegacyHDPath(coinType = accountCoin,
|
||||
accountIndex = accountIndex,
|
||||
chainType = accountChain,
|
||||
addressIndex = addressIndex)
|
||||
LegacyAddressDb(path,
|
||||
pubKey,
|
||||
hashedPubKey,
|
||||
legacyAddr,
|
||||
scriptPubKey = scriptPubKey)
|
||||
|
||||
case (HDPurposes.NestedSegWit,
|
||||
address: P2SHAddress,
|
||||
Some(scriptWitness)) =>
|
||||
val path = NestedSegWitHDPath(coinType = accountCoin,
|
||||
accountIndex = accountIndex,
|
||||
chainType = accountChain,
|
||||
addressIndex = addressIndex)
|
||||
NestedSegWitAddressDb(path,
|
||||
pubKey,
|
||||
hashedPubKey,
|
||||
address,
|
||||
witnessScript = scriptWitness,
|
||||
scriptPubKey = scriptPubKey)
|
||||
case (purpose: HDPurpose,
|
||||
address: BitcoinAddress,
|
||||
scriptWitnessOpt) =>
|
||||
throw new IllegalArgumentException(
|
||||
s"Got invalid combination of HD purpose, address and script witness: $purpose, $address, $scriptWitnessOpt")
|
||||
}
|
||||
}
|
||||
|
||||
private val toTuple: AddressDb => Option[AddressTuple] = {
|
||||
case SegWitAddressDb(path,
|
||||
pubKey,
|
||||
hashedPubKey,
|
||||
address,
|
||||
scriptWitness,
|
||||
scriptPubKey) =>
|
||||
Some(
|
||||
(path.purpose,
|
||||
path.account.index,
|
||||
path.coin.coinType,
|
||||
path.chain.chainType,
|
||||
address,
|
||||
Some(scriptWitness),
|
||||
scriptPubKey,
|
||||
path.address.index,
|
||||
pubKey,
|
||||
hashedPubKey,
|
||||
ScriptType.WITNESS_V0_KEYHASH))
|
||||
case LegacyAddressDb(path, pubkey, hashedPub, address, scriptPubKey) =>
|
||||
Some(
  (path.purpose,
   path.account.index,
   path.coin.coinType,
   path.chain.chainType,
   address,
   None, // scriptWitness
   scriptPubKey,
   path.address.index,
   pubkey,
   hashedPub,
   ScriptType.PUBKEYHASH))
|
||||
case NestedSegWitAddressDb(path,
|
||||
pubKey,
|
||||
hashedPubKey,
|
||||
address,
|
||||
scriptWitness,
|
||||
scriptPubKey) =>
|
||||
Some(
|
||||
(path.purpose,
|
||||
path.account.index,
|
||||
path.coin.coinType,
|
||||
path.chain.chainType,
|
||||
address,
|
||||
Some(scriptWitness),
|
||||
scriptPubKey,
|
||||
path.address.index,
|
||||
pubKey,
|
||||
hashedPubKey,
|
||||
ScriptType.SCRIPTHASH))
|
||||
}
|
||||
|
||||
override def * : ProvenShape[AddressDb] =
|
||||
(purpose,
|
||||
accountIndex,
|
||||
accountCoin,
|
||||
accountChainType,
|
||||
address,
|
||||
scriptWitness,
|
||||
scriptPubKey,
|
||||
addressIndex,
|
||||
ecPublicKey,
|
||||
hashedPubKey,
|
||||
scriptType) <> (fromTuple, toTuple)
|
||||
|
||||
def fk: ForeignKeyQuery[_, AccountDb] =
|
||||
foreignKey("fk_account",
|
||||
sourceColumns = (purpose, accountCoin, accountIndex),
|
||||
targetTableQuery = accountTable) { accountTable =>
|
||||
(accountTable.purpose, accountTable.coinType, accountTable.index)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
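An illustrative analogy (plain Scala collections, not Slick) for the getUnusedAddresses query in AddressDAO above: left-join addresses to spending info on the script pubkey and keep only the rows with no match.

case class Addr(spk: String)
case class Spent(spk: String)

val addresses = List(Addr("a"), Addr("b"))
val spendingInfo = List(Spent("a"))

val unused = addresses
  .map(addr => (addr, spendingInfo.find(_.spk == addr.spk))) // joinLeft on the SPK
  .collect { case (addr, None) => addr }                     // filter(_._2.isEmpty)
// unused == List(Addr("b")): address "b" has never been received to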
wallet/src/main/scala/org/bitcoins/wallet/models/AddressDb.scala (new file, 134 lines)
@ -0,0 +1,134 @@
|
||||
package org.bitcoins.wallet.models
|
||||
|
||||
import org.bitcoins.core.config.NetworkParameters
|
||||
import org.bitcoins.core.crypto.{ECPublicKey, Sha256Hash160Digest}
|
||||
import org.bitcoins.core.hd._
|
||||
import org.bitcoins.core.protocol.script._
|
||||
import org.bitcoins.core.protocol.{
|
||||
Bech32Address,
|
||||
BitcoinAddress,
|
||||
P2PKHAddress,
|
||||
P2SHAddress
|
||||
}
|
||||
import org.bitcoins.core.script.ScriptType
|
||||
|
||||
sealed trait AddressDb {
|
||||
protected type PathType <: HDPath
|
||||
|
||||
def path: PathType
|
||||
def ecPublicKey: ECPublicKey
|
||||
def hashedPubKey: Sha256Hash160Digest
|
||||
def address: BitcoinAddress
|
||||
def scriptType: ScriptType
|
||||
def witnessScriptOpt: Option[ScriptWitness]
|
||||
def scriptPubKey: ScriptPubKey
|
||||
}
|
||||
|
||||
/** Segwit P2PKH */
|
||||
case class SegWitAddressDb(
|
||||
path: SegWitHDPath,
|
||||
ecPublicKey: ECPublicKey,
|
||||
hashedPubKey: Sha256Hash160Digest,
|
||||
address: Bech32Address,
|
||||
witnessScript: ScriptWitness,
|
||||
scriptPubKey: ScriptPubKey
|
||||
) extends AddressDb {
|
||||
override type PathType = SegWitHDPath
|
||||
|
||||
override val scriptType = ScriptType.WITNESS_V0_KEYHASH
|
||||
override val witnessScriptOpt = Some(witnessScript)
|
||||
}
|
||||
|
||||
/** Segwit P2PKH-in-P2SH */
|
||||
case class NestedSegWitAddressDb(
|
||||
path: NestedSegWitHDPath,
|
||||
ecPublicKey: ECPublicKey,
|
||||
hashedPubKey: Sha256Hash160Digest,
|
||||
address: P2SHAddress,
|
||||
witnessScript: ScriptWitness,
|
||||
scriptPubKey: ScriptPubKey
|
||||
) extends AddressDb {
|
||||
override type PathType = NestedSegWitHDPath
|
||||
|
||||
override val scriptType = ScriptType.SCRIPTHASH
|
||||
override val witnessScriptOpt = Some(witnessScript)
|
||||
}
|
||||
|
||||
/** P2PKH */
|
||||
case class LegacyAddressDb(
|
||||
path: LegacyHDPath,
|
||||
ecPublicKey: ECPublicKey,
|
||||
hashedPubKey: Sha256Hash160Digest,
|
||||
address: P2PKHAddress,
|
||||
scriptPubKey: ScriptPubKey
|
||||
) extends AddressDb {
|
||||
override type PathType = LegacyHDPath
|
||||
|
||||
override val scriptType = ScriptType.PUBKEYHASH
|
||||
override val witnessScriptOpt = None
|
||||
}
|
||||
// todo: make ADT for different addresses in DB, seeing as they have different fields
|
||||
// todo: indicate whether or not address has been spent to
|
||||
|
||||
object AddressDbHelper {
|
||||
|
||||
/** Get a Segwit pay-to-pubkeyhash address */
|
||||
def getSegwitAddress(
|
||||
pub: ECPublicKey,
|
||||
path: SegWitHDPath,
|
||||
np: NetworkParameters): SegWitAddressDb = {
|
||||
|
||||
val witnessSpk = P2WPKHWitnessSPKV0(pub)
|
||||
val scriptWitness = P2WPKHWitnessV0(pub)
|
||||
val addr = Bech32Address(witnessSpk, np)
|
||||
SegWitAddressDb(
|
||||
path = path,
|
||||
ecPublicKey = pub,
|
||||
hashedPubKey = witnessSpk.pubKeyHash,
|
||||
address = addr,
|
||||
witnessScript = scriptWitness,
|
||||
scriptPubKey = witnessSpk
|
||||
)
|
||||
}
|
||||
|
||||
/** Get a legacy pay-to-pubkeyhash address */
|
||||
def getLegacyAddress(
|
||||
pub: ECPublicKey,
|
||||
path: LegacyHDPath,
|
||||
np: NetworkParameters): LegacyAddressDb = {
|
||||
val spk = P2PKHScriptPubKey(pub)
|
||||
val addr = P2PKHAddress(spk, np)
|
||||
LegacyAddressDb(path = path,
|
||||
ecPublicKey = pub,
|
||||
hashedPubKey = spk.pubKeyHash,
|
||||
address = addr,
|
||||
scriptPubKey = spk)
|
||||
}
|
||||
|
||||
/** Get a nested Segwit pay-to-pubkeyhash address */
|
||||
def getNestedSegwitAddress(
|
||||
pub: ECPublicKey,
|
||||
path: NestedSegWitHDPath,
|
||||
np: NetworkParameters): NestedSegWitAddressDb = {
|
||||
val redeem = P2WPKHWitnessSPKV0(pub)
|
||||
val spk = P2SHScriptPubKey(redeem)
|
||||
val scriptWitness = P2WPKHWitnessV0(pub)
|
||||
val addr = P2SHAddress(spk, np)
|
||||
NestedSegWitAddressDb(path = path,
|
||||
ecPublicKey = pub,
|
||||
hashedPubKey = redeem.pubKeyHash,
|
||||
address = addr,
|
||||
witnessScript = scriptWitness,
|
||||
scriptPubKey = spk)
|
||||
}
|
||||
|
||||
/** Gets an address. Derives the correct type by looking at the kind of path passed in */
|
||||
def getAddress(
|
||||
pub: ECPublicKey,
|
||||
path: HDPath,
|
||||
np: NetworkParameters): AddressDb = path match {
|
||||
case legacy: LegacyHDPath => getLegacyAddress(pub, legacy, np)
|
||||
case nested: NestedSegWitHDPath => getNestedSegwitAddress(pub, nested, np)
|
||||
case segwit: SegWitHDPath => getSegwitAddress(pub, segwit, np)
|
||||
}
|
||||
}
|
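A hedged usage sketch for AddressDbHelper.getAddress: the type of the HD path selects the address kind. ECPublicKey.freshPublicKey is assumed to be available as a key generator; the path values are purely illustrative.

import org.bitcoins.core.config.RegTest
import org.bitcoins.core.crypto.ECPublicKey
import org.bitcoins.core.hd.{HDChainType, HDCoinType, SegWitHDPath}
import org.bitcoins.wallet.models.AddressDbHelper

val pub = ECPublicKey.freshPublicKey // assumed key generator for the example
val path = SegWitHDPath(coinType = HDCoinType.Bitcoin,
                        accountIndex = 0,
                        chainType = HDChainType.External,
                        addressIndex = 0)
val addrDb = AddressDbHelper.getAddress(pub, path, RegTest)
// addrDb is a SegWitAddressDb because the path is a SegWitHDPath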
@ -1,13 +1,55 @@
|
||||
package org.bitcoins.wallet.models
|
||||
|
||||
import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.core.currency.CurrencyUnit
|
||||
import org.bitcoins.wallet.config._
|
||||
import slick.lifted.TableQuery
|
||||
import slick.lifted.{PrimaryKey, ProvenShape}
|
||||
|
||||
import scala.concurrent.ExecutionContext
|
||||
|
||||
case class IncomingTransactionDAO()(
|
||||
implicit val ec: ExecutionContext,
|
||||
val appConfig: WalletAppConfig)
|
||||
extends TxDAO[IncomingTransactionDb, IncomingTransactionTable] {
|
||||
override val table = TableQuery[IncomingTransactionTable]
|
||||
override val appConfig: WalletAppConfig)
|
||||
extends TxDAO[IncomingTransactionDb] {
|
||||
import profile.api._
|
||||
override val table: profile.api.TableQuery[IncomingTransactionTable] = {
|
||||
TableQuery[IncomingTransactionTable]
|
||||
}
|
||||
|
||||
private lazy val txTable: profile.api.TableQuery[
|
||||
TransactionDAO#TransactionTable] = {
|
||||
TransactionDAO().table
|
||||
}
|
||||
|
||||
class IncomingTransactionTable(tag: Tag)
|
||||
extends TxTable[IncomingTransactionDb](tag, "wallet_incoming_txs") {
|
||||
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def txIdBE: Rep[DoubleSha256DigestBE] = column("txIdBE", O.Unique)
|
||||
|
||||
def incomingAmount: Rep[CurrencyUnit] = column("incomingAmount")
|
||||
|
||||
private type IncomingTransactionTuple = (DoubleSha256DigestBE, CurrencyUnit)
|
||||
|
||||
private val fromTuple: IncomingTransactionTuple => IncomingTransactionDb = {
|
||||
case (txId, incomingAmount) =>
|
||||
IncomingTransactionDb(txId, incomingAmount)
|
||||
}
|
||||
|
||||
private val toTuple: IncomingTransactionDb => Option[
|
||||
IncomingTransactionTuple] = tx => Some((tx.txIdBE, tx.incomingAmount))
|
||||
|
||||
def * : ProvenShape[IncomingTransactionDb] =
|
||||
(txIdBE, incomingAmount) <> (fromTuple, toTuple)
|
||||
|
||||
def primaryKey: PrimaryKey =
|
||||
primaryKey("pk_tx", sourceColumns = txIdBE)
|
||||
|
||||
def fk_underlying_tx: slick.lifted.ForeignKeyQuery[_, TransactionDb] = {
|
||||
foreignKey("fk_underlying_tx",
|
||||
sourceColumns = txIdBE,
|
||||
targetTableQuery = txTable)(_.txIdBE)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,15 @@
|
||||
package org.bitcoins.wallet.models
|
||||
|
||||
import org.bitcoins.core.crypto._
|
||||
import org.bitcoins.core.currency.CurrencyUnit
|
||||
|
||||
/**
|
||||
* Represents a relevant transaction for the wallet that we should be keeping track of
|
||||
* @param txIdBE Transaction ID
|
||||
*/
|
||||
case class IncomingTransactionDb(
|
||||
txIdBE: DoubleSha256DigestBE,
|
||||
incomingAmount: CurrencyUnit)
|
||||
extends TxDB {
|
||||
lazy val txId: DoubleSha256Digest = txIdBE.flip
|
||||
}
|
@ -1,13 +1,82 @@
|
||||
package org.bitcoins.wallet.models
|
||||
|
||||
import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.core.currency.CurrencyUnit
|
||||
import org.bitcoins.core.wallet.fee.SatoshisPerByte
|
||||
import org.bitcoins.wallet.config._
|
||||
import slick.lifted.TableQuery
|
||||
import slick.lifted.{PrimaryKey, ProvenShape}
|
||||
|
||||
import scala.concurrent.ExecutionContext
|
||||
|
||||
case class OutgoingTransactionDAO()(
|
||||
implicit val ec: ExecutionContext,
|
||||
val appConfig: WalletAppConfig)
|
||||
extends TxDAO[OutgoingTransactionDb, OutgoingTransactionTable] {
|
||||
override val table = TableQuery[OutgoingTransactionTable]
|
||||
override val appConfig: WalletAppConfig)
|
||||
extends TxDAO[OutgoingTransactionDb] {
|
||||
|
||||
import profile.api._
|
||||
|
||||
override val table: profile.api.TableQuery[OutgoingTransactionTable] =
  TableQuery[OutgoingTransactionTable]
|
||||
|
||||
val txTable: profile.api.TableQuery[TransactionDAO#TransactionTable] = {
|
||||
TransactionDAO().table
  .asInstanceOf[TableQuery[TransactionDAO#TransactionTable]]
|
||||
}
|
||||
|
||||
class OutgoingTransactionTable(tag: Tag)
|
||||
extends TxTable[OutgoingTransactionDb](tag, "wallet_outgoing_txs") {
|
||||
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def txIdBE: Rep[DoubleSha256DigestBE] = column("txIdBE", O.Unique)
|
||||
|
||||
def inputAmount: Rep[CurrencyUnit] = column("inputAmount")
|
||||
|
||||
def sentAmount: Rep[CurrencyUnit] = column("sentAmount")
|
||||
|
||||
def actualFee: Rep[CurrencyUnit] = column("actualFee")
|
||||
|
||||
def expectedFee: Rep[CurrencyUnit] = column("expectedFee")
|
||||
|
||||
def feeRate: Rep[SatoshisPerByte] = column("feeRate")
|
||||
|
||||
private type OutgoingTransactionTuple =
|
||||
(
|
||||
DoubleSha256DigestBE,
|
||||
CurrencyUnit,
|
||||
CurrencyUnit,
|
||||
CurrencyUnit,
|
||||
CurrencyUnit,
|
||||
SatoshisPerByte)
|
||||
|
||||
private val fromTuple: OutgoingTransactionTuple => OutgoingTransactionDb = {
|
||||
case (txId, inputAmount, sentAmount, actualFee, expectedFee, feeRate) =>
|
||||
OutgoingTransactionDb(txId,
|
||||
inputAmount,
|
||||
sentAmount,
|
||||
actualFee,
|
||||
expectedFee,
|
||||
feeRate)
|
||||
}
|
||||
|
||||
private val toTuple: OutgoingTransactionDb => Option[
|
||||
OutgoingTransactionTuple] = tx =>
|
||||
Some(
|
||||
(tx.txIdBE,
|
||||
tx.inputAmount,
|
||||
tx.sentAmount,
|
||||
tx.actualFee,
|
||||
tx.expectedFee,
|
||||
tx.feeRate))
|
||||
|
||||
def * : ProvenShape[OutgoingTransactionDb] =
|
||||
(txIdBE, inputAmount, sentAmount, actualFee, expectedFee, feeRate) <> (fromTuple, toTuple)
|
||||
|
||||
def primaryKey: PrimaryKey =
|
||||
primaryKey("pk_tx", sourceColumns = txIdBE)
|
||||
|
||||
def fk_underlying_tx: slick.lifted.ForeignKeyQuery[_, TransactionDb] = {
|
||||
foreignKey("fk_underlying_tx",
|
||||
sourceColumns = txIdBE,
|
||||
targetTableQuery = txTable)(_.txIdBE)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,52 @@
|
||||
package org.bitcoins.wallet.models
|
||||
|
||||
import org.bitcoins.core.crypto._
|
||||
import org.bitcoins.core.currency._
|
||||
import org.bitcoins.core.protocol.transaction._
|
||||
import org.bitcoins.core.wallet.fee.SatoshisPerByte
|
||||
|
||||
/**
|
||||
* Represents a relevant transaction for the wallet that we should be keeping track of
|
||||
* @param txIdBE Transaction ID
|
||||
* @param actualFee fee paid by the transaction
|
||||
* @param expectedFee Fee the wallet expected to pay
|
||||
* @param feeRate Fee rate the transaction actually paid
|
||||
*/
|
||||
case class OutgoingTransactionDb(
|
||||
txIdBE: DoubleSha256DigestBE,
|
||||
inputAmount: CurrencyUnit,
|
||||
sentAmount: CurrencyUnit,
|
||||
actualFee: CurrencyUnit,
|
||||
expectedFee: CurrencyUnit,
|
||||
feeRate: SatoshisPerByte)
|
||||
extends TxDB {
|
||||
lazy val txId: DoubleSha256Digest = txIdBE.flip
|
||||
}
|
||||
|
||||
object OutgoingTransactionDb {
|
||||
|
||||
def fromTransaction(
|
||||
tx: Transaction,
|
||||
inputAmount: CurrencyUnit,
|
||||
sentAmount: CurrencyUnit,
|
||||
expectedFee: CurrencyUnit): OutgoingTransactionDb = {
|
||||
val totalOutput = tx.outputs.map(_.value).sum
|
||||
require(
|
||||
sentAmount <= totalOutput,
|
||||
s"sentAmount ($sentAmount) cannot be greater than the transaction's total output ($totalOutput)")
|
||||
require(
|
||||
sentAmount <= inputAmount,
|
||||
s"sentAmount ($sentAmount) cannot be greater than the amount the wallet input ($inputAmount)")
|
||||
|
||||
val feePaid = inputAmount - totalOutput
|
||||
val feeRate = feePaid.satoshis.toLong / tx.baseSize.toDouble
|
||||
OutgoingTransactionDb(
|
||||
tx.txIdBE,
|
||||
inputAmount,
|
||||
sentAmount,
|
||||
feePaid,
|
||||
expectedFee,
|
||||
SatoshisPerByte.fromLong(feeRate.toLong)
|
||||
)
|
||||
}
|
||||
}
|
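A worked example of the fee arithmetic in fromTransaction above, with illustrative numbers: 100,000 sats of inputs, 99,500 sats of outputs, and a 250-byte base size.

val inputAmount = 100000L
val totalOutput = 99500L
val baseSize = 250L

val feePaid = inputAmount - totalOutput   // 500 sats actually paid as the fee
val feeRate = feePaid / baseSize.toDouble // 2.0 sats per byte
assert(feeRate.toLong == 2L)              // stored via SatoshisPerByte.fromLong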
@ -1,8 +1,13 @@
|
||||
package org.bitcoins.wallet.models
|
||||
|
||||
import org.bitcoins.core.crypto.DoubleSha256DigestBE
|
||||
import org.bitcoins.core.hd.HDAccount
|
||||
import org.bitcoins.core.protocol.script.ScriptPubKey
|
||||
import org.bitcoins.core.currency.CurrencyUnit
|
||||
import org.bitcoins.core.hd._
|
||||
import org.bitcoins.core.protocol.script.{
|
||||
ScriptPubKey,
|
||||
ScriptWitness,
|
||||
WitnessScriptPubKey
|
||||
}
|
||||
import org.bitcoins.core.protocol.transaction.{
|
||||
Transaction,
|
||||
TransactionOutPoint,
|
||||
@ -11,20 +16,29 @@ import org.bitcoins.core.protocol.transaction.{
|
||||
import org.bitcoins.core.wallet.utxo.TxoState
|
||||
import org.bitcoins.db.CRUDAutoInc
|
||||
import org.bitcoins.wallet.config._
|
||||
import slick.jdbc.SQLiteProfile.api._
|
||||
import slick.lifted.ProvenShape
|
||||
|
||||
import scala.concurrent.{ExecutionContext, Future}
|
||||
|
||||
case class SpendingInfoDAO()(
|
||||
implicit val ec: ExecutionContext,
|
||||
val appConfig: WalletAppConfig)
|
||||
override val appConfig: WalletAppConfig)
|
||||
extends CRUDAutoInc[SpendingInfoDb] {
|
||||
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
import profile.api._
|
||||
|
||||
/** The table inside our database we are inserting into */
|
||||
override val table = TableQuery[SpendingInfoTable]
|
||||
private val addrTable = TableQuery[AddressTable]
|
||||
override val table: profile.api.TableQuery[SpendingInfoTable] =
|
||||
profile.api.TableQuery[SpendingInfoTable]
|
||||
|
||||
private lazy val addrTable: profile.api.TableQuery[AddressDAO#AddressTable] = {
|
||||
AddressDAO()(ec, appConfig).table
|
||||
}
|
||||
|
||||
private lazy val txTable: profile.api.TableQuery[
|
||||
IncomingTransactionDAO#IncomingTransactionTable] = {
|
||||
IncomingTransactionDAO().table
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches all the incoming TXOs in our DB that are in
|
||||
@ -45,7 +59,7 @@ case class SpendingInfoDAO()(
|
||||
txo.outPoint.inSet(tx.inputs.map(_.previousOutput))
|
||||
}
|
||||
|
||||
database.run(filtered.result)
|
||||
safeDatabase.run(filtered.result)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -58,7 +72,7 @@ case class SpendingInfoDAO()(
|
||||
filtered.join(addrTable).on(_.scriptPubKey === _.scriptPubKey)
|
||||
}
|
||||
|
||||
database.runVec(query.result)
|
||||
safeDatabase.runVec(query.result)
|
||||
}
|
||||
|
||||
/** Updates the [[org.bitcoins.core.wallet.utxo.TxoState TxoState]] of all of the given
|
||||
@ -93,13 +107,13 @@ case class SpendingInfoDAO()(
|
||||
*/
|
||||
def findTx(txid: DoubleSha256DigestBE): Future[Vector[SpendingInfoDb]] = {
|
||||
val filtered = table.filter(_.txid === txid)
|
||||
database.runVec(filtered.result)
|
||||
safeDatabase.runVec(filtered.result)
|
||||
}
|
||||
|
||||
def findByScriptPubKey(
|
||||
scriptPubKey: ScriptPubKey): Future[Vector[SpendingInfoDb]] = {
|
||||
val filtered = table.filter(_.scriptPubKey === scriptPubKey)
|
||||
database.runVec(filtered.result)
|
||||
safeDatabase.runVec(filtered.result)
|
||||
}
|
||||
|
||||
private val receivedStates: Set[TxoState] =
|
||||
@ -146,6 +160,170 @@ case class SpendingInfoDAO()(
|
||||
/** Enumerates all TX outpoints in the wallet */
|
||||
def findAllOutpoints(): Future[Vector[TransactionOutPoint]] = {
|
||||
val query = table.map(_.outPoint)
|
||||
database.runVec(query.result).map(_.toVector)
|
||||
safeDatabase.runVec(query.result).map(_.toVector)
|
||||
}
|
||||
|
||||
/**
|
||||
* This table stores the necessary information to spend
|
||||
* a transaction output (TXO) at a later point in time. It
|
||||
* also stores how many confirmations it has, whether
|
||||
* or not it is spent (i.e. if it is a UTXO or not) and the
|
||||
* TXID of the transaction that created this output.
|
||||
*/
|
||||
case class SpendingInfoTable(tag: Tag)
|
||||
extends TableAutoInc[SpendingInfoDb](tag, "txo_spending_info") {
|
||||
import org.bitcoins.db.DbCommonsColumnMappers._
|
||||
|
||||
def outPoint: Rep[TransactionOutPoint] =
|
||||
column("tx_outpoint")
|
||||
|
||||
def txid: Rep[DoubleSha256DigestBE] = column("txid")
|
||||
|
||||
def state: Rep[TxoState] = column("txo_state")
|
||||
|
||||
def scriptPubKey: Rep[ScriptPubKey] = column("script_pub_key")
|
||||
|
||||
def value: Rep[CurrencyUnit] = column("value")
|
||||
|
||||
def privKeyPath: Rep[HDPath] = column("hd_privkey_path")
|
||||
|
||||
def redeemScriptOpt: Rep[Option[ScriptPubKey]] =
|
||||
column("redeem_script")
|
||||
|
||||
def scriptWitnessOpt: Rep[Option[ScriptWitness]] = column("script_witness")
|
||||
|
||||
def blockHash: Rep[Option[DoubleSha256DigestBE]] = column("block_hash")
|
||||
|
||||
/** All UTXOs must have a SPK in the wallet that gets spent to */
|
||||
def fk_scriptPubKey: slick.lifted.ForeignKeyQuery[_, AddressDb] = {
|
||||
val addressTable = addrTable
|
||||
foreignKey("fk_scriptPubKey",
|
||||
sourceColumns = scriptPubKey,
|
||||
targetTableQuery = addressTable)(_.scriptPubKey)
|
||||
}
|
||||
|
||||
/** All UTXOs must have a corresponding transaction in the wallet */
|
||||
def fk_incoming_txId: slick.lifted.ForeignKeyQuery[
|
||||
_,
|
||||
IncomingTransactionDb] = {
|
||||
foreignKey("fk_incoming_txId",
|
||||
sourceColumns = txid,
|
||||
targetTableQuery = txTable)(_.txIdBE)
|
||||
}
|
||||
|
||||
private type UTXOTuple = (
|
||||
Option[Long], // ID
|
||||
TransactionOutPoint,
|
||||
ScriptPubKey, // output SPK
|
||||
CurrencyUnit, // output value
|
||||
HDPath,
|
||||
Option[ScriptPubKey], // RedeemScript
|
||||
Option[ScriptWitness],
|
||||
TxoState, // state
|
||||
DoubleSha256DigestBE, // TXID
|
||||
Option[DoubleSha256DigestBE] // block hash
|
||||
)
|
||||
|
||||
private val fromTuple: UTXOTuple => SpendingInfoDb = {
|
||||
case (id,
|
||||
outpoint,
|
||||
spk,
|
||||
value,
|
||||
path: SegWitHDPath,
|
||||
None, // RedeemScript
|
||||
Some(scriptWitness),
|
||||
state,
|
||||
txid,
|
||||
blockHash) =>
|
||||
SegwitV0SpendingInfo(
|
||||
outPoint = outpoint,
|
||||
output = TransactionOutput(value, spk),
|
||||
privKeyPath = path,
|
||||
scriptWitness = scriptWitness,
|
||||
id = id,
|
||||
state = state,
|
||||
txid = txid,
|
||||
blockHash = blockHash
|
||||
)
|
||||
|
||||
case (id,
|
||||
outpoint,
|
||||
spk,
|
||||
value,
|
||||
path: LegacyHDPath,
|
||||
None, // RedeemScript
|
||||
None, // ScriptWitness
|
||||
state,
|
||||
txid,
|
||||
blockHash) =>
|
||||
LegacySpendingInfo(outPoint = outpoint,
|
||||
output = TransactionOutput(value, spk),
|
||||
privKeyPath = path,
|
||||
id = id,
|
||||
state = state,
|
||||
txid = txid,
|
||||
blockHash = blockHash)
|
||||
|
||||
case (id,
|
||||
outpoint,
|
||||
spk,
|
||||
value,
|
||||
path: NestedSegWitHDPath,
|
||||
Some(redeemScript), // RedeemScript
|
||||
Some(scriptWitness), // ScriptWitness
|
||||
state,
|
||||
txid,
|
||||
blockHash)
|
||||
if WitnessScriptPubKey.isWitnessScriptPubKey(redeemScript.asm) =>
|
||||
NestedSegwitV0SpendingInfo(outpoint,
|
||||
TransactionOutput(value, spk),
|
||||
path,
|
||||
redeemScript,
|
||||
scriptWitness,
|
||||
txid,
|
||||
state,
|
||||
blockHash,
|
||||
id)
|
||||
|
||||
case (id,
|
||||
outpoint,
|
||||
spk,
|
||||
value,
|
||||
path,
|
||||
spkOpt,
|
||||
swOpt,
|
||||
spent,
|
||||
txid,
|
||||
blockHash) =>
|
||||
throw new IllegalArgumentException(
|
||||
"Could not construct UtxoSpendingInfoDb from bad tuple:"
|
||||
+ s" ($id, $outpoint, $spk, $value, $path, $spkOpt, $swOpt, $spent, $txid, $blockHash).")
|
||||
}
|
||||
|
||||
private val toTuple: SpendingInfoDb => Option[UTXOTuple] =
|
||||
utxo =>
|
||||
Some(
|
||||
(utxo.id,
|
||||
utxo.outPoint,
|
||||
utxo.output.scriptPubKey,
|
||||
utxo.output.value,
|
||||
utxo.privKeyPath,
|
||||
utxo.redeemScriptOpt,
|
||||
utxo.scriptWitnessOpt,
|
||||
utxo.state,
|
||||
utxo.txid,
|
||||
utxo.blockHash))
|
||||
|
||||
def * : ProvenShape[SpendingInfoDb] =
|
||||
(id.?,
|
||||
outPoint,
|
||||
scriptPubKey,
|
||||
value,
|
||||
privKeyPath,
|
||||
redeemScriptOpt,
|
||||
scriptWitnessOpt,
|
||||
state,
|
||||
txid,
|
||||
blockHash) <> (fromTuple, toTuple)
|
||||
}
|
||||
}
|
||||
|
@ -1,18 +1,13 @@
package org.bitcoins.wallet.models

import org.bitcoins.core.crypto.{DoubleSha256DigestBE, Sign}
import org.bitcoins.core.currency.CurrencyUnit
import org.bitcoins.core.hd.{
HDPath,
LegacyHDPath,
NestedSegWitHDPath,
SegWitHDPath
}
import org.bitcoins.core.protocol.script.{
ScriptPubKey,
ScriptWitness,
WitnessScriptPubKey
}
import org.bitcoins.core.protocol.script.{ScriptPubKey, ScriptWitness}
import org.bitcoins.core.protocol.transaction.{
TransactionOutPoint,
TransactionOutput
@ -23,10 +18,8 @@ import org.bitcoins.core.wallet.utxo.{
ConditionalPath,
TxoState
}
import org.bitcoins.db.{DbRowAutoInc, TableAutoInc}
import org.bitcoins.db.DbRowAutoInc
import org.bitcoins.keymanager.bip39.BIP39KeyManager
import slick.jdbc.SQLiteProfile.api._
import slick.lifted.ProvenShape

/**
* DB representation of a native V0
@ -199,166 +192,3 @@ sealed trait SpendingInfoDb extends DbRowAutoInc[SpendingInfoDb] {
}

}

/**
* This table stores the necessary information to spend
* a transaction output (TXO) at a later point in time. It
* also stores how many confirmations it has, whether
* or not it is spent (i.e. if it is a UTXO or not) and the
* TXID of the transaction that created this output.
*/
case class SpendingInfoTable(tag: Tag)
extends TableAutoInc[SpendingInfoDb](tag, "txo_spending_info") {
import org.bitcoins.db.DbCommonsColumnMappers._

def outPoint: Rep[TransactionOutPoint] =
column("tx_outpoint")

def txid: Rep[DoubleSha256DigestBE] = column("txid")

def state: Rep[TxoState] = column("txo_state")

def scriptPubKey: Rep[ScriptPubKey] = column("script_pub_key")

def value: Rep[CurrencyUnit] = column("value")

def privKeyPath: Rep[HDPath] = column("hd_privkey_path")

def redeemScriptOpt: Rep[Option[ScriptPubKey]] =
column("redeem_script")

def scriptWitnessOpt: Rep[Option[ScriptWitness]] = column("script_witness")

def blockHash: Rep[Option[DoubleSha256DigestBE]] = column("block_hash")

/** All UTXOs must have a SPK in the wallet that gets spent to */
def fk_scriptPubKey = {
val addressTable = TableQuery[AddressTable]
foreignKey("fk_scriptPubKey",
sourceColumns = scriptPubKey,
targetTableQuery = addressTable)(_.scriptPubKey)
}

/** All UTXOs must have a corresponding transaction in the wallet */
def fk_incoming_txId = {
val txTable = TableQuery[IncomingTransactionTable]
foreignKey("fk_incoming_txId",
sourceColumns = txid,
targetTableQuery = txTable)(_.txIdBE)
}

private type UTXOTuple = (
Option[Long], // ID
TransactionOutPoint,
ScriptPubKey, // output SPK
CurrencyUnit, // output value
HDPath,
Option[ScriptPubKey], // ReedemScript
Option[ScriptWitness],
TxoState, // state
DoubleSha256DigestBE, // TXID
Option[DoubleSha256DigestBE] // block hash
)

private val fromTuple: UTXOTuple => SpendingInfoDb = {
case (id,
outpoint,
spk,
value,
path: SegWitHDPath,
None, // ReedemScript
Some(scriptWitness),
state,
txid,
blockHash) =>
SegwitV0SpendingInfo(
outPoint = outpoint,
output = TransactionOutput(value, spk),
privKeyPath = path,
scriptWitness = scriptWitness,
id = id,
state = state,
txid = txid,
blockHash = blockHash
)

case (id,
outpoint,
spk,
value,
path: LegacyHDPath,
None, // RedeemScript
None, // ScriptWitness
state,
txid,
blockHash) =>
LegacySpendingInfo(outPoint = outpoint,
output = TransactionOutput(value, spk),
privKeyPath = path,
id = id,
state = state,
txid = txid,
blockHash = blockHash)

case (id,
outpoint,
spk,
value,
path: NestedSegWitHDPath,
Some(redeemScript), // RedeemScript
Some(scriptWitness), // ScriptWitness
state,
txid,
blockHash)
if WitnessScriptPubKey.isWitnessScriptPubKey(redeemScript.asm) =>
NestedSegwitV0SpendingInfo(outpoint,
TransactionOutput(value, spk),
path,
redeemScript,
scriptWitness,
txid,
state,
blockHash,
id)

case (id,
outpoint,
spk,
value,
path,
spkOpt,
swOpt,
spent,
txid,
blockHash) =>
throw new IllegalArgumentException(
"Could not construct UtxoSpendingInfoDb from bad tuple:"
+ s" ($id, $outpoint, $spk, $value, $path, $spkOpt, $swOpt, $spent, $txid, $blockHash).")
}

private val toTuple: SpendingInfoDb => Option[UTXOTuple] =
utxo =>
Some(
(utxo.id,
utxo.outPoint,
utxo.output.scriptPubKey,
utxo.output.value,
utxo.privKeyPath,
utxo.redeemScriptOpt,
utxo.scriptWitnessOpt,
utxo.state,
utxo.txid,
utxo.blockHash))

def * : ProvenShape[SpendingInfoDb] =
(id.?,
outPoint,
scriptPubKey,
value,
privKeyPath,
redeemScriptOpt,
scriptWitnessOpt,
state,
txid,
blockHash) <> (fromTuple, toTuple)
}
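The broader pattern in this diff is that table and query definitions stop relying on a hard-coded import slick.jdbc.SQLiteProfile.api._ and instead pull the Slick API from a profile value already in scope (the import profile.api._ lines below), which is what lets the same DAO code target different databases. A rough, hypothetical sketch of that idea, using invented names (AppProfileComponent, Widget, WidgetDAO) rather than the exact bitcoin-s API:

import slick.jdbc.JdbcProfile

case class Widget(id: Long, name: String)

trait AppProfileComponent {
  // supplied by each concrete DAO, e.g. resolved from its app configuration
  val profile: JdbcProfile
}

trait WidgetDAO extends AppProfileComponent {
  import profile.api._

  class WidgetTable(tag: Tag) extends Table[Widget](tag, "widgets") {
    def id = column[Long]("id", O.PrimaryKey)
    def name = column[String]("name")
    def * = (id, name) <> (Widget.tupled, Widget.unapply)
  }

  // TableQuery comes from profile.api, so the same code runs on any JdbcProfile
  val widgets = TableQuery[WidgetTable]
}

With this shape, swapping SQLite for another JdbcProfile changes only the value injected into profile, not the table or query code.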
@ -1,37 +1,53 @@
package org.bitcoins.wallet.models

import org.bitcoins.core.crypto.{DoubleSha256Digest, DoubleSha256DigestBE}
import org.bitcoins.core.currency.CurrencyUnit
import org.bitcoins.core.number.UInt32
import org.bitcoins.core.protocol.transaction.Transaction
import org.bitcoins.db.{CRUD, SlickUtil}
import org.bitcoins.wallet.config._
import slick.jdbc.SQLiteProfile.api._
import slick.lifted.{PrimaryKey, ProvenShape}

import scala.concurrent.{ExecutionContext, Future}

trait TxDAO[DbEntryType <: TxDB, DbTable <: TxTable[DbEntryType]]
extends CRUD[DbEntryType, DoubleSha256DigestBE] {
trait TxCRUDComponent[DbEntryType <: TxDB] {
self: CRUD[DbEntryType, DoubleSha256DigestBE] =>
import profile.api._

abstract class TxTable[DbEntryType <: TxDB](
tag: profile.api.Tag,
tableName: String)
extends Table[DbEntryType](tag, tableName) {
def txIdBE: Rep[DoubleSha256DigestBE]
}
}

trait TxDAO[DbEntryType <: TxDB]
extends CRUD[DbEntryType, DoubleSha256DigestBE]
with TxCRUDComponent[DbEntryType]
with SlickUtil[DbEntryType, DoubleSha256DigestBE] {
import profile.api._
implicit val ec: ExecutionContext

val appConfig: WalletAppConfig

import org.bitcoins.db.DbCommonsColumnMappers._

override val table: TableQuery[DbTable]
type DbTable = TxTable[DbEntryType]
override val table: TableQuery[_ <: DbTable]

override def createAll(ts: Vector[DbEntryType]): Future[Vector[DbEntryType]] =
SlickUtil.createAllNoAutoInc(ts, database, table)
createAllNoAutoInc(ts, safeDatabase)

override protected def findByPrimaryKeys(txIdBEs: Vector[
DoubleSha256DigestBE]): Query[Table[_], DbEntryType, Seq] =
override protected def findByPrimaryKeys(
txIdBEs: Vector[DoubleSha256DigestBE]): Query[DbTable, DbEntryType, Seq] =
table.filter(_.txIdBE.inSet(txIdBEs))

override def findByPrimaryKey(
txIdBE: DoubleSha256DigestBE): Query[Table[_], DbEntryType, Seq] = {
txIdBE: DoubleSha256DigestBE): Query[DbTable, DbEntryType, Seq] = {
table.filter(_.txIdBE === txIdBE)
}

override def findAll(
txs: Vector[DbEntryType]): Query[Table[_], DbEntryType, Seq] =
txs: Vector[DbEntryType]): Query[DbTable, DbEntryType, Seq] =
findByPrimaryKeys(txs.map(_.txIdBE))

def findByTxId(txIdBE: DoubleSha256DigestBE): Future[Option[DbEntryType]] = {
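TxCRUDComponent above uses a self-type (self: CRUD[...] =>) so that the abstract TxTable definition can see profile.api without the component itself extending CRUD. A stripped-down, hypothetical sketch of that component pattern, with invented names (HasId, ProfileComponent, IdTableComponent) rather than the bitcoin-s API:

import slick.jdbc.JdbcProfile

trait HasId { def id: Long }

trait ProfileComponent { val profile: JdbcProfile }

// The component can only be mixed into something that already supplies a profile.
trait IdTableComponent[E <: HasId] { self: ProfileComponent =>
  import profile.api._

  // Concrete DAOs extend this with their own columns; the id column is the
  // shared contract, mirroring txIdBE in TxTable above.
  abstract class IdTable(tag: Tag, name: String) extends Table[E](tag, name) {
    def id: Rep[Long]
  }
}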
@ -56,7 +72,95 @@ trait TxDAO[DbEntryType <: TxDB, DbTable <: TxTable[DbEntryType]]

case class TransactionDAO()(
implicit val ec: ExecutionContext,
val appConfig: WalletAppConfig)
extends TxDAO[TransactionDb, TransactionTable] {
override val appConfig: WalletAppConfig)
extends TxDAO[TransactionDb] {

import profile.api._

override val table = TableQuery[TransactionTable]

class TransactionTable(tag: Tag)
extends TxTable[TransactionDb](tag, "tx_table") {

import org.bitcoins.db.DbCommonsColumnMappers._

def txIdBE: Rep[DoubleSha256DigestBE] = column("txIdBE", O.Unique)

def transaction: Rep[Transaction] = column("transaction")

def unsignedTxIdBE: Rep[DoubleSha256DigestBE] = column("unsignedTxIdBE")

def unsignedTx: Rep[Transaction] = column("unsignedTx")

def wTxIdBEOpt: Rep[Option[DoubleSha256DigestBE]] =
column("wTxIdBE")

def totalOutput: Rep[CurrencyUnit] = column("totalOutput")

def numInputs: Rep[Int] = column("numInputs")

def numOutputs: Rep[Int] = column("numOutputs")

def locktime: Rep[UInt32] = column("locktime")

private type TransactionTuple =
(
DoubleSha256DigestBE,
Transaction,
DoubleSha256DigestBE,
Transaction,
Option[DoubleSha256DigestBE],
CurrencyUnit,
Int,
Int,
UInt32)

private val fromTuple: TransactionTuple => TransactionDb = {
case (txId,
transaction,
unsignedTxIdBE,
unsignedTx,
wTxIdBEOpt,
totalOutput,
numInputs,
numOutputs,
locktime) =>
TransactionDb(txId,
transaction,
unsignedTxIdBE,
unsignedTx,
wTxIdBEOpt,
totalOutput,
numInputs,
numOutputs,
locktime)
}

private val toTuple: TransactionDb => Option[TransactionTuple] = tx =>
Some(
(tx.txIdBE,
tx.transaction,
tx.unsignedTxIdBE,
tx.unsignedTx,
tx.wTxIdBEOpt,
tx.totalOutput,
tx.numInputs,
tx.numOutputs,
tx.lockTime))

def * : ProvenShape[TransactionDb] =
(txIdBE,
transaction,
unsignedTxIdBE,
unsignedTx,
wTxIdBEOpt,
totalOutput,
numInputs,
numOutputs,
locktime) <> (fromTuple, toTuple)

def primaryKey: PrimaryKey =
primaryKey("pk_tx", sourceColumns = txIdBE)

}
}
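As a usage sketch, a caller with an ExecutionContext and a WalletAppConfig in implicit scope can construct the DAO above and combine a generic CRUD insert with the txid lookup defined in TxDAO. The helper below is illustrative only; create is assumed to come from the shared CRUD base class, and the TransactionDb value is assumed to exist.

import scala.concurrent.{ExecutionContext, Future}

import org.bitcoins.wallet.config.WalletAppConfig
import org.bitcoins.wallet.models.{TransactionDAO, TransactionDb}

object TxDaoUsageExample {

  def storeAndLookup(txDb: TransactionDb)(
      implicit ec: ExecutionContext,
      walletConf: WalletAppConfig): Future[Option[TransactionDb]] = {
    val dao = TransactionDAO()
    for {
      _ <- dao.create(txDb) // insert through the shared CRUD machinery (assumed)
      found <- dao.findByTxId(txDb.txIdBE) // lookup defined in TxDAO above
    } yield found
  }
}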
@ -11,17 +11,11 @@ import org.bitcoins.core.protocol.transaction.{
TransactionInput,
WitnessTransaction
}
import slick.jdbc.SQLiteProfile.api._
import slick.lifted.{PrimaryKey, ProvenShape}

trait TxDB {
def txIdBE: DoubleSha256DigestBE
}

trait TxTable[DbEntryType <: TxDB] extends Table[DbEntryType] {
def txIdBE: Rep[DoubleSha256DigestBE]
}

/**
* Represents a relevant transaction for the wallet that we should be keeping track of
* @param txIdBE Transaction ID
@ -94,89 +88,3 @@ object TransactionDb {
tx.lockTime)
}
}

class TransactionTable(tag: Tag)
extends Table[TransactionDb](tag, "tx_table")
with TxTable[TransactionDb] {

import org.bitcoins.db.DbCommonsColumnMappers._

def txIdBE: Rep[DoubleSha256DigestBE] = column("txIdBE", O.Unique)

def transaction: Rep[Transaction] = column("transaction")

def unsignedTxIdBE: Rep[DoubleSha256DigestBE] = column("unsignedTxIdBE")

def unsignedTx: Rep[Transaction] = column("unsignedTx")

def wTxIdBEOpt: Rep[Option[DoubleSha256DigestBE]] =
column("wTxIdBE")

def totalOutput: Rep[CurrencyUnit] = column("totalOutput")

def numInputs: Rep[Int] = column("numInputs")

def numOutputs: Rep[Int] = column("numOutputs")

def locktime: Rep[UInt32] = column("locktime")

private type TransactionTuple =
(
DoubleSha256DigestBE,
Transaction,
DoubleSha256DigestBE,
Transaction,
Option[DoubleSha256DigestBE],
CurrencyUnit,
Int,
Int,
UInt32)

private val fromTuple: TransactionTuple => TransactionDb = {
case (txId,
transaction,
unsignedTxIdBE,
unsignedTx,
wTxIdBEOpt,
totalOutput,
numInputs,
numOutputs,
locktime) =>
TransactionDb(txId,
transaction,
unsignedTxIdBE,
unsignedTx,
wTxIdBEOpt,
totalOutput,
numInputs,
numOutputs,
locktime)
}

private val toTuple: TransactionDb => Option[TransactionTuple] = tx =>
Some(
(tx.txIdBE,
tx.transaction,
tx.unsignedTxIdBE,
tx.unsignedTx,
tx.wTxIdBEOpt,
tx.totalOutput,
tx.numInputs,
tx.numOutputs,
tx.lockTime))

def * : ProvenShape[TransactionDb] =
(txIdBE,
transaction,
unsignedTxIdBE,
unsignedTx,
wTxIdBEOpt,
totalOutput,
numInputs,
numOutputs,
locktime) <> (fromTuple, toTuple)

def primaryKey: PrimaryKey =
primaryKey("pk_tx", sourceColumns = txIdBE)

}
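Finally, a hypothetical example of how another record type could plug into the TxDB / TxTable / TxDAO abstraction this diff introduces. MemoTxDb, MemoTxDAO and the "memo_tx_table" name are invented for illustration; only the txIdBE contract and the overall DAO shape mirror the code above.

import scala.concurrent.ExecutionContext

import org.bitcoins.core.crypto.DoubleSha256DigestBE
import org.bitcoins.wallet.config.WalletAppConfig
import org.bitcoins.wallet.models.{TxDAO, TxDB}

// A minimal wallet record that satisfies the TxDB contract
case class MemoTxDb(txIdBE: DoubleSha256DigestBE, memo: String) extends TxDB

case class MemoTxDAO()(
    implicit val ec: ExecutionContext,
    override val appConfig: WalletAppConfig)
    extends TxDAO[MemoTxDb] {

  import profile.api._
  import org.bitcoins.db.DbCommonsColumnMappers._

  // The table lives inside the DAO, so it uses whatever profile is in scope
  class MemoTxTable(tag: Tag) extends TxTable[MemoTxDb](tag, "memo_tx_table") {
    def txIdBE: Rep[DoubleSha256DigestBE] = column("txIdBE", O.Unique)
    def memo: Rep[String] = column("memo")
    def * = (txIdBE, memo) <> (MemoTxDb.tupled, MemoTxDb.unapply)
  }

  override val table = TableQuery[MemoTxTable]
}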