CRUD Test suite + updateAll improvements (#1618)

* CRUD Tests

* Remove some hard coded values

* DeleteAll test optimization

* Make deleteAll transactional

* Fix deleteAll test
This commit was authored by Ben Carman on 2020-07-30 10:10:56 -05:00 and committed via GitHub.
parent cad6fbeaaf
commit 0ece51379e
8 changed files with 400 additions and 24 deletions

View file

@ -0,0 +1,41 @@
package org.bitcoins.db
import org.bitcoins.testkit.db.TestPostgresDAOFixture
/** Runs the shared CRUD scenarios (defined on [[TestPostgresDAOFixture]])
  * against a Postgres-backed [[TestDAO]]. Each test receives a fresh DAO
  * from the fixture and asserts the scenario's boolean outcome.
  */
class CRUDTestPostgres extends TestPostgresDAOFixture {

  it must "successfully create a db row and read it back" in { dao =>
    testCreate(dao).map(ok => assert(ok))
  }

  it must "successfully create multiple db row and read them back" in { dao =>
    testCreateAll(dao).map(ok => assert(ok))
  }

  it must "successfully delete a db row" in { dao =>
    testDelete(dao).map(ok => assert(ok))
  }

  it must "successfully delete multiple db rows" in { dao =>
    testDeleteAll(dao).map(ok => assert(ok))
  }

  it must "successfully upsert a db row and read it back" in { dao =>
    testUpsert(dao).map(ok => assert(ok))
  }

  it must "successfully upsert multiple db row and read them back" in { dao =>
    testUpsertAll(dao).map(ok => assert(ok))
  }

  it must "successfully update a db row and read it back" in { dao =>
    testUpdate(dao).map(ok => assert(ok))
  }

  it must "successfully update multiple db rows and read them back" in { dao =>
    testUpdateAll(dao).map(ok => assert(ok))
  }
}

View file

@ -0,0 +1,41 @@
package org.bitcoins.db
import org.bitcoins.testkit.db.TestSQLiteDAOFixture
/** Runs the shared CRUD scenarios (defined on [[TestSQLiteDAOFixture]])
  * against a SQLite-backed [[TestDAO]]. Each test receives a fresh DAO
  * from the fixture and asserts the scenario's boolean outcome.
  */
class CRUDTestSQLite extends TestSQLiteDAOFixture {

  it must "successfully create a db row and read it back" in { dao =>
    testCreate(dao).map(ok => assert(ok))
  }

  it must "successfully create multiple db row and read them back" in { dao =>
    testCreateAll(dao).map(ok => assert(ok))
  }

  it must "successfully delete a db row" in { dao =>
    testDelete(dao).map(ok => assert(ok))
  }

  it must "successfully delete multiple db rows" in { dao =>
    testDeleteAll(dao).map(ok => assert(ok))
  }

  it must "successfully upsert a db row and read it back" in { dao =>
    testUpsert(dao).map(ok => assert(ok))
  }

  it must "successfully upsert multiple db row and read them back" in { dao =>
    testUpsertAll(dao).map(ok => assert(ok))
  }

  it must "successfully update a db row and read it back" in { dao =>
    testUpdate(dao).map(ok => assert(ok))
  }

  it must "successfully update multiple db rows and read them back" in { dao =>
    testUpdateAll(dao).map(ok => assert(ok))
  }
}

View file

@ -2,7 +2,7 @@ package org.bitcoins.db
import java.sql.SQLException
import org.bitcoins.core.util.BitcoinSLogger
import org.bitcoins.core.util.{BitcoinSLogger, FutureUtil}
import slick.dbio.{DBIOAction, NoStream}
import scala.concurrent.{ExecutionContext, Future}
@ -84,15 +84,22 @@ abstract class CRUD[T, PrimaryKeyType](implicit
}
}
/** Updates all of the given ts in the database */
// FIXME: This is a temporary fix for https://github.com/bitcoin-s/bitcoin-s/issues/1586
// This is an inefficient solution that does each update individually
def updateAll(ts: Vector[T]): Future[Vector[T]] = {
val query = findAll(ts)
val actions = ts.map(t => query.update(t))
val affectedRows: Future[Vector[Int]] =
safeDatabase.run(DBIO.sequence(actions).transactionally)
val updatedTs = findAll(ts)
affectedRows.flatMap { _ =>
safeDatabase.runVec(updatedTs.result)
def oldUpdateAll(ts: Vector[T]): Future[Vector[T]] = {
val query = findAll(ts)
val actions = ts.map(query.update)
val affectedRows: Future[Vector[Int]] =
safeDatabase.runVec(DBIO.sequence(actions).transactionally)
val updatedTs = findAll(ts)
affectedRows.flatMap { _ =>
safeDatabase.runVec(updatedTs.result)
}
}
FutureUtil.foldLeftAsync(Vector.empty[T], ts) { (accum, t) =>
oldUpdateAll(Vector(t)).map(accum ++ _)
}
}
@ -112,7 +119,7 @@ abstract class CRUD[T, PrimaryKeyType](implicit
* delete all records from the table
*/
def deleteAll(): Future[Int] =
safeDatabase.run(table.delete)
safeDatabase.run(table.delete.transactionally)
/**
* insert the record if it does not exist, update it if it does
@ -120,7 +127,14 @@ abstract class CRUD[T, PrimaryKeyType](implicit
* @param t - the record to inserted / updated
* @return t - the record that has been inserted / updated
*/
def upsert(t: T): Future[T] = upsertAll(Vector(t)).map(_.head)
/** Inserts the record `t` if it does not exist, updates it if it does, and
  * returns the row as read back from the database.
  *
  * Delegates to [[upsertAll]] with a single-element vector; if the read-back
  * yields no row, fails the future with an UpsertFailedException rather than
  * throwing NoSuchElementException via `.head`.
  */
def upsert(t: T): Future[T] = {
upsertAll(Vector(t)).map { ts =>
ts.headOption match {
case Some(updated) => updated
// Read-after-write returned nothing: surface an explicit, descriptive error.
case None => throw UpsertFailedException("Upsert failed for: " + t)
}
}
}
/** Upserts all of the given ts in the database, then returns the upserted values */
def upsertAll(ts: Vector[T]): Future[Vector[T]] = {
@ -212,3 +226,6 @@ case class SafeDatabase(jdbcProfile: JdbcProfileComponent[AppConfig])
/** Signals that a database update did not complete as expected.
  * (Presumably thrown by update helpers when no rows are affected —
  * the throwing site is outside this view; confirm against CRUD.update.) */
case class UpdateFailedException(message: String)
extends RuntimeException(message)
/** Signals that an upserted row could not be read back after being written
  * (thrown by the `headOption` check in `CRUD.upsert`). */
case class UpsertFailedException(message: String)
extends RuntimeException(message)

View file

@ -1,7 +1,5 @@
package org.bitcoins.db
import org.bitcoins.core.util.FutureUtil
import scala.concurrent.{ExecutionContext, Future}
abstract class CRUDAutoInc[T <: DbRowAutoInc[T]](implicit
@ -24,14 +22,6 @@ abstract class CRUDAutoInc[T <: DbRowAutoInc[T]](implicit
safeDatabase.runVec(actions.transactionally)
}
// FIXME: This is a temporary fix for https://github.com/bitcoin-s/bitcoin-s/issues/1586
// This is an inefficient solution that does each update individually
override def updateAll(ts: Vector[T]): Future[Vector[T]] = {
FutureUtil.foldLeftAsync(Vector.empty[T], ts) { (accum, t) =>
super.updateAll(Vector(t)).map(accum ++ _)
}
}
override def findByPrimaryKeys(
ids: Vector[Long]): Query[TableAutoInc[T], T, Seq] = {
table.filter { t =>

View file

@ -90,8 +90,9 @@ object BitcoinSTestAppConfig {
case object Wallet extends ProjectType
case object Node extends ProjectType
case object Chain extends ProjectType
case object Test extends ProjectType
val all = List(Wallet, Node, Chain)
val all = List(Wallet, Node, Chain, Test)
}
/** Generates a Typesafe config with DBs set to memory

View file

@ -8,9 +8,9 @@ import scala.util.Try
trait EmbeddedPg extends BeforeAndAfterAll { this: Suite =>
val pgEnabled: Boolean = sys.env.contains("PG_ENABLED")
lazy val pgEnabled: Boolean = sys.env.contains("PG_ENABLED")
val pg: Option[EmbeddedPostgres] =
lazy val pg: Option[EmbeddedPostgres] =
if (pgEnabled) Some(EmbeddedPostgres.start()) else None
def pgUrl(dbname: String): Option[String] =
@ -21,6 +21,7 @@ trait EmbeddedPg extends BeforeAndAfterAll { this: Suite =>
case ProjectType.Wallet => pgUrl("walletdb")
case ProjectType.Node => pgUrl("nodedb")
case ProjectType.Chain => pgUrl("chaindb")
case ProjectType.Test => pgUrl("testdb")
}
override def beforeAll(): Unit = {
@ -28,6 +29,7 @@ trait EmbeddedPg extends BeforeAndAfterAll { this: Suite =>
executePgSql(s"CREATE DATABASE chaindb")
executePgSql(s"CREATE DATABASE walletdb")
executePgSql(s"CREATE DATABASE nodedb")
executePgSql(s"CREATE DATABASE testdb")
}
override def afterAll(): Unit = {
@ -35,6 +37,7 @@ trait EmbeddedPg extends BeforeAndAfterAll { this: Suite =>
Try(executePgSql(s"DROP DATABASE nodedb"))
Try(executePgSql(s"DROP DATABASE walletdb"))
Try(executePgSql(s"DROP DATABASE chaindb"))
Try(executePgSql(s"DROP DATABASE testdb"))
Try(pg.foreach(_.close()))
()
}

View file

@ -0,0 +1,116 @@
package org.bitcoins.testkit.db
import java.nio.file.{Files, Path}
import akka.actor.ActorSystem
import com.typesafe.config.Config
import org.bitcoins.core.util.FutureUtil
import org.bitcoins.db._
import scodec.bits.ByteVector
import slick.lifted.ProvenShape
import scala.concurrent.{ExecutionContext, Future}
/** Test-only helpers for wiring up the `test` database module. */
object DbTestUtil {
/** Builds a [[TestDbManagement]] backed by the given [[TestAppConfig]],
  * running its database actions on the actor system's dispatcher. */
def createTestDbManagement(testAppConfig: TestAppConfig)(implicit
system: ActorSystem): TestDbManagement = {
new TestDbManagement with JdbcProfileComponent[TestAppConfig] {
override val ec: ExecutionContext = system.dispatcher
override def appConfig: TestAppConfig = testAppConfig
}
}
}
/** [[DbManagement]] implementation whose only managed table is the one
  * owned by [[TestDAO]]. Mixed into [[TestAppConfig]] below. */
trait TestDbManagement extends DbManagement {
_: JdbcProfileComponent[TestAppConfig] =>
import profile.api._
// Execution context used to construct the DAO below.
def ec: ExecutionContext
// lazy: defer DAO/table construction until the profile and config are wired in.
private lazy val testTable: TableQuery[Table[_]] =
TestDAO()(ec, appConfig).table
override lazy val allTables: List[TableQuery[Table[_]]] =
List(testTable)
}
/** Minimal [[AppConfig]] used only by the CRUD test suites. It owns the
  * single "test" module and knows how to create its table on initialize(). */
case class TestAppConfig(
private val directory: Path,
override val useLogbackConf: Boolean,
private val conf: Config*)(implicit override val ec: ExecutionContext)
extends AppConfig
with TestDbManagement
with JdbcProfileComponent[TestAppConfig] {
// Extra configs passed by tests take precedence as overrides.
override protected[bitcoins] def configOverrides: List[Config] = conf.toList
// "test" — presumably used as the config/db module identifier; confirm
// against AppConfig's handling of moduleName.
override protected[bitcoins] def moduleName: String = "test"
override protected[bitcoins] type ConfigType = TestAppConfig
// Rebuilds this config with a different override list (same datadir).
override protected[bitcoins] def newConfigOfType(
configs: Seq[Config]): TestAppConfig =
TestAppConfig(directory, useLogbackConf, configs: _*)
protected[bitcoins] def baseDatadir: Path = directory
override def appConfig: TestAppConfig = this
/** Creates the datadir (if missing) and the test table. */
override def initialize()(implicit ec: ExecutionContext): Future[Unit] = {
logger.debug(s"Initializing test setup")
if (Files.notExists(datadir)) {
Files.createDirectories(datadir)
}
createTable(TestDAO()(ec, this).table)
}
/** Starts the associated application */
override def start(): Future[Unit] = FutureUtil.unit
}
/** Trivial row type for exercising CRUD: a string primary key plus a blob. */
case class TestDb(pk: String, data: ByteVector)
/** Slick DAO over `test_table`, keyed by the `pk` string column. */
case class TestDAO()(implicit
val ec: ExecutionContext,
override val appConfig: TestAppConfig)
extends CRUD[TestDb, String]
with SlickUtil[TestDb, String] {
import profile.api._
// Column mappers provide the ByteVector <-> DB column conversion used below.
private val mappers = new org.bitcoins.db.DbCommonsColumnMappers(profile)
import mappers._
override val table: TableQuery[TestTable] = TableQuery[TestTable]
/** Inserts all rows; no auto-increment key, so rows round-trip as given. */
override def createAll(ts: Vector[TestDb]): Future[Vector[TestDb]] =
createAllNoAutoInc(ts, safeDatabase)
/** Query selecting the rows whose pk is in `ts`. */
override protected def findByPrimaryKeys(
ts: Vector[String]): Query[TestTable, TestDb, Seq] = {
table.filter(_.pk.inSet(ts))
}
/** Query selecting the single row with pk `t`. */
override def findByPrimaryKey(t: String): Query[TestTable, TestDb, Seq] = {
table.filter(_.pk === t)
}
/** Finds the given rows by their primary keys. */
override def findAll(ts: Vector[TestDb]): Query[TestTable, TestDb, Seq] =
findByPrimaryKeys(ts.map(_.pk))
/** Slick table definition backing this DAO ("test_table"). */
class TestTable(tag: Tag) extends Table[TestDb](tag, "test_table") {
def pk: Rep[String] = column[String]("pk", O.PrimaryKey)
def data: Rep[ByteVector] = column[ByteVector]("data")
def * : ProvenShape[TestDb] =
(pk, data) <> (TestDb.tupled, TestDb.unapply)
}
}

View file

@ -0,0 +1,167 @@
package org.bitcoins.testkit.db
import org.bitcoins.db.AppConfig
import org.bitcoins.testkit.BitcoinSTestAppConfig.ProjectType
import org.bitcoins.testkit.fixtures.BitcoinSFixture
import org.bitcoins.testkit.{BitcoinSTestAppConfig, EmbeddedPg}
import org.scalatest._
import org.scalatest.flatspec.FixtureAsyncFlatSpec
import scodec.bits._
import scala.concurrent.Future
/** Shared ScalaTest fixture for the generic CRUD test battery.
  *
  * Provides a fresh [[TestDAO]] (with its table created) to every test and
  * drops all tables afterwards. Concrete subclasses pick the backing
  * database by overriding `pgEnabled` (see [[TestSQLiteDAOFixture]] and
  * [[TestPostgresDAOFixture]]).
  */
sealed trait TestDAOFixture
    extends FixtureAsyncFlatSpec
    with BeforeAndAfterAll
    with BitcoinSFixture
    with EmbeddedPg {

  final override type FixtureParam = TestDAO

  implicit private val testConfig: TestAppConfig = TestAppConfig(
    BitcoinSTestAppConfig.tmpDir(),
    useLogbackConf = true,
    BitcoinSTestAppConfig.configWithEmbeddedDb(Some(ProjectType.Test), pgUrl))

  override def beforeAll(): Unit = {
    // Guard against accidentally pointing the test config at a real datadir.
    AppConfig.throwIfDefaultDatadir(testConfig)
    super.beforeAll()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    testConfig.stop()
    ()
  }

  def withFixture(test: OneArgAsyncTest): FutureOutcome = {
    makeFixture(
      build = () => testConfig.initialize().map(_ => TestDAO()),
      destroy = () => dropAll()
    )(test)
  }

  /** Drops every table, including flyway's bookkeeping table, between tests. */
  def dropAll(): Future[Unit] =
    for {
      _ <- testConfig.dropTable("flyway_schema_history")
      _ <- testConfig.dropAll()
    } yield ()

  // Fixed rows reused across the individual CRUD scenarios below.
  val testDb: TestDb = TestDb("abc", hex"0054")

  val testDbs: Vector[TestDb] = Vector(TestDb("abc", hex"0050"),
                                       TestDb("abc1", hex"0051"),
                                       TestDb("abc2", hex"0052"),
                                       TestDb("abc3", hex"0053"))

  val updatedDb: TestDb = testDb.copy(data = hex"0000")
  val updatedDbs: Vector[TestDb] = testDbs.map(_.copy(data = hex"0000"))

  /** Reads back the rows for the primary keys of `dbs`, preserving order and
    * dropping absent keys. Replaces the hard-coded per-id read-back that was
    * previously duplicated in each *All scenario. */
  private def readAll(
      testDAO: TestDAO,
      dbs: Vector[TestDb]): Future[Vector[TestDb]] =
    Future
      .sequence(dbs.map(db => testDAO.read(db.pk)))
      .map(_.flatten)

  /** Creates a single row and verifies it reads back unchanged. */
  def testCreate(testDAO: TestDAO): Future[Boolean] = {
    for {
      _ <- testDAO.create(testDb)
      read <- testDAO.read(testDb.pk)
    } yield read.contains(testDb)
  }

  /** Creates several rows and verifies they all read back unchanged. */
  def testCreateAll(testDAO: TestDAO): Future[Boolean] = {
    for {
      _ <- testDAO.createAll(testDbs)
      read <- readAll(testDAO, testDbs)
    } yield read == testDbs
  }

  /** Creates then deletes a row and verifies it is gone. */
  def testDelete(testDAO: TestDAO): Future[Boolean] = {
    for {
      create <- testCreate(testDAO)
      _ = assert(create)
      _ <- testDAO.delete(testDb)
      read2 <- testDAO.read(testDb.pk)
    } yield read2.isEmpty
  }

  /** Creates several rows, deletes everything, and verifies the table is empty. */
  def testDeleteAll(testDAO: TestDAO): Future[Boolean] = {
    for {
      created <- testCreateAll(testDAO)
      _ = assert(created)
      _ <- testDAO.deleteAll()
      all <- testDAO.findAll()
    } yield all.isEmpty
  }

  /** Upserts a new row, then a modified copy, verifying both states. */
  def testUpsert(testDAO: TestDAO): Future[Boolean] = {
    for {
      _ <- testDAO.upsert(testDb)
      read <- testDAO.read(testDb.pk)
      _ = assert(read.contains(testDb))
      _ <- testDAO.upsert(updatedDb)
      read2 <- testDAO.read(testDb.pk)
    } yield read2.contains(updatedDb)
  }

  /** Upserts several new rows, then modified copies, verifying both states. */
  def testUpsertAll(testDAO: TestDAO): Future[Boolean] = {
    for {
      _ <- testDAO.upsertAll(testDbs)
      read <- readAll(testDAO, testDbs)
      _ = assert(read == testDbs)
      _ <- testDAO.upsertAll(updatedDbs)
      read2 <- readAll(testDAO, updatedDbs)
    } yield read2 == updatedDbs
  }

  /** Creates a row, updates it, and verifies the new value reads back. */
  def testUpdate(testDAO: TestDAO): Future[Boolean] = {
    for {
      created <- testCreate(testDAO)
      _ = assert(created)
      _ <- testDAO.update(updatedDb)
      read2 <- testDAO.read(updatedDb.pk)
    } yield read2.contains(updatedDb)
  }

  /** Creates several rows, updates all of them, and verifies the new values. */
  def testUpdateAll(testDAO: TestDAO): Future[Boolean] = {
    for {
      created <- testCreateAll(testDAO)
      _ = assert(created)
      _ <- testDAO.updateAll(updatedDbs)
      read2 <- readAll(testDAO, updatedDbs)
    } yield read2 == updatedDbs
  }
}
/** CRUD fixture forced onto SQLite by disabling the embedded Postgres. */
trait TestSQLiteDAOFixture extends TestDAOFixture {
override lazy val pgEnabled: Boolean = false
}
/** CRUD fixture forced onto the embedded Postgres instance. */
trait TestPostgresDAOFixture extends TestDAOFixture {
override lazy val pgEnabled: Boolean = true
}