Mirror of https://github.com/ACINQ/eclair.git (synced 2025-03-13 19:37:35 +01:00)

Merge branch 'master' into android

commit e0f99f9714
52 changed files with 1928 additions and 462 deletions
@@ -26,19 +26,58 @@ where OPTIONS can be:
  -h                   Show this help
  -s                   Some commands can print a trimmed JSON

and COMMAND is one of:
  getinfo, connect, disconnect, open, close, forceclose, updaterelayfee,
  peers, channels, channel, allnodes, allchannels, allupdates
  findroute, findroutetonode, parseinvoice, payinvoice, sendtonode,
  sendtoroute, getsentinfo, createinvoice, getinvoice, listinvoices,
  listpendinginvoices, getreceivedinfo, audit, networkfees,
  channelstats, usablebalances

and COMMAND is one of the available commands:

  === Node ===
  - getinfo
  - connect
  - disconnect
  - peers
  - allnodes
  - audit

  === Channel ===
  - open
  - close
  - forceclose
  - channel
  - channels
  - allchannels
  - allupdates
  - channelstats
  - networkfees
  - updaterelayfee

  === Path-finding ===
  - findroute
  - findroutetonode
  - networkstats

  === Invoice ===
  - createinvoice
  - getinvoice
  - listinvoices
  - listpendinginvoices
  - parseinvoice

  === Payment ===
  - getnewaddress
  - usablebalances
  - onchainbalance
  - payinvoice
  - sendtonode
  - sendtoroute
  - sendonchain
  - getsentinfo
  - getreceivedinfo
  - onchaintransactions

Examples
--------
  eclair-cli -a localhost:1234 peers                   list the peers of a node hosted on localhost:1234
  eclair-cli close --channelId=006fb...                closes the channel with id 006fb...

  eclair-cli -a localhost:1234 peers                   list the peers of a node hosted on localhost:1234
  eclair-cli connect --nodeId=03864e...                connect to node with id 03864e...
  eclair-cli open --nodeId=... --fundingSatoshis=...   open a channel to a given node
  eclair-cli close --channelId=006fb...                closes the channel with id 006fb...

Full documentation here: <https://acinq.github.io/eclair>" 1>&2;
exit 1;
@@ -66,11 +66,13 @@ eclair {
  max-to-local-delay-blocks = 2016 // maximum number of blocks that we are ready to accept for our own delayed outputs (2016 ~ 2 weeks)
  mindepth-blocks = 3
  expiry-delta-blocks = 144
  // When we receive the pre-image for an HTLC and want to fulfill it but the upstream peer stops responding, we want to
  // When we receive the preimage for an HTLC and want to fulfill it but the upstream peer stops responding, we want to
  // avoid letting its HTLC-timeout transaction become enforceable on-chain (otherwise there is a race condition between
  // our HTLC-success and their HTLC-timeout).
  // We will close the channel when the HTLC-timeout will happen in less than this number.
  fulfill-safety-before-timeout-blocks = 6
  // NB: this number effectively reduces the expiry-delta-blocks, so you may want to take that into account and increase
  // expiry-delta-blocks.
  fulfill-safety-before-timeout-blocks = 24

  fee-base-msat = 1000
  fee-proportional-millionths = 100 // fee charged per transferred satoshi in millionths of a satoshi (100 = 0.01%)
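To make the new safety margin concrete: once the preimage of an incoming HTLC is known, the node must force-close before the upstream HTLC-timeout becomes enforceable. A minimal illustrative sketch of that decision (not eclair code; `currentBlockHeight`, `htlcExpiry` and `fulfillSafetyBlocks` are assumed names):

// Illustrative sketch only: when a node would force-close to protect a fulfilled HTLC.
// `htlcExpiry` is the absolute CLTV expiry of the incoming HTLC, in blocks (assumed name).
def shouldForceClose(currentBlockHeight: Long, htlcExpiry: Long, fulfillSafetyBlocks: Int): Boolean =
  currentBlockHeight >= htlcExpiry - fulfillSafetyBlocks

// Example: with fulfill-safety-before-timeout-blocks = 24 and an HTLC expiring at block 630000,
// the channel is force-closed from block 629976 onwards if the HTLC is still not irrevocably settled.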
@@ -150,6 +152,11 @@ eclair {
  ratio-cltv = 0.15 // when computing the weight for a channel, consider its CLTV delta in this proportion
  ratio-channel-age = 0.35 // when computing the weight for a channel, consider its AGE in this proportion
  ratio-channel-capacity = 0.5 // when computing the weight for a channel, consider its CAPACITY in this proportion

  mpp {
    min-amount-satoshis = 15000 // minimum amount sent via partial HTLCs
    max-parts = 6 // maximum number of HTLCs sent per payment: increasing this value will impact performance
  }
}
}
@@ -54,7 +54,7 @@ class CheckElectrumSetup(datadir: File,

  val config = system.settings.config.getConfig("eclair")
  val chain = config.getString("chain")
  val keyManager = new LocalKeyManager(randomBytes(32), NodeParams.makeChainHash(chain))
  val keyManager = new LocalKeyManager(randomBytes(32), NodeParams.hashFromChain(chain))
  val database = db match {
    case Some(d) => d
    case None => Databases.sqliteJDBC(new File(datadir, chain))
@@ -55,6 +55,7 @@ case class CltvExpiryDelta(private val underlying: Int) extends Ordered[CltvExpi
  def +(other: Int): CltvExpiryDelta = CltvExpiryDelta(underlying + other)
  def +(other: CltvExpiryDelta): CltvExpiryDelta = CltvExpiryDelta(underlying + other.underlying)
  def -(other: CltvExpiryDelta): CltvExpiryDelta = CltvExpiryDelta(underlying - other.underlying)
  def *(m: Int): CltvExpiryDelta = CltvExpiryDelta(underlying * m)
  def compare(other: CltvExpiryDelta): Int = underlying.compareTo(other.underlying)
  def toInt: Int = underlying
  // @formatter:on
@@ -24,6 +24,9 @@ import akka.util.Timeout
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.bitcoin.{ByteVector32, Satoshi}
import fr.acinq.eclair.TimestampQueryFilters._
import fr.acinq.eclair.blockchain.OnChainBalance
import fr.acinq.eclair.blockchain.bitcoind.BitcoinCoreWallet
import fr.acinq.eclair.blockchain.bitcoind.BitcoinCoreWallet.WalletTransaction
import fr.acinq.eclair.channel.Register.{Forward, ForwardShortId}
import fr.acinq.eclair.channel._
import fr.acinq.eclair.db.{IncomingPayment, NetworkFee, OutgoingPayment, Stats}

@@ -42,7 +45,7 @@ import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.ClassTag

case class GetInfoResponse(version: String, nodeId: PublicKey, alias: String, color: String, features: Features, chainHash: ByteVector32, blockHeight: Int, publicAddresses: Seq[NodeAddress])
case class GetInfoResponse(version: String, nodeId: PublicKey, alias: String, color: String, features: Features, chainHash: ByteVector32, network: String, blockHeight: Int, publicAddresses: Seq[NodeAddress])

case class AuditResponse(sent: Seq[PaymentSent], received: Seq[PaymentReceived], relayed: Seq[PaymentRelayed])
@@ -95,6 +98,8 @@ trait Eclair {

  def sentInfo(id: Either[UUID, ByteVector32])(implicit timeout: Timeout): Future[Seq[OutgoingPayment]]

  def sendOnChain(address: String, amount: Satoshi, confirmationTarget: Long): Future[ByteVector32]

  def findRoute(targetNodeId: PublicKey, amount: MilliSatoshi, assistedRoutes: Seq[Seq[PaymentRequest.ExtraHop]] = Seq.empty)(implicit timeout: Timeout): Future[RouteResponse]

  def sendToRoute(amount: MilliSatoshi, recipientAmount_opt: Option[MilliSatoshi], externalId_opt: Option[String], parentId_opt: Option[UUID], invoice: PaymentRequest, finalCltvExpiryDelta: CltvExpiryDelta, route: Seq[PublicKey], trampolineSecret_opt: Option[ByteVector32] = None, trampolineFees_opt: Option[MilliSatoshi] = None, trampolineExpiryDelta_opt: Option[CltvExpiryDelta] = None, trampolineNodes_opt: Seq[PublicKey] = Nil)(implicit timeout: Timeout): Future[SendPaymentToRouteResponse]
@@ -119,9 +124,14 @@ trait Eclair {

  def allUpdates(nodeId_opt: Option[PublicKey])(implicit timeout: Timeout): Future[Iterable[ChannelUpdate]]

  def getInfoResponse()(implicit timeout: Timeout): Future[GetInfoResponse]
  def getInfo()(implicit timeout: Timeout): Future[GetInfoResponse]

  def usableBalances()(implicit timeout: Timeout): Future[Iterable[UsableBalance]]

  def onChainBalance(): Future[OnChainBalance]

  def onChainTransactions(count: Int, skip: Int): Future[Iterable[WalletTransaction]]

}

class EclairImpl(appKit: Kit) extends Eclair {
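The hunk above widens the Eclair API trait with on-chain wallet calls (onChainBalance, onChainTransactions, sendOnChain). A minimal sketch of how a caller could exercise them, assuming an already-built instance such as `new EclairImpl(kit)`; the address and amounts below are placeholders:

import fr.acinq.bitcoin.Satoshi
import scala.concurrent.ExecutionContext.Implicits.global

// Sketch only: query the on-chain balance, then send funds on-chain.
def sweepExample(eclair: Eclair): Unit = {
  eclair.onChainBalance().foreach { balance =>
    println(s"confirmed=${balance.confirmed} unconfirmed=${balance.unconfirmed}")
  }
  // hypothetical address and amount; confirmationTarget is the block target used for fee estimation
  eclair.sendOnChain(address = "bcrt1q...", amount = Satoshi(50000), confirmationTarget = 6)
    .foreach(txid => println(s"sent, txid=$txid"))
}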
@@ -208,6 +218,27 @@ class EclairImpl(appKit: Kit) extends Eclair {

  override def newAddress(): Future[String] = Future.failed(new IllegalArgumentException("this call is only available with a bitcoin core backend"))

  override def onChainBalance(): Future[OnChainBalance] = {
    appKit.wallet match {
      case w: BitcoinCoreWallet => w.getBalance
      case _ => Future.failed(new IllegalArgumentException("this call is only available with a bitcoin core backend"))
    }
  }

  override def onChainTransactions(count: Int, skip: Int): Future[Iterable[WalletTransaction]] = {
    appKit.wallet match {
      case w: BitcoinCoreWallet => w.listTransactions(count, skip)
      case _ => Future.failed(new IllegalArgumentException("this call is only available with a bitcoin core backend"))
    }
  }

  override def sendOnChain(address: String, amount: Satoshi, confirmationTarget: Long): Future[ByteVector32] = {
    appKit.wallet match {
      case w: BitcoinCoreWallet => w.sendToAddress(address, amount, confirmationTarget)
      case _ => Future.failed(new IllegalArgumentException("this call is only available with a bitcoin core backend"))
    }
  }

  override def findRoute(targetNodeId: PublicKey, amount: MilliSatoshi, assistedRoutes: Seq[Seq[PaymentRequest.ExtraHop]] = Seq.empty)(implicit timeout: Timeout): Future[RouteResponse] = {
    val maxFee = RouteCalculation.getDefaultRouteParams(appKit.nodeParams.routerConf).getMaxFee(amount)
    (appKit.router ? RouteRequest(appKit.nodeParams.nodeId, targetNodeId, amount, maxFee, assistedRoutes)).mapTo[RouteResponse]
@@ -319,7 +350,7 @@ class EclairImpl(appKit: Kit) extends Eclair {
    Future.fold(commands)(Map.empty[ApiTypes.ChannelIdentifier, Either[Throwable, T]])(_ + _)
  }

  override def getInfoResponse()(implicit timeout: Timeout): Future[GetInfoResponse] = Future.successful(
  override def getInfo()(implicit timeout: Timeout): Future[GetInfoResponse] = Future.successful(
    GetInfoResponse(
      version = Kit.getVersionLong,
      color = appKit.nodeParams.color.toString,

@@ -327,6 +358,7 @@ class EclairImpl(appKit: Kit) extends Eclair {
      nodeId = appKit.nodeParams.nodeId,
      alias = appKit.nodeParams.alias,
      chainHash = appKit.nodeParams.chainHash,
      network = NodeParams.chainFromHash(appKit.nodeParams.chainHash),
      blockHeight = appKit.nodeParams.currentBlockHeight.toInt,
      publicAddresses = appKit.nodeParams.publicAddresses)
  )
@@ -124,14 +124,15 @@ object NodeParams {
    }
  }

  def makeChainHash(chain: String): ByteVector32 = {
    chain match {
      case "regtest" => Block.RegtestGenesisBlock.hash
      case "testnet" => Block.TestnetGenesisBlock.hash
      case "mainnet" => Block.LivenetGenesisBlock.hash
      case invalid => throw new RuntimeException(s"invalid chain '$invalid'")
    }
  }
  private val chain2Hash: Map[String, ByteVector32] = Map(
    "regtest" -> Block.RegtestGenesisBlock.hash,
    "testnet" -> Block.TestnetGenesisBlock.hash,
    "mainnet" -> Block.LivenetGenesisBlock.hash
  )

  def hashFromChain(chain: String): ByteVector32 = chain2Hash.getOrElse(chain, throw new RuntimeException(s"invalid chain '$chain'"))

  def chainFromHash(chainHash: ByteVector32): String = chain2Hash.map(_.swap).getOrElse(chainHash, throw new RuntimeException(s"invalid chainHash '$chainHash'"))

  def makeNodeParams(config: Config, keyManager: KeyManager, torAddress_opt: Option[NodeAddress], database: Databases, blockCount: AtomicLong, feeEstimator: FeeEstimator): NodeParams = {
    // check configuration for keys that have been renamed
@@ -153,7 +154,7 @@ object NodeParams {
    require(!isFeatureByteVector, "configuration key 'features' have moved from bytevector to human readable (ex: 'feature-name' = optional/mandatory)")

    val chain = config.getString("chain")
    val chainHash = makeChainHash(chain)
    val chainHash = hashFromChain(chain)

    val color = ByteVector.fromValidHex(config.getString("node-color"))
    require(color.size == 3, "color should be a 3-bytes hex buffer")
@@ -180,7 +181,7 @@ object NodeParams {

    val expiryDeltaBlocks = CltvExpiryDelta(config.getInt("expiry-delta-blocks"))
    val fulfillSafetyBeforeTimeoutBlocks = CltvExpiryDelta(config.getInt("fulfill-safety-before-timeout-blocks"))
    require(fulfillSafetyBeforeTimeoutBlocks < expiryDeltaBlocks, "fulfill-safety-before-timeout-blocks must be smaller than expiry-delta-blocks")
    require(fulfillSafetyBeforeTimeoutBlocks * 2 < expiryDeltaBlocks, "fulfill-safety-before-timeout-blocks must be smaller than expiry-delta-blocks / 2 because it effectively reduces that delta; if you want to increase this value, you may want to increase expiry-delta-blocks as well")

    val nodeAlias = config.getString("node-alias")
    require(nodeAlias.getBytes("UTF-8").length <= 32, "invalid alias, too long (max allowed 32 bytes)")
@@ -294,7 +295,9 @@ object NodeParams {
      searchHeuristicsEnabled = config.getBoolean("router.path-finding.heuristics-enable"),
      searchRatioCltv = config.getDouble("router.path-finding.ratio-cltv"),
      searchRatioChannelAge = config.getDouble("router.path-finding.ratio-channel-age"),
      searchRatioChannelCapacity = config.getDouble("router.path-finding.ratio-channel-capacity")
      searchRatioChannelCapacity = config.getDouble("router.path-finding.ratio-channel-capacity"),
      mppMinPartAmount = Satoshi(config.getLong("router.path-finding.mpp.min-amount-satoshis")).toMilliSatoshi,
      mppMaxParts = config.getInt("router.path-finding.mpp.max-parts")
    ),
    socksProxy_opt = socksProxy_opt,
    maxPaymentAttempts = config.getInt("max-payment-attempts"),
@@ -79,7 +79,7 @@ class Setup(datadir: File,
    val seed = seed_opt.getOrElse(NodeParams.getSeed(datadir))
    val chain = config.getString("chain")
    val chaindir = new File(datadir, chain)
    val keyManager = new LocalKeyManager(seed, NodeParams.makeChainHash(chain))
    val keyManager = new LocalKeyManager(seed, NodeParams.hashFromChain(chain))

    val database = db match {
      case Some(d) => d
@@ -188,8 +188,9 @@ class Setup(datadir: File,
        case feerates: FeeratesPerKB =>
          feeratesPerKB.set(feerates)
          feeratesPerKw.set(FeeratesPerKw(feerates))
          channel.Monitoring.Metrics.LocalFeeratePerKw.withoutTags().update(feeratesPerKw.get.feePerBlock(nodeParams.onChainFeeConf.feeTargets.commitmentBlockTarget))
          system.eventStream.publish(CurrentFeerates(feeratesPerKw.get))
          logger.info(s"current feeratesPerKB=${feeratesPerKB.get()} feeratesPerKw=${feeratesPerKw.get()}")
          logger.info(s"current feeratesPerKB=${feeratesPerKB.get} feeratesPerKw=${feeratesPerKw.get}")
          feeratesRetrieved.trySuccess(Done)
      })
      _ <- feeratesRetrieved.future
@@ -52,7 +52,7 @@ class SyncLiteSetup(datadir: File,

  val config = system.settings.config.getConfig("eclair")
  val chain = config.getString("chain")
  val keyManager = new LocalKeyManager(randomBytes32, NodeParams.makeChainHash(chain))
  val keyManager = new LocalKeyManager(randomBytes32, NodeParams.hashFromChain(chain))
  val database = db match {
    case Some(d) => d
    case None => Databases.sqliteJDBC(new File(datadir, chain))
@@ -16,18 +16,18 @@

package fr.acinq.eclair.blockchain

import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey}
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.bitcoin.{Satoshi, Transaction}
import scodec.bits.ByteVector

import scala.concurrent.Future

/**
 * Created by PM on 06/07/2017.
 */
trait EclairWallet {

  def getBalance: Future[Satoshi]
  def getBalance: Future[OnChainBalance]

  def getReceiveAddress: Future[String]
@@ -36,37 +36,31 @@ trait EclairWallet {
  def makeFundingTx(pubkeyScript: ByteVector, amount: Satoshi, feeRatePerKw: Long): Future[MakeFundingTxResponse]

  /**
   * Committing *must* include publishing the transaction on the network.
   *
   * We need to be very careful here, we don't want to consider a commit 'failed' if we are not absolutely sure that the
   * funding tx won't end up on the blockchain: if that happens and we have cancelled the channel, then we would lose our
   * funds!
   *
   * @param tx
   * @return true if success
   *         false IF AND ONLY IF *HAS NOT BEEN PUBLISHED* otherwise funds are at risk!!!
   */
   * Committing *must* include publishing the transaction on the network.
   *
   * We need to be very careful here, we don't want to consider a commit 'failed' if we are not absolutely sure that the
   * funding tx won't end up on the blockchain: if that happens and we have cancelled the channel, then we would lose our
   * funds!
   *
   * @return true if success
   *         false IF AND ONLY IF *HAS NOT BEEN PUBLISHED* otherwise funds are at risk!!!
   */
  def commit(tx: Transaction): Future[Boolean]

  /**
   * Cancels this transaction: this probably translates to "release locks on utxos".
   *
   * @param tx
   * @return
   */
   * Cancels this transaction: this probably translates to "release locks on utxos".
   */
  def rollback(tx: Transaction): Future[Boolean]

  /**
   * Tests whether the inputs of the provided transaction have been spent by another transaction.
   *
   * Implementations may always return false if they don't want to implement it
   *
   * @param tx
   * @return
   */
   * Tests whether the inputs of the provided transaction have been spent by another transaction.
   *
   * Implementations may always return false if they don't want to implement it
   */
  def doubleSpent(tx: Transaction): Future[Boolean]

}

final case class OnChainBalance(confirmed: Satoshi, unconfirmed: Satoshi)

final case class MakeFundingTxResponse(fundingTx: Transaction, fundingTxOutputIndex: Int, fee: Satoshi)
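getBalance now reports confirmed and unconfirmed funds separately via OnChainBalance instead of a single Satoshi total. A small sketch (not part of the diff) showing how a caller can recover the old aggregate figure:

import fr.acinq.bitcoin.Satoshi
import fr.acinq.eclair.blockchain.EclairWallet
import scala.concurrent.{ExecutionContext, Future}

// Sketch only: equivalent of the previous Future[Satoshi] balance, built on the new API.
def totalBalance(wallet: EclairWallet)(implicit ec: ExecutionContext): Future[Satoshi] =
  wallet.getBalance.map(b => b.confirmed + b.unconfirmed)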
@@ -16,19 +16,16 @@

package fr.acinq.eclair.blockchain.bitcoind

import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey}
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.bitcoin._
import fr.acinq.eclair._
import fr.acinq.eclair.blockchain._
import fr.acinq.eclair.blockchain.bitcoind.rpc.{BitcoinJsonRPCClient, Error, ExtendedBitcoinClient, JsonRPCError}
import fr.acinq.eclair.transactions.Transactions
import grizzled.slf4j.Logging
import org.json4s.DefaultFormats
import org.json4s.JsonAST._
import org.json4s.jackson.Serialization
import scodec.bits.ByteVector

import scala.compat.Platform
import scala.concurrent.{ExecutionContext, Future}

/**
@@ -46,7 +43,7 @@ class BitcoinCoreWallet(rpcClient: BitcoinJsonRPCClient)(implicit ec: ExecutionC
      val JString(hex) = json \ "hex"
      val JInt(changepos) = json \ "changepos"
      val JDecimal(fee) = json \ "fee"
      FundTransactionResponse(Transaction.read(hex), changepos.intValue, Satoshi(fee.bigDecimal.scaleByPowerOfTen(8).longValue))
      FundTransactionResponse(Transaction.read(hex), changepos.intValue, toSatoshi(fee))
    })
  }
@@ -67,13 +64,74 @@ class BitcoinCoreWallet(rpcClient: BitcoinJsonRPCClient)(implicit ec: ExecutionC

  def publishTransaction(tx: Transaction)(implicit ec: ExecutionContext): Future[String] = bitcoinClient.publishTransaction(tx)

  def publishTransaction(hex: String)(implicit ec: ExecutionContext): Future[String] = rpcClient.invoke("sendrawtransaction", hex) collect { case JString(txid) => txid }
  def listTransactions(count: Int, skip: Int): Future[List[WalletTransaction]] = rpcClient.invoke("listtransactions", "*", count, skip).map {
    case JArray(txs) => txs.map(tx => {
      val JString(address) = tx \ "address"
      val JDecimal(amount) = tx \ "amount"
      // fee is optional and only included for sent transactions
      val fee = tx \ "fee" match {
        case JDecimal(fee) => toSatoshi(fee)
        case _ => Satoshi(0)
      }
      val JInt(confirmations) = tx \ "confirmations"
      // while transactions are still in the mempool, block hash will not be included
      val blockHash = tx \ "blockhash" match {
        case JString(blockHash) => ByteVector32.fromValidHex(blockHash)
        case _ => ByteVector32.Zeroes
      }
      val JString(txid) = tx \ "txid"
      val JInt(timestamp) = tx \ "time"
      WalletTransaction(address, toSatoshi(amount), fee, blockHash, confirmations.toLong, ByteVector32.fromValidHex(txid), timestamp.toLong)
    }).reverse
    case _ => Nil
  }

  def unlockOutpoints(outPoints: Seq[OutPoint])(implicit ec: ExecutionContext): Future[Boolean] = rpcClient.invoke("lockunspent", true, outPoints.toList.map(outPoint => Utxo(outPoint.txid.toString, outPoint.index))) collect { case JBool(result) => result }
  def sendToAddress(address: String, amount: Satoshi, confirmationTarget: Long): Future[ByteVector32] = {
    rpcClient.invoke(
      "sendtoaddress",
      address,
      amount.toBtc.toBigDecimal,
      "sent via eclair",
      "",
      false, // subtractfeefromamount
      true, // replaceable
      confirmationTarget).collect {
      case JString(txid) => ByteVector32.fromValidHex(txid)
    }
  }

  def isTransactionOutputSpendable(txId: String, outputIndex: Int, includeMempool: Boolean)(implicit ec: ExecutionContext): Future[Boolean] = rpcClient.invoke("gettxout", txId, outputIndex, includeMempool) collect { case j => j != JNull }
  /**
   *
   * @param outPoints outpoints to unlock
   * @return true if all outpoints were successfully unlocked, false otherwise
   */
  def unlockOutpoints(outPoints: Seq[OutPoint])(implicit ec: ExecutionContext): Future[Boolean] = {
    // we unlock utxos one by one and not as a list as it would fail at the first utxo that is not actually locked and the rest would not be processed
    val futures = outPoints
      .map(outPoint => Utxo(outPoint.txid, outPoint.index))
      .map(utxo => rpcClient
        .invoke("lockunspent", true, List(utxo))
        .mapTo[JBool]
        .map {
          case JBool(result) => result
        }
        .recover {
          case JsonRPCError(error) if error.message.contains("expected locked output") =>
            true // we consider that the outpoint was successfully unlocked (since it was not locked to begin with)
          case t =>
            logger.warn(s"Cannot unlock utxo=$utxo", t)
            false
        })
    val future = Future.sequence(futures)
    // return true if all outpoints were unlocked, false otherwise
    future.map(_.forall(b => b))
  }

  override def getBalance: Future[Satoshi] = rpcClient.invoke("getbalance") collect { case JDecimal(balance) => Satoshi(balance.bigDecimal.scaleByPowerOfTen(8).longValue()) }
  override def getBalance: Future[OnChainBalance] = rpcClient.invoke("getbalances").map(json => {
    val JDecimal(confirmed) = json \ "mine" \ "trusted"
    val JDecimal(unconfirmed) = json \ "mine" \ "untrusted_pending"
    OnChainBalance(toSatoshi(confirmed), toSatoshi(unconfirmed))
  })

  override def getReceiveAddress: Future[String] = for {
    JString(address) <- rpcClient.invoke("getnewaddress")
@@ -118,35 +176,41 @@ class BitcoinCoreWallet(rpcClient: BitcoinJsonRPCClient)(implicit ec: ExecutionC

  override def commit(tx: Transaction): Future[Boolean] = publishTransaction(tx)
    .map(_ => true) // if bitcoind says OK, then we consider the tx successfully published
    .recoverWith { case JsonRPCError(e) =>
      logger.warn(s"txid=${tx.txid} error=$e")
      bitcoinClient.getTransaction(tx.txid).map(_ => true).recover { case _ => false } // if we get a parseable error from bitcoind AND the tx is NOT in the mempool/blockchain, then we consider that the tx was not published
    }
    .recoverWith {
      case e =>
        logger.warn(s"txid=${tx.txid} error=$e")
        bitcoinClient.getTransaction(tx.txid)
          .map(_ => true) // tx is in the mempool, we consider that it was published
          .recoverWith {
            case _ =>
              rollback(tx).map { _ => false }.recover { case _ => false } // we use transform here because we want to return false in all cases even if rollback fails
          }
    }
    .recover { case _ => true } // in all other cases we consider that the tx has been published

  override def rollback(tx: Transaction): Future[Boolean] = unlockOutpoints(tx.txIn.map(_.outPoint)) // we unlock all utxos used by the tx

  override def doubleSpent(tx: Transaction): Future[Boolean] =
    for {
      exists <- bitcoinClient.getTransaction(tx.txid)
        .map(_ => true) // we have found the transaction
        .recover {
          case JsonRPCError(Error(_, message)) if message.contains("indexing") =>
            sys.error("Fatal error: bitcoind is indexing!!")
            System.exit(1) // bitcoind is indexing, that's a fatal error!!
            false // won't be reached
          case _ => false
        }
      doublespent <- if (exists) {
        // if the tx is in the blockchain, it can't have been double-spent
        Future.successful(false)
      } else {
        // if the tx wasn't in the blockchain and one of its inputs has been spent, it is double-spent
        // NB: we don't look in the mempool, so it means that we will only consider that the tx has been double-spent if
        // the overriding transaction has been confirmed at least once
        Future.sequence(tx.txIn.map(txIn => isTransactionOutputSpendable(txIn.outPoint.txid.toHex, txIn.outPoint.index.toInt, includeMempool = false))).map(_.exists(_ == false))
      }
    } yield doublespent
    for {
      exists <- bitcoinClient.getTransaction(tx.txid)
        .map(_ => true) // we have found the transaction
        .recover {
          case JsonRPCError(Error(_, message)) if message.contains("index") =>
            sys.error("Fatal error: bitcoind is indexing!!")
            System.exit(1) // bitcoind is indexing, that's a fatal error!!
            false // won't be reached
          case _ => false
        }
      doublespent <- if (exists) {
        // if the tx is in the blockchain, it can't have been double-spent
        Future.successful(false)
      } else {
        // if the tx wasn't in the blockchain and one of its inputs has been spent, it is double-spent
        // NB: we don't look in the mempool, so it means that we will only consider that the tx has been double-spent if
        // the overriding transaction has been confirmed at least once
        Future.sequence(tx.txIn.map(txIn => bitcoinClient.isTransactionOutputSpendable(txIn.outPoint.txid, txIn.outPoint.index.toInt, includeMempool = false))).map(_.exists(_ == false))
      }
    } yield doublespent

}
@@ -154,9 +218,12 @@ object BitcoinCoreWallet {

  // @formatter:off
  case class Options(lockUnspents: Boolean, feeRate: BigDecimal)
  case class Utxo(txid: String, vout: Long)
  case class Utxo(txid: ByteVector32, vout: Long)
  case class WalletTransaction(address: String, amount: Satoshi, fees: Satoshi, blockHash: ByteVector32, confirmations: Long, txid: ByteVector32, timestamp: Long)
  case class FundTransactionResponse(tx: Transaction, changepos: Int, fee: Satoshi)
  case class SignTransactionResponse(tx: Transaction, complete: Boolean)
  // @formatter:on

  private def toSatoshi(amount: BigDecimal): Satoshi = Satoshi(amount.bigDecimal.scaleByPowerOfTen(8).longValue)

}
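The new private toSatoshi helper centralizes the BTC-to-satoshi conversion used throughout this file: bitcoind reports amounts as decimal BTC, and shifting the decimal point by eight digits yields satoshis. A quick illustrative check of the same arithmetic (sketch, not part of the diff):

import fr.acinq.bitcoin.Satoshi

// Same arithmetic as the private BitcoinCoreWallet.toSatoshi helper: 1 BTC = 100,000,000 sat.
def toSatoshi(amount: BigDecimal): Satoshi = Satoshi(amount.bigDecimal.scaleByPowerOfTen(8).longValue)

toSatoshi(BigDecimal("0.00012345")) // == Satoshi(12345)
toSatoshi(BigDecimal("1"))          // == Satoshi(100000000)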
@@ -22,7 +22,7 @@ import fr.acinq.bitcoin.{ByteVector32, Crypto, Satoshi, Script, Transaction, TxO
import fr.acinq.eclair.addressToPublicKeyScript
import fr.acinq.eclair.blockchain.electrum.ElectrumClient.BroadcastTransaction
import fr.acinq.eclair.blockchain.electrum.ElectrumWallet._
import fr.acinq.eclair.blockchain.{EclairWallet, MakeFundingTxResponse}
import fr.acinq.eclair.blockchain.{EclairWallet, MakeFundingTxResponse, OnChainBalance}
import grizzled.slf4j.Logging
import scodec.bits.ByteVector
@@ -30,9 +30,9 @@ import scala.concurrent.{ExecutionContext, Future}

class ElectrumEclairWallet(val wallet: ActorRef, chainHash: ByteVector32)(implicit system: ActorSystem, ec: ExecutionContext, timeout: akka.util.Timeout) extends EclairWallet with Logging {

  override def getBalance = (wallet ? GetBalance).mapTo[GetBalanceResponse].map(balance => balance.confirmed + balance.unconfirmed)
  override def getBalance: Future[OnChainBalance] = (wallet ? GetBalance).mapTo[GetBalanceResponse].map(balance => OnChainBalance(balance.confirmed, balance.unconfirmed))

  override def getReceiveAddress = (wallet ? GetCurrentReceiveAddress).mapTo[GetCurrentReceiveAddressResponse].map(_.address)
  override def getReceiveAddress: Future[String] = (wallet ? GetCurrentReceiveAddress).mapTo[GetCurrentReceiveAddressResponse].map(_.address)

  override def getReceivePubkey(receiveAddress: Option[String] = None): Future[Crypto.PublicKey] = Future.failed(new RuntimeException("Not implemented"))
@@ -40,10 +40,10 @@ class ElectrumEclairWallet(val wallet: ActorRef, chainHash: ByteVector32)(implic

  override def makeFundingTx(pubkeyScript: ByteVector, amount: Satoshi, feeRatePerKw: Long): Future[MakeFundingTxResponse] = {
    val tx = Transaction(version = 2, txIn = Nil, txOut = TxOut(amount, pubkeyScript) :: Nil, lockTime = 0)
    (wallet ? CompleteTransaction(tx, feeRatePerKw)).mapTo[CompleteTransactionResponse].map(response => response match {
    (wallet ? CompleteTransaction(tx, feeRatePerKw)).mapTo[CompleteTransactionResponse].map {
      case CompleteTransactionResponse(tx1, fee1, None) => MakeFundingTxResponse(tx1, 0, fee1)
      case CompleteTransactionResponse(_, _, Some(error)) => throw error
    })
    }
  }

  override def commit(tx: Transaction): Future[Boolean] =
@@ -70,7 +70,6 @@ class ElectrumEclairWallet(val wallet: ActorRef, chainHash: ByteVector32)(implic
  def sendPayment(amount: Satoshi, address: String, feeRatePerKw: Long): Future[String] = {
    val publicKeyScript = Script.write(addressToPublicKeyScript(address, chainHash))
    val tx = Transaction(version = 2, txIn = Nil, txOut = TxOut(amount, publicKeyScript) :: Nil, lockTime = 0)

    (wallet ? CompleteTransaction(tx, feeRatePerKw))
      .mapTo[CompleteTransactionResponse]
      .flatMap {
@@ -96,4 +95,5 @@ class ElectrumEclairWallet(val wallet: ActorRef, chainHash: ByteVector32)(implic
  override def doubleSpent(tx: Transaction): Future[Boolean] = {
    (wallet ? IsDoubleSpent(tx)).mapTo[IsDoubleSpentResponse].map(_.isDoubleSpent)
  }

}
@@ -45,6 +45,7 @@ class BitgoFeeProvider(chainHash: ByteVector32, readTimeOut: Duration)(implicit
      .send()
    feeRanges = parseFeeRanges(res.unsafeBody)
  } yield extractFeerates(feeRanges)

}

object BitgoFeeProvider {
@@ -0,0 +1,33 @@
/*
 * Copyright 2020 ACINQ SAS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package fr.acinq.eclair.blockchain.fee

import fr.acinq.eclair.db.FeeratesDb

import scala.concurrent.{ExecutionContext, Future}

class DbFeeProvider(db: FeeratesDb, provider: FeeProvider)(implicit ec: ExecutionContext) extends FeeProvider {

  /** This method retrieves feerates from the provider, and stores the results in the database */
  override def getFeerates: Future[FeeratesPerKB] =
    provider.getFeerates map { feerates =>
      db.addOrUpdateFeerates(feerates)
      feerates
    }

}
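DbFeeProvider simply decorates another FeeProvider and persists whatever it returns. A minimal wiring sketch (the constant provider and in-memory FeeratesDb below are hypothetical stand-ins written for illustration, not eclair classes):

import fr.acinq.eclair.blockchain.fee.{DbFeeProvider, FeeProvider, FeeratesPerKB}
import fr.acinq.eclair.db.FeeratesDb
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

// Hypothetical provider that always returns the same feerates (values are placeholders).
class ConstantProvider(rates: FeeratesPerKB) extends FeeProvider {
  override def getFeerates: Future[FeeratesPerKB] = Future.successful(rates)
}

// Hypothetical in-memory FeeratesDb, e.g. for tests.
class InMemoryFeeratesDb extends FeeratesDb {
  private var last: Option[FeeratesPerKB] = None
  override def addOrUpdateFeerates(feeratesPerKB: FeeratesPerKB): Unit = last = Some(feeratesPerKB)
  override def getFeerates(): Option[FeeratesPerKB] = last
  override def close(): Unit = ()
}

val db = new InMemoryFeeratesDb()
val provider = new DbFeeProvider(db, new ConstantProvider(FeeratesPerKB(block_1 = 10000, blocks_2 = 9000, blocks_6 = 8000, blocks_12 = 7000, blocks_36 = 6000, blocks_72 = 5000, blocks_144 = 4000)))
// After the first call, the feerates are also available from the database:
provider.getFeerates.foreach(_ => assert(db.getFeerates().nonEmpty))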
@@ -44,6 +44,7 @@ class EarnDotComFeeProvider(readTimeOut: Duration)(implicit http: SttpBackend[Fu
      .send()
    feeRanges = parseFeeRanges(json.unsafeBody)
  } yield extractFeerates(feeRanges)

}

object EarnDotComFeeProvider {
@@ -34,6 +34,7 @@ class SmoothFeeProvider(provider: FeeProvider, windowSize: Int)(implicit ec: Exe
      _ = append(rate)
    } yield SmoothFeeProvider.smooth(queue)
  }

}

object SmoothFeeProvider {
@@ -185,10 +185,7 @@ class Channel(val nodeParams: NodeParams, val wallet: EclairWallet, remoteNodeId
        maxAcceptedHtlcs = localParams.maxAcceptedHtlcs,
        fundingPubkey = fundingPubKey,
        revocationBasepoint = keyManager.revocationPoint(channelKeyPath).publicKey,
        paymentBasepoint = channelVersion match {
          case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) => localParams.localPaymentBasepoint.get
          case _ => keyManager.paymentPoint(channelKeyPath).publicKey
        },
        paymentBasepoint = localParams.staticPaymentBasepoint.getOrElse(keyManager.paymentPoint(channelKeyPath).publicKey),
        delayedPaymentBasepoint = keyManager.delayedPaymentPoint(channelKeyPath).publicKey,
        htlcBasepoint = keyManager.htlcPoint(channelKeyPath).publicKey,
        firstPerCommitmentPoint = keyManager.commitmentPoint(channelKeyPath, 0),

@@ -319,10 +316,7 @@ class Channel(val nodeParams: NodeParams, val wallet: EclairWallet, remoteNodeId
        maxAcceptedHtlcs = localParams.maxAcceptedHtlcs,
        fundingPubkey = fundingPubkey,
        revocationBasepoint = keyManager.revocationPoint(channelKeyPath).publicKey,
        paymentBasepoint = channelVersion match {
          case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) => localParams.localPaymentBasepoint.get
          case _ => keyManager.paymentPoint(channelKeyPath).publicKey
        },
        paymentBasepoint = localParams.staticPaymentBasepoint.getOrElse(keyManager.paymentPoint(channelKeyPath).publicKey),
        delayedPaymentBasepoint = keyManager.delayedPaymentPoint(channelKeyPath).publicKey,
        htlcBasepoint = keyManager.htlcPoint(channelKeyPath).publicKey,
        firstPerCommitmentPoint = keyManager.commitmentPoint(channelKeyPath, 0),
@@ -1946,7 +1940,8 @@ class Channel(val nodeParams: NodeParams, val wallet: EclairWallet, remoteNodeId
        // Downstream timed out.
        handleLocalError(HtlcsTimedoutDownstream(d.channelId, timedOutOutgoing), d, Some(c))
      } else if (almostTimedOutIncoming.nonEmpty) {
        // Upstream is close to timing out.
        // Upstream is close to timing out, we need to test if we have funds at risk: htlcs for which we know the preimage
        // that are still in our commitment (upstream will try to timeout on-chain).
        val relayedFulfills = d.commitments.localChanges.all.collect { case u: UpdateFulfillHtlc => u.id }.toSet
        val offendingRelayedHtlcs = almostTimedOutIncoming.filter(htlc => relayedFulfills.contains(htlc.id))
        if (offendingRelayedHtlcs.nonEmpty) {
@@ -2114,7 +2109,7 @@ class Channel(val nodeParams: NodeParams, val wallet: EclairWallet, remoteNodeId
  def handleRemoteSpentFuture(commitTx: Transaction, d: DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT) = {
    log.warning(s"they published their future commit (because we asked them to) in txid=${commitTx.txid}")
    d.commitments.channelVersion match {
      case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) =>
      case v if v.hasStaticRemotekey =>
        val remoteCommitPublished = RemoteCommitPublished(commitTx, None, List.empty, List.empty, Map.empty)
        val nextData = DATA_CLOSING(d.commitments, fundingTx = None, waitingSince = now, Nil, futureRemoteCommitPublished = Some(remoteCommitPublished))
        goto(CLOSING) using nextData storing() // we don't need to claim our main output in the remote commit because it already spends to our wallet address
@@ -214,7 +214,8 @@ final case class DATA_CLOSING(commitments: Commitments,
                              nextRemoteCommitPublished: Option[RemoteCommitPublished] = None,
                              futureRemoteCommitPublished: Option[RemoteCommitPublished] = None,
                              revokedCommitPublished: List[RevokedCommitPublished] = Nil) extends Data with HasCommitments {
  def spendingTxes = mutualClosePublished ::: localCommitPublished.map(_.commitTx).toList ::: remoteCommitPublished.map(_.commitTx).toList ::: nextRemoteCommitPublished.map(_.commitTx).toList ::: futureRemoteCommitPublished.map(_.commitTx).toList ::: revokedCommitPublished.map(_.commitTx)
  val spendingTxes = mutualClosePublished ::: localCommitPublished.map(_.commitTx).toList ::: remoteCommitPublished.map(_.commitTx).toList ::: nextRemoteCommitPublished.map(_.commitTx).toList ::: futureRemoteCommitPublished.map(_.commitTx).toList ::: revokedCommitPublished.map(_.commitTx)
  require(spendingTxes.nonEmpty, "there must be at least one tx published in this state")
}

final case class DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT(commitments: Commitments, remoteChannelReestablish: ChannelReestablish) extends Data with HasCommitments
@@ -229,7 +230,7 @@ final case class LocalParams(nodeId: PublicKey,
                             maxAcceptedHtlcs: Int,
                             isFunder: Boolean,
                             defaultFinalScriptPubKey: ByteVector,
                             localPaymentBasepoint: Option[PublicKey],
                             staticPaymentBasepoint: Option[PublicKey],
                             features: Features)

final case class RemoteParams(nodeId: PublicKey,
@@ -252,27 +253,33 @@ object ChannelFlags {
}

case class ChannelVersion(bits: BitVector) {
  import ChannelVersion._

  require(bits.size == ChannelVersion.LENGTH_BITS, "channel version takes 4 bytes")

  def |(other: ChannelVersion) = ChannelVersion(bits | other.bits)
  def &(other: ChannelVersion) = ChannelVersion(bits & other.bits)
  def ^(other: ChannelVersion) = ChannelVersion(bits ^ other.bits)
  def isSet(bit: Int) = bits.reverse.get(bit)

  private def isSet(bit: Int) = bits.reverse.get(bit)

  // formatter:off
  def hasPubkeyKeyPath: Boolean = isSet(USE_PUBKEY_KEYPATH_BIT)
  def hasStaticRemotekey: Boolean = isSet(USE_STATIC_REMOTEKEY_BIT)
  // formatter:on
}

object ChannelVersion {
  import scodec.bits._
  val LENGTH_BITS = 4 * 8
  val LENGTH_BITS: Int = 4 * 8

  private val USE_PUBKEY_KEYPATH_BIT = 0 // bit numbers start at 0
  private val USE_STATIC_REMOTEKEY_BIT = 1

  private def setBit(bit: Int) = ChannelVersion(BitVector.low(LENGTH_BITS).set(bit).reverse)

  val ZEROES = ChannelVersion(bin"00000000000000000000000000000000")
  val USE_PUBKEY_KEYPATH_BIT = 0 // bit numbers start at 0
  val USE_STATIC_REMOTEKEY_BIT = 1

  def fromBit(bit: Int) = ChannelVersion(BitVector.low(LENGTH_BITS).set(bit).reverse)

  val USE_PUBKEY_KEYPATH = fromBit(USE_PUBKEY_KEYPATH_BIT)
  val USE_STATIC_REMOTEKEY = fromBit(USE_STATIC_REMOTEKEY_BIT)

  val STANDARD = ZEROES | USE_PUBKEY_KEYPATH
  val STATIC_REMOTEKEY = STANDARD | USE_STATIC_REMOTEKEY // USE_PUBKEY_KEYPATH + USE_STATIC_REMOTEKEY
  val STANDARD = ZEROES | setBit(USE_PUBKEY_KEYPATH_BIT)
  val STATIC_REMOTEKEY = STANDARD | setBit(USE_STATIC_REMOTEKEY_BIT) // PUBKEY_KEYPATH + STATIC_REMOTEKEY
}
// @formatter:on
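After this refactor, callers no longer test raw bits through isSet; they use the named predicates. A small sketch of the resulting API surface (illustration only, not part of the diff):

import fr.acinq.eclair.channel.ChannelVersion

// The predefined versions expose feature checks through the new helpers:
assert(ChannelVersion.STANDARD.hasPubkeyKeyPath)
assert(!ChannelVersion.STANDARD.hasStaticRemotekey)
assert(ChannelVersion.STATIC_REMOTEKEY.hasPubkeyKeyPath)   // STATIC_REMOTEKEY builds on STANDARD
assert(ChannelVersion.STATIC_REMOTEKEY.hasStaticRemotekey)
// ChannelVersion.STANDARD.isSet(1) no longer compiles: isSet and the *_BIT constants are now private.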
@@ -21,6 +21,7 @@ import fr.acinq.eclair.channel.ChannelVersion._
import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey, sha256}
import fr.acinq.bitcoin.{ByteVector32, ByteVector64, Crypto}
import fr.acinq.eclair.blockchain.fee.{FeeEstimator, FeeTargets}
import fr.acinq.eclair.channel.Monitoring.Metrics
import fr.acinq.eclair.crypto.{Generators, KeyManager, ShaChain, Sphinx}
import fr.acinq.eclair.payment.relay.{Origin, Relayer}
import fr.acinq.eclair.transactions.DirectedHtlc._

@@ -63,7 +64,7 @@ case class Commitments(channelVersion: ChannelVersion,
                       commitInput: InputInfo,
                       remotePerCommitmentSecrets: ShaChain, channelId: ByteVector32) {

  require(!channelVersion.isSet(USE_STATIC_REMOTEKEY_BIT) || (channelVersion.isSet(USE_STATIC_REMOTEKEY_BIT) && localParams.localPaymentBasepoint.isDefined), s"localParams.localPaymentBasepoint must be defined for commitments with version=$channelVersion")
  require(!channelVersion.hasStaticRemotekey || (channelVersion.hasStaticRemotekey && localParams.staticPaymentBasepoint.isDefined), s"localParams.localPaymentBasepoint must be defined for commitments with version=$channelVersion")

  def hasNoPendingHtlcs: Boolean = localCommit.spec.htlcs.isEmpty && remoteCommit.spec.htlcs.isEmpty && remoteNextCommitInfo.isRight
@@ -76,8 +77,8 @@ case class Commitments(channelVersion: ChannelVersion,
  }

  /**
   * HTLCs that are close to timing out upstream are potentially dangerous. If we received the pre-image for those
   * HTLCs, we need to get a remote signed updated commitment that removes this HTLC.
   * HTLCs that are close to timing out upstream are potentially dangerous. If we received the preimage for those HTLCs,
   * we need to get a remote signed updated commitment that removes those HTLCs.
   * Otherwise when we get close to the upstream timeout, we risk an on-chain race condition between their HTLC timeout
   * and our HTLC success in case of a force-close.
   */
|
|||
}
|
||||
}
|
||||
|
||||
def receiveFee(commitments: Commitments, feeEstimator: FeeEstimator, feeTargets: FeeTargets, fee: UpdateFee, maxFeerateMismatch: Double): Try[Commitments] = {
|
||||
def receiveFee(commitments: Commitments, feeEstimator: FeeEstimator, feeTargets: FeeTargets, fee: UpdateFee, maxFeerateMismatch: Double)(implicit log: LoggingAdapter): Try[Commitments] = {
|
||||
if (commitments.localParams.isFunder) {
|
||||
Failure(FundeeCannotSendUpdateFee(commitments.channelId))
|
||||
} else if (fee.feeratePerKw < fr.acinq.eclair.MinimumFeeratePerKw) {
|
||||
Failure(FeerateTooSmall(commitments.channelId, remoteFeeratePerKw = fee.feeratePerKw))
|
||||
} else {
|
||||
Metrics.RemoteFeeratePerKw.withoutTags().record(fee.feeratePerKw)
|
||||
val localFeeratePerKw = feeEstimator.getFeeratePerKw(target = feeTargets.commitmentBlockTarget)
|
||||
log.info("remote feeratePerKw={}, local feeratePerKw={}, ratio={}", fee.feeratePerKw, localFeeratePerKw, fee.feeratePerKw.toDouble / localFeeratePerKw)
|
||||
if (Helpers.isFeeDiffTooHigh(fee.feeratePerKw, localFeeratePerKw, maxFeerateMismatch)) {
|
||||
Failure(FeerateTooDifferent(commitments.channelId, localFeeratePerKw = localFeeratePerKw, remoteFeeratePerKw = fee.feeratePerKw))
|
||||
} else {
|
||||
|
@@ -601,16 +604,10 @@ object Commitments {
    val channelKeyPath = keyManager.channelKeyPath(localParams, channelVersion)
    val localDelayedPaymentPubkey = Generators.derivePubKey(keyManager.delayedPaymentPoint(channelKeyPath).publicKey, localPerCommitmentPoint)
    val localHtlcPubkey = Generators.derivePubKey(keyManager.htlcPoint(channelKeyPath).publicKey, localPerCommitmentPoint)
    val remotePaymentPubkey = channelVersion match {
      case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) => remoteParams.paymentBasepoint
      case _ => Generators.derivePubKey(remoteParams.paymentBasepoint, localPerCommitmentPoint)
    }
    val remotePaymentPubkey = if (channelVersion.hasStaticRemotekey) remoteParams.paymentBasepoint else Generators.derivePubKey(remoteParams.paymentBasepoint, localPerCommitmentPoint)
    val remoteHtlcPubkey = Generators.derivePubKey(remoteParams.htlcBasepoint, localPerCommitmentPoint)
    val localRevocationPubkey = Generators.revocationPubKey(remoteParams.revocationBasepoint, localPerCommitmentPoint)
    val localPaymentBasepoint = channelVersion match {
      case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) => localParams.localPaymentBasepoint.get
      case _ => keyManager.paymentPoint(channelKeyPath).publicKey
    }
    val localPaymentBasepoint = localParams.staticPaymentBasepoint.getOrElse(keyManager.paymentPoint(channelKeyPath).publicKey)
    val outputs = makeCommitTxOutputs(localParams.isFunder, localParams.dustLimit, localRevocationPubkey, remoteParams.toSelfDelay, localDelayedPaymentPubkey, remotePaymentPubkey, localHtlcPubkey, remoteHtlcPubkey, spec)
    val commitTx = Transactions.makeCommitTx(commitmentInput, commitTxNumber, localPaymentBasepoint, remoteParams.paymentBasepoint, localParams.isFunder, outputs)
    val (htlcTimeoutTxs, htlcSuccessTxs) = Transactions.makeHtlcTxs(commitTx.tx, localParams.dustLimit, localRevocationPubkey, remoteParams.toSelfDelay, localDelayedPaymentPubkey, spec.feeratePerKw, outputs)
|
|||
remotePerCommitmentPoint: PublicKey,
|
||||
spec: CommitmentSpec): (CommitTx, Seq[HtlcTimeoutTx], Seq[HtlcSuccessTx]) = {
|
||||
val channelKeyPath = keyManager.channelKeyPath(localParams, channelVersion)
|
||||
val localPaymentBasepoint = channelVersion match {
|
||||
case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) => localParams.localPaymentBasepoint.get
|
||||
case _ => keyManager.paymentPoint(channelKeyPath).publicKey
|
||||
}
|
||||
val localPaymentPubkey = channelVersion match {
|
||||
case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) => localPaymentBasepoint
|
||||
case _ => Generators.derivePubKey(localPaymentBasepoint, remotePerCommitmentPoint)
|
||||
}
|
||||
val localPaymentBasepoint = localParams.staticPaymentBasepoint.getOrElse(keyManager.paymentPoint(channelKeyPath).publicKey)
|
||||
val localPaymentPubkey = if (channelVersion.hasStaticRemotekey) localPaymentBasepoint else Generators.derivePubKey(localPaymentBasepoint, remotePerCommitmentPoint)
|
||||
val localHtlcPubkey = Generators.derivePubKey(keyManager.htlcPoint(channelKeyPath).publicKey, remotePerCommitmentPoint)
|
||||
val remoteDelayedPaymentPubkey = Generators.derivePubKey(remoteParams.delayedPaymentBasepoint, remotePerCommitmentPoint)
|
||||
val remoteHtlcPubkey = Generators.derivePubKey(remoteParams.htlcBasepoint, remotePerCommitmentPoint)
|
||||
|
|
|
@@ -635,7 +635,7 @@ object Helpers {
      }.toSeq.flatten

      channelVersion match {
        case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) =>
        case v if v.hasStaticRemotekey =>
          RemoteCommitPublished(
            commitTx = tx,
            claimMainOutputTx = None,
@@ -696,10 +696,7 @@ object Helpers {
      require(tx.txIn.size == 1, "commitment tx should have 1 input")
      val channelKeyPath = keyManager.channelKeyPath(localParams, channelVersion)
      val obscuredTxNumber = Transactions.decodeTxNumber(tx.txIn.head.sequence, tx.lockTime)
      val localPaymentPoint = channelVersion match {
        case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) => localParams.localPaymentBasepoint.get
        case _ => keyManager.paymentPoint(channelKeyPath).publicKey
      }
      val localPaymentPoint = localParams.staticPaymentBasepoint.getOrElse(keyManager.paymentPoint(channelKeyPath).publicKey)
      // this tx has been published by remote, so we need to invert local/remote params
      val txnumber = Transactions.obscuredCommitTxNumber(obscuredTxNumber, !localParams.isFunder, remoteParams.paymentBasepoint, localPaymentPoint)
      require(txnumber <= 0xffffffffffffL, "txnumber must be lesser than 48 bits long")
@@ -721,7 +718,7 @@ object Helpers {

      // first we will claim our main output right away
      val mainTx = channelVersion match {
        case v if v.isSet(USE_STATIC_REMOTEKEY_BIT) =>
        case v if v.hasStaticRemotekey =>
          log.info(s"channel uses option_static_remotekey, not claiming our p2wpkh output")
          None
        case _ => generateTx("claim-p2wpkh-output") {
@@ -25,6 +25,8 @@ object Monitoring {
    val ChannelsCount = Kamon.gauge("channels.count")
    val ChannelErrors = Kamon.counter("channels.errors")
    val ChannelLifecycleEvents = Kamon.counter("channels.lifecycle")
    val LocalFeeratePerKw = Kamon.gauge("channels.local-feerate-per-kw")
    val RemoteFeeratePerKw = Kamon.histogram("channels.remote-feerate-per-kw")
  }

  object Tags {
@@ -45,7 +45,7 @@ trait KeyManager {

  def commitmentPoint(channelKeyPath: DeterministicWallet.KeyPath, index: Long): Crypto.PublicKey

  def channelKeyPath(localParams: LocalParams, channelVersion: ChannelVersion): DeterministicWallet.KeyPath = if (channelVersion.isSet(ChannelVersion.USE_PUBKEY_KEYPATH_BIT)) {
  def channelKeyPath(localParams: LocalParams, channelVersion: ChannelVersion): DeterministicWallet.KeyPath = if (channelVersion.hasPubkeyKeyPath) {
    // deterministic mode: use the funding pubkey to compute the channel key path
    KeyManager.channelKeyPath(fundingPublicKey(localParams.fundingKeyPath))
  } else {
@@ -0,0 +1,34 @@
/*
 * Copyright 2020 ACINQ SAS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package fr.acinq.eclair.db

import java.io.Closeable

import fr.acinq.eclair.blockchain.fee.FeeratesPerKB

/**
 * This database stores the fee rates retrieved by a [[fr.acinq.eclair.blockchain.fee.FeeProvider]].
 */
trait FeeratesDb extends Closeable {

  /** Insert or update the feerates into the feerates database. */
  def addOrUpdateFeerates(feeratesPerKB: FeeratesPerKB): Unit

  /** Return the (optional) feerates from the feerates database. */
  def getFeerates(): Option[FeeratesPerKB]

}
@@ -24,12 +24,10 @@ import fr.acinq.bitcoin.{ByteVector32, Satoshi}
import fr.acinq.eclair.channel.{ChannelErrorOccurred, LocalError, NetworkFeePaid, RemoteError}
import fr.acinq.eclair.db._
import fr.acinq.eclair.payment._
import fr.acinq.eclair.wire.ChannelCodecs
import fr.acinq.eclair.{LongToBtcAmount, MilliSatoshi}
import grizzled.slf4j.Logging

import scala.collection.immutable.Queue
import scala.compat.Platform

class SqliteAuditDb(sqlite: Connection) extends AuditDb with Logging {

@@ -44,7 +42,8 @@ class SqliteAuditDb(sqlite: Connection) extends AuditDb with Logging {
  using(sqlite.createStatement(), inTransaction = true) { statement =>

    def migration12(statement: Statement): Int = {
      statement.executeUpdate(s"ALTER TABLE sent ADD id BLOB DEFAULT '${ChannelCodecs.UNKNOWN_UUID.toString}' NOT NULL")
      val ZERO_UUID = UUID.fromString("00000000-0000-0000-0000-000000000000")
      statement.executeUpdate(s"ALTER TABLE sent ADD id BLOB DEFAULT '${ZERO_UUID.toString}' NOT NULL")
    }

    def migration23(statement: Statement): Int = {
@@ -311,21 +310,23 @@ class SqliteAuditDb(sqlite: Connection) extends AuditDb with Logging {
        val updated = relayedTo.map(channelId => (channelId, relayedByChannelId.getOrElse(channelId, Nil) :+ e)).toMap
        relayedByChannelId ++ updated
      }
      networkFees.map {
        case (channelId, networkFee) =>
          val r = relayed.getOrElse(channelId, Nil)
          val paymentCount = r.length
          if (paymentCount == 0) {
            Stats(channelId, 0 sat, 0, 0 sat, networkFee)
          } else {
            val avgPaymentAmount = r.map(_.amountOut).sum / paymentCount
            val relayFee = r.map {
              case c: ChannelPaymentRelayed => c.amountIn - c.amountOut
              case t: TrampolinePaymentRelayed => (t.amountIn - t.amountOut) * t.outgoing.count(_.channelId == channelId) / t.outgoing.length
            }.sum
            Stats(channelId, avgPaymentAmount.truncateToSatoshi, paymentCount, relayFee.truncateToSatoshi, networkFee)
          }
      }.toSeq
      // Channels opened by our peers won't have any entry in the network_fees table, but we still want to compute stats for them.
      val allChannels = networkFees.keySet ++ relayed.keySet
      allChannels.map(channelId => {
        val networkFee = networkFees.getOrElse(channelId, 0 sat)
        val r = relayed.getOrElse(channelId, Nil)
        val paymentCount = r.length
        if (paymentCount == 0) {
          Stats(channelId, 0 sat, 0, 0 sat, networkFee)
        } else {
          val avgPaymentAmount = r.map(_.amountOut).sum / paymentCount
          val relayFee = r.map {
            case c: ChannelPaymentRelayed => c.amountIn - c.amountOut
            case t: TrampolinePaymentRelayed => (t.amountIn - t.amountOut) * t.outgoing.count(_.channelId == channelId) / t.outgoing.length
          }.sum
          Stats(channelId, avgPaymentAmount.truncateToSatoshi, paymentCount, relayFee.truncateToSatoshi, networkFee)
        }
      }).toSeq
    }

    // used by mobile apps
@@ -0,0 +1,91 @@
/*
 * Copyright 2020 ACINQ SAS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package fr.acinq.eclair.db.sqlite

import java.sql.Connection

import fr.acinq.eclair.blockchain.fee.FeeratesPerKB
import fr.acinq.eclair.db.FeeratesDb

class SqliteFeeratesDb(sqlite: Connection) extends FeeratesDb {

  import SqliteUtils._

  val DB_NAME = "feerates"
  val CURRENT_VERSION = 1

  using(sqlite.createStatement(), inTransaction = true) { statement =>
    getVersion(statement, DB_NAME, CURRENT_VERSION) match {
      case CURRENT_VERSION =>
        // Create feerates table. Rates are in kb.
        statement.executeUpdate(
          """
            |CREATE TABLE IF NOT EXISTS feerates_per_kb (
            |rate_block_1 INTEGER NOT NULL, rate_blocks_2 INTEGER NOT NULL, rate_blocks_6 INTEGER NOT NULL, rate_blocks_12 INTEGER NOT NULL, rate_blocks_36 INTEGER NOT NULL, rate_blocks_72 INTEGER NOT NULL, rate_blocks_144 INTEGER NOT NULL,
            |timestamp INTEGER NOT NULL)""".stripMargin)
      case unknownVersion => throw new RuntimeException(s"Unknown version of DB $DB_NAME found, version=$unknownVersion")
    }
  }

  override def addOrUpdateFeerates(feeratesPerKB: FeeratesPerKB): Unit = {
    using(sqlite.prepareStatement("UPDATE feerates_per_kb SET rate_block_1=?, rate_blocks_2=?, rate_blocks_6=?, rate_blocks_12=?, rate_blocks_36=?, rate_blocks_72=?, rate_blocks_144=?, timestamp=?")) { update =>
      update.setLong(1, feeratesPerKB.block_1)
      update.setLong(2, feeratesPerKB.blocks_2)
      update.setLong(3, feeratesPerKB.blocks_6)
      update.setLong(4, feeratesPerKB.blocks_12)
      update.setLong(5, feeratesPerKB.blocks_36)
      update.setLong(6, feeratesPerKB.blocks_72)
      update.setLong(7, feeratesPerKB.blocks_144)
      update.setLong(8, System.currentTimeMillis())
      if (update.executeUpdate() == 0) {
        using(sqlite.prepareStatement("INSERT INTO feerates_per_kb VALUES (?, ?, ?, ?, ?, ?, ?, ?)")) { insert =>
          insert.setLong(1, feeratesPerKB.block_1)
          insert.setLong(2, feeratesPerKB.blocks_2)
          insert.setLong(3, feeratesPerKB.blocks_6)
          insert.setLong(4, feeratesPerKB.blocks_12)
          insert.setLong(5, feeratesPerKB.blocks_36)
          insert.setLong(6, feeratesPerKB.blocks_72)
          insert.setLong(7, feeratesPerKB.blocks_144)
          insert.setLong(8, System.currentTimeMillis())
          insert.executeUpdate()
        }
      }
    }
  }

  override def getFeerates(): Option[FeeratesPerKB] = {
    using(sqlite.prepareStatement("SELECT rate_block_1, rate_blocks_2, rate_blocks_6, rate_blocks_12, rate_blocks_36, rate_blocks_72, rate_blocks_144 FROM feerates_per_kb")) { statement =>
      val rs = statement.executeQuery()
      if (rs.next()) {
        Some(FeeratesPerKB(
          block_1 = rs.getLong("rate_block_1"),
          blocks_2 = rs.getLong("rate_blocks_2"),
          blocks_6 = rs.getLong("rate_blocks_6"),
          blocks_12 = rs.getLong("rate_blocks_12"),
          blocks_36 = rs.getLong("rate_blocks_36"),
          blocks_72 = rs.getLong("rate_blocks_72"),
          blocks_144 = rs.getLong("rate_blocks_144")))
      } else {
        None
      }
    }
  }

  // used by mobile apps
  override def close(): Unit = sqlite.close()
}
@ -285,7 +285,7 @@ class Peer(val nodeParams: NodeParams, remoteNodeId: PublicKey, watcher: ActorRe
|
|||
|
||||
def createNewChannel(nodeParams: NodeParams, funder: Boolean, fundingAmount: Satoshi, origin_opt: Option[ActorRef], channelVersion: ChannelVersion): (ActorRef, LocalParams) = {
|
||||
val (finalScript, localPaymentBasepoint) = channelVersion match {
|
||||
case v if v.isSet(ChannelVersion.USE_STATIC_REMOTEKEY_BIT) =>
|
||||
case v if v.hasStaticRemotekey =>
|
||||
val walletKey = Helpers.getWalletPaymentBasepoint(wallet)
|
||||
(Script.write(Script.pay2wpkh(walletKey)), Some(walletKey))
|
||||
case _ =>
|
||||
|
@ -404,7 +404,7 @@ object Peer {
|
|||
makeChannelParams(nodeParams, defaultFinalScriptPubkey, localPaymentBasepoint, isFunder, fundingAmount, fundingKeyPath)
|
||||
}
|
||||
|
||||
def makeChannelParams(nodeParams: NodeParams, defaultFinalScriptPubkey: ByteVector, localPaymentBasepoint: Option[PublicKey], isFunder: Boolean, fundingAmount: Satoshi, fundingKeyPath: DeterministicWallet.KeyPath): LocalParams = {
|
||||
def makeChannelParams(nodeParams: NodeParams, defaultFinalScriptPubkey: ByteVector, staticPaymentBasepoint: Option[PublicKey], isFunder: Boolean, fundingAmount: Satoshi, fundingKeyPath: DeterministicWallet.KeyPath): LocalParams = {
|
||||
LocalParams(
|
||||
nodeParams.nodeId,
|
||||
fundingKeyPath,
|
||||
|
@ -416,7 +416,7 @@ object Peer {
|
|||
maxAcceptedHtlcs = nodeParams.maxAcceptedHtlcs,
|
||||
isFunder = isFunder,
|
||||
defaultFinalScriptPubKey = defaultFinalScriptPubkey,
|
||||
localPaymentBasepoint = localPaymentBasepoint,
|
||||
staticPaymentBasepoint = staticPaymentBasepoint,
|
||||
features = nodeParams.features)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -378,7 +378,17 @@ object Graph {
|
|||
* @param capacity channel capacity
|
||||
* @param balance_opt (optional) available balance that can be sent through this edge
|
||||
*/
|
||||
case class GraphEdge(desc: ChannelDesc, update: ChannelUpdate, capacity: Satoshi, balance_opt: Option[MilliSatoshi])
|
||||
case class GraphEdge(desc: ChannelDesc, update: ChannelUpdate, capacity: Satoshi, balance_opt: Option[MilliSatoshi]) {
|
||||
|
||||
def maxHtlcAmount(reservedCapacity: MilliSatoshi): MilliSatoshi = Seq(
|
||||
balance_opt.map(balance => balance - reservedCapacity),
|
||||
update.htlcMaximumMsat,
|
||||
Some(capacity.toMilliSatoshi - reservedCapacity)
|
||||
).flatten.min.max(0 msat)
|
||||
|
||||
def fee(amount: MilliSatoshi): MilliSatoshi = nodeFee(update.feeBaseMsat, update.feeProportionalMillionths, amount)
|
||||
|
||||
}
|
||||
|
||||
/** A graph data structure that uses an adjacency list, stores the incoming edges of the neighbors */
|
||||
case class DirectedGraph(private val vertices: Map[PublicKey, List[GraphEdge]]) {
|
||||
|
|
|
@ -30,6 +30,7 @@ import fr.acinq.eclair.wire.ChannelUpdate
|
|||
import fr.acinq.eclair.{ShortChannelId, _}
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import scala.collection.mutable
|
||||
import scala.concurrent.duration._
|
||||
import scala.util.{Failure, Random, Success, Try}
|
||||
|
||||
|
@ -144,22 +145,24 @@ object RouteCalculation {
|
|||
ageFactor = routerConf.searchRatioChannelAge,
|
||||
capacityFactor = routerConf.searchRatioChannelCapacity
|
||||
))
|
||||
}
|
||||
},
|
||||
mpp = MultiPartParams(routerConf.mppMinPartAmount, routerConf.mppMaxParts)
|
||||
)
|
||||
|
||||
/**
|
||||
* Find a route in the graph between localNodeId and targetNodeId, returns the route.
|
||||
* Will perform a k-shortest path selection given the @param numRoutes and randomly select one of the result.
|
||||
*
|
||||
* @param g graph of the whole network
|
||||
* @param localNodeId sender node (payer)
|
||||
* @param targetNodeId target node (final recipient)
|
||||
* @param amount the amount that the target node should receive
|
||||
* @param maxFee the maximum fee of a resulting route
|
||||
* @param numRoutes the number of routes to find
|
||||
* @param extraEdges a set of extra edges we want to CONSIDER during the search
|
||||
* @param ignoredEdges a set of extra edges we want to IGNORE during the search
|
||||
* @param routeParams a set of parameters that can restrict the route search
|
||||
* @param g graph of the whole network
|
||||
* @param localNodeId sender node (payer)
|
||||
* @param targetNodeId target node (final recipient)
|
||||
* @param amount the amount that the target node should receive
|
||||
* @param maxFee the maximum fee of a resulting route
|
||||
* @param numRoutes the number of routes to find
|
||||
* @param extraEdges a set of extra edges we want to CONSIDER during the search
|
||||
* @param ignoredEdges a set of extra edges we want to IGNORE during the search
|
||||
* @param ignoredVertices a set of extra vertices we want to IGNORE during the search
|
||||
* @param routeParams a set of parameters that can restrict the route search
|
||||
* @return the computed routes to the destination @param targetNodeId
|
||||
*/
|
||||
def findRoute(g: DirectedGraph,
|
||||
|
@ -219,4 +222,139 @@ object RouteCalculation {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a multi-part route in the graph between localNodeId and targetNodeId.
|
||||
*
|
||||
* @param g graph of the whole network
|
||||
* @param localNodeId sender node (payer)
|
||||
* @param targetNodeId target node (final recipient)
|
||||
* @param amount the amount that the target node should receive
|
||||
* @param maxFee the maximum fee of a resulting route
|
||||
* @param extraEdges a set of extra edges we want to CONSIDER during the search
|
||||
* @param ignoredEdges a set of extra edges we want to IGNORE during the search
|
||||
* @param ignoredVertices a set of extra vertices we want to IGNORE during the search
|
||||
* @param pendingHtlcs a list of htlcs that have already been sent for that multi-part payment (used to avoid finding conflicting HTLCs)
|
||||
* @param routeParams a set of parameters that can restrict the route search
|
||||
* @return a set of disjoint routes to the destination @param targetNodeId with the payment amount split between them
|
||||
*/
|
||||
def findMultiPartRoute(g: DirectedGraph,
|
||||
localNodeId: PublicKey,
|
||||
targetNodeId: PublicKey,
|
||||
amount: MilliSatoshi,
|
||||
maxFee: MilliSatoshi,
|
||||
extraEdges: Set[GraphEdge] = Set.empty,
|
||||
ignoredEdges: Set[ChannelDesc] = Set.empty,
|
||||
ignoredVertices: Set[PublicKey] = Set.empty,
|
||||
pendingHtlcs: Seq[Route] = Nil,
|
||||
routeParams: RouteParams,
|
||||
currentBlockHeight: Long): Try[Seq[Route]] = Try {
|
||||
val result = findMultiPartRouteInternal(g, localNodeId, targetNodeId, amount, maxFee, extraEdges, ignoredEdges, ignoredVertices, pendingHtlcs, routeParams, currentBlockHeight) match {
|
||||
case Right(routes) => Right(routes)
|
||||
case Left(RouteNotFound) if routeParams.randomize =>
|
||||
// If we couldn't find a randomized solution, fallback to a deterministic one.
|
||||
findMultiPartRouteInternal(g, localNodeId, targetNodeId, amount, maxFee, extraEdges, ignoredEdges, ignoredVertices, pendingHtlcs, routeParams.copy(randomize = false), currentBlockHeight)
|
||||
case Left(ex) => Left(ex)
|
||||
}
|
||||
result match {
|
||||
case Right(routes) => routes
|
||||
case Left(ex) => return Failure(ex)
|
||||
}
|
||||
}
|
||||
|
||||
private def findMultiPartRouteInternal(g: DirectedGraph,
|
||||
localNodeId: PublicKey,
|
||||
targetNodeId: PublicKey,
|
||||
amount: MilliSatoshi,
|
||||
maxFee: MilliSatoshi,
|
||||
extraEdges: Set[GraphEdge] = Set.empty,
|
||||
ignoredEdges: Set[ChannelDesc] = Set.empty,
|
||||
ignoredVertices: Set[PublicKey] = Set.empty,
|
||||
pendingHtlcs: Seq[Route] = Nil,
|
||||
routeParams: RouteParams,
|
||||
currentBlockHeight: Long): Either[RouterException, Seq[Route]] = {
|
||||
// We use Yen's k-shortest paths to find many paths for chunks of the total amount.
|
||||
val numRoutes = {
|
||||
val directChannelsCount = g.getEdgesBetween(localNodeId, targetNodeId).length
|
||||
routeParams.mpp.maxParts.max(directChannelsCount) // if we have direct channels to the target, we can use them all
|
||||
}
|
||||
val routeAmount = routeParams.mpp.minPartAmount.min(amount)
|
||||
findRouteInternal(g, localNodeId, targetNodeId, routeAmount, maxFee, numRoutes, extraEdges, ignoredEdges, ignoredVertices, routeParams, currentBlockHeight) match {
|
||||
case Right(routes) =>
|
||||
// We use these shortest paths to find a set of non-conflicting HTLCs that send the total amount.
|
||||
split(amount, mutable.Queue(routes: _*), initializeUsedCapacity(pendingHtlcs), routeParams) match {
|
||||
case Right(routes) if validateMultiPartRoute(amount, maxFee, routes) => Right(routes)
|
||||
case _ => Left(RouteNotFound)
|
||||
}
|
||||
case Left(ex) => Left(ex)
|
||||
}
|
||||
}
|
||||
|
||||
@tailrec
|
||||
private def split(amount: MilliSatoshi, paths: mutable.Queue[Graph.WeightedPath], usedCapacity: mutable.Map[ShortChannelId, MilliSatoshi], routeParams: RouteParams, selectedRoutes: Seq[Route] = Nil): Either[RouterException, Seq[Route]] = {
|
||||
if (amount == 0.msat) {
|
||||
Right(selectedRoutes)
|
||||
} else if (paths.isEmpty) {
|
||||
Left(RouteNotFound)
|
||||
} else {
|
||||
val current = paths.dequeue()
|
||||
val candidate = computeRouteMaxAmount(current.path, usedCapacity)
|
||||
if (candidate.amount < routeParams.mpp.minPartAmount.min(amount)) {
|
||||
// this route doesn't have enough capacity left: we remove it and continue.
|
||||
split(amount, paths, usedCapacity, routeParams, selectedRoutes)
|
||||
} else {
|
||||
val route = if (routeParams.randomize) {
|
||||
// randomly choose the amount to be between 20% and 100% of the available capacity.
|
||||
val randomizedAmount = candidate.amount * ((20d + Random.nextInt(81)) / 100)
|
||||
if (randomizedAmount < routeParams.mpp.minPartAmount) {
|
||||
candidate.copy(amount = routeParams.mpp.minPartAmount.min(amount))
|
||||
} else {
|
||||
candidate.copy(amount = randomizedAmount.min(amount))
|
||||
}
|
||||
} else {
|
||||
candidate.copy(amount = candidate.amount.min(amount))
|
||||
}
|
||||
updateUsedCapacity(route, usedCapacity)
|
||||
// NB: we re-enqueue the current path, it may still have capacity for a second HTLC.
|
||||
paths.enqueue(current)
|
||||
split(amount - route.amount, paths, usedCapacity, routeParams, route +: selectedRoutes)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Compute the maximum amount that we can send through the given route. */
|
||||
private def computeRouteMaxAmount(route: Seq[GraphEdge], usedCapacity: mutable.Map[ShortChannelId, MilliSatoshi]): Route = {
|
||||
val firstHopMaxAmount = route.head.maxHtlcAmount(usedCapacity.getOrElse(route.head.update.shortChannelId, 0 msat))
|
||||
val amount = route.drop(1).foldLeft(firstHopMaxAmount) { case (amount, edge) =>
|
||||
// We compute fees going forward instead of backwards. That means we will slightly overestimate the fees of some
|
||||
// edges, but we will always stay inside the capacity bounds we computed.
|
||||
val amountMinusFees = amount - edge.fee(amount)
|
||||
val edgeMaxAmount = edge.maxHtlcAmount(usedCapacity.getOrElse(edge.update.shortChannelId, 0 msat))
|
||||
amountMinusFees.min(edgeMaxAmount)
|
||||
}
|
||||
Route(amount.max(0 msat), route.map(graphEdgeToHop))
|
||||
}
|
||||
|
||||
/** Initialize known used capacity based on pending HTLCs. */
|
||||
private def initializeUsedCapacity(pendingHtlcs: Seq[Route]): mutable.Map[ShortChannelId, MilliSatoshi] = {
|
||||
val usedCapacity = mutable.Map.empty[ShortChannelId, MilliSatoshi]
|
||||
// We always skip the first hop: since they are local channels, we already take into account those sent HTLCs in the
|
||||
// channel balance (which overrides the channel capacity in route calculation).
|
||||
pendingHtlcs.filter(_.hops.length > 1).foreach(route => updateUsedCapacity(route.copy(hops = route.hops.tail), usedCapacity))
|
||||
usedCapacity
|
||||
}
|
||||
|
||||
/** Update used capacity by taking into account an HTLC sent to the given route. */
|
||||
private def updateUsedCapacity(route: Route, usedCapacity: mutable.Map[ShortChannelId, MilliSatoshi]): Unit = {
|
||||
route.hops.reverse.foldLeft(route.amount) { case (amount, hop) =>
|
||||
usedCapacity.update(hop.lastUpdate.shortChannelId, usedCapacity.getOrElse(hop.lastUpdate.shortChannelId, 0 msat) + amount)
|
||||
amount + hop.fee(amount)
|
||||
}
|
||||
}
|
||||
|
||||
private def validateMultiPartRoute(amount: MilliSatoshi, maxFee: MilliSatoshi, routes: Seq[Route]): Boolean = {
|
||||
val amountOk = routes.map(_.amount).sum == amount
|
||||
val feeOk = routes.map(_.fee).sum <= maxFee
|
||||
amountOk && feeOk
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -241,7 +241,9 @@ object Router {
|
|||
searchHeuristicsEnabled: Boolean,
|
||||
searchRatioCltv: Double,
|
||||
searchRatioChannelAge: Double,
|
||||
searchRatioChannelCapacity: Double)
|
||||
searchRatioChannelCapacity: Double,
|
||||
mppMinPartAmount: MilliSatoshi,
|
||||
mppMaxParts: Int)
|
||||
|
||||
// @formatter:off
|
||||
case class ChannelDesc(shortChannelId: ShortChannelId, a: PublicKey, b: PublicKey)
|
||||
|
@ -330,7 +332,9 @@ object Router {
|
|||
override def fee(amount: MilliSatoshi): MilliSatoshi = fee
|
||||
}
|
||||
|
||||
case class RouteParams(randomize: Boolean, maxFeeBase: MilliSatoshi, maxFeePct: Double, routeMaxLength: Int, routeMaxCltv: CltvExpiryDelta, ratios: Option[WeightRatios]) {
|
||||
case class MultiPartParams(minPartAmount: MilliSatoshi, maxParts: Int)
|
||||
|
||||
case class RouteParams(randomize: Boolean, maxFeeBase: MilliSatoshi, maxFeePct: Double, routeMaxLength: Int, routeMaxCltv: CltvExpiryDelta, ratios: Option[WeightRatios], mpp: MultiPartParams) {
|
||||
def getMaxFee(amount: MilliSatoshi): MilliSatoshi = {
|
||||
// The payment fee must satisfy either the flat fee or the percentage fee, not necessarily both.
|
||||
maxFeeBase.max(amount * maxFeePct)
|
||||
|
@ -351,6 +355,10 @@ object Router {
|
|||
case class Route(amount: MilliSatoshi, hops: Seq[ChannelHop], allowEmpty: Boolean = false) {
|
||||
require(allowEmpty || hops.nonEmpty, "route cannot be empty")
|
||||
val length = hops.length
|
||||
lazy val fee: MilliSatoshi = {
|
||||
val amountToSend = hops.drop(1).reverse.foldLeft(amount) { case (amount1, hop) => amount1 + hop.fee(amount1) }
|
||||
amountToSend - amount
|
||||
}
|
||||
|
||||
/** This method retrieves the channel update that we used when we built the route. */
|
||||
def getChannelUpdateForNode(nodeId: PublicKey): Option[ChannelUpdate] = hops.find(_.nodeId == nodeId).map(_.lastUpdate)
|
||||
|
|
|
@ -16,11 +16,9 @@
|
|||
|
||||
package fr.acinq.eclair.wire
|
||||
|
||||
import java.util.UUID
|
||||
|
||||
import akka.actor.ActorRef
|
||||
import fr.acinq.bitcoin.DeterministicWallet.{ExtendedPrivateKey, KeyPath}
|
||||
import fr.acinq.bitcoin.{ByteVector32, ByteVector64, Crypto, OutPoint, Transaction, TxOut}
|
||||
import fr.acinq.bitcoin.{ByteVector32, OutPoint, Transaction, TxOut}
|
||||
import fr.acinq.eclair.channel._
|
||||
import fr.acinq.eclair.crypto.ShaChain
|
||||
import fr.acinq.eclair.payment.relay.Origin
|
||||
|
@ -29,19 +27,19 @@ import fr.acinq.eclair.transactions._
|
|||
import fr.acinq.eclair.wire.CommonCodecs._
|
||||
import fr.acinq.eclair.wire.LightningMessageCodecs._
|
||||
import grizzled.slf4j.Logging
|
||||
import scodec.bits.BitVector
|
||||
import scodec.codecs._
|
||||
import scodec.{Attempt, Codec}
|
||||
import shapeless.{HNil, ::}
|
||||
|
||||
import scala.compat.Platform
|
||||
import scala.concurrent.duration._
|
||||
|
||||
/**
|
||||
* Created by PM on 02/06/2017.
|
||||
*/
|
||||
object ChannelCodecs extends Logging {
|
||||
|
||||
/**
|
||||
* All LN protocol message must be stored as length-delimited, because they may have arbitrary trailing data
|
||||
*/
|
||||
def lengthDelimited[T](codec: Codec[T]): Codec[T] = variableSizeBytesLong(varintoverflow, codec)
|
||||
|
||||
val keyPathCodec: Codec[KeyPath] = ("path" | listOfN(uint16, uint32)).xmap[KeyPath](l => new KeyPath(l), keyPath => keyPath.path.toList).as[KeyPath]
|
||||
|
||||
val extendedPrivateKeyCodec: Codec[ExtendedPrivateKey] = (
|
||||
|
@ -51,14 +49,12 @@ object ChannelCodecs extends Logging {
|
|||
("path" | keyPathCodec) ::
|
||||
("parent" | int64)).as[ExtendedPrivateKey]
|
||||
|
||||
val channelVersionCodec: Codec[ChannelVersion] = discriminatorWithDefault[ChannelVersion](
|
||||
discriminator = discriminated[ChannelVersion].by(byte)
|
||||
.typecase(0x01, bits(ChannelVersion.LENGTH_BITS).as[ChannelVersion])
|
||||
// NB: 0x02 and 0x03 are *reserved* for backward compatibility reasons
|
||||
,
|
||||
fallback = provide(ChannelVersion.ZEROES) // README: DO NOT CHANGE THIS !! old channels don't have a channel version
|
||||
// field and don't support additional features which is why all bits are set to 0.
|
||||
)
|
||||
val channelVersionCodec: Codec[ChannelVersion] = bits(ChannelVersion.LENGTH_BITS).as[ChannelVersion]
|
||||
|
||||
/**
|
||||
* byte-aligned boolean codec
|
||||
*/
|
||||
val bool8: Codec[Boolean] = bool(8)
|
||||
|
||||
def localParamsCodec(channelVersion: ChannelVersion): Codec[LocalParams] = (
|
||||
("nodeId" | publicKey) ::
|
||||
|
@ -69,9 +65,9 @@ object ChannelCodecs extends Logging {
|
|||
("htlcMinimum" | millisatoshi) ::
|
||||
("toSelfDelay" | cltvExpiryDelta) ::
|
||||
("maxAcceptedHtlcs" | uint16) ::
|
||||
("isFunder" | bool) ::
|
||||
("defaultFinalScriptPubKey" | varsizebinarydata) ::
|
||||
("localPaymentBasepoint" | optional(provide(channelVersion.isSet(ChannelVersion.USE_STATIC_REMOTEKEY_BIT)), publicKey)) ::
|
||||
("isFunder" | bool8) ::
|
||||
("defaultFinalScriptPubKey" | lengthDelimited(bytes)) ::
|
||||
("localPaymentBasepoint" | optional(provide(channelVersion.hasStaticRemotekey), publicKey)) ::
|
||||
("features" | combinedFeaturesCodec)).as[LocalParams]
|
||||
|
||||
val remoteParamsCodec: Codec[RemoteParams] = (
|
||||
|
@ -89,14 +85,9 @@ object ChannelCodecs extends Logging {
|
|||
("htlcBasepoint" | publicKey) ::
|
||||
("features" | combinedFeaturesCodec)).as[RemoteParams]
|
||||
|
||||
val htlcCodec: Codec[DirectedHtlc] = discriminated[DirectedHtlc].by(bool)
|
||||
.typecase(true, updateAddHtlcCodec.as[IncomingHtlc])
|
||||
.typecase(false, updateAddHtlcCodec.as[OutgoingHtlc])
|
||||
|
||||
def setCodec[T](codec: Codec[T]): Codec[Set[T]] = Codec[Set[T]](
|
||||
(elems: Set[T]) => listOfN(uint16, codec).encode(elems.toList),
|
||||
(wire: BitVector) => listOfN(uint16, codec).decode(wire).map(_.map(_.toSet))
|
||||
)
|
||||
val htlcCodec: Codec[DirectedHtlc] = discriminated[DirectedHtlc].by(bool8)
|
||||
.typecase(true, lengthDelimited(updateAddHtlcCodec).as[IncomingHtlc])
|
||||
.typecase(false, lengthDelimited(updateAddHtlcCodec).as[OutgoingHtlc])
|
||||
|
||||
val commitmentSpecCodec: Codec[CommitmentSpec] = (
|
||||
("htlcs" | setCodec(htlcCodec)) ::
|
||||
|
@ -104,16 +95,16 @@ object ChannelCodecs extends Logging {
|
|||
("toLocal" | millisatoshi) ::
|
||||
("toRemote" | millisatoshi)).as[CommitmentSpec]
|
||||
|
||||
val outPointCodec: Codec[OutPoint] = variableSizeBytes(uint16, bytes.xmap(d => OutPoint.read(d.toArray), d => OutPoint.write(d)))
|
||||
val outPointCodec: Codec[OutPoint] = lengthDelimited(bytes.xmap(d => OutPoint.read(d.toArray), d => OutPoint.write(d)))
|
||||
|
||||
val txOutCodec: Codec[TxOut] = variableSizeBytes(uint16, bytes.xmap(d => TxOut.read(d.toArray), d => TxOut.write(d)))
|
||||
val txOutCodec: Codec[TxOut] = lengthDelimited(bytes.xmap(d => TxOut.read(d.toArray), d => TxOut.write(d)))
|
||||
|
||||
val txCodec: Codec[Transaction] = variableSizeBytes(uint16, bytes.xmap(d => Transaction.read(d.toArray), d => Transaction.write(d)))
|
||||
val txCodec: Codec[Transaction] = lengthDelimited(bytes.xmap(d => Transaction.read(d.toArray), d => Transaction.write(d)))
|
||||
|
||||
val inputInfoCodec: Codec[InputInfo] = (
|
||||
("outPoint" | outPointCodec) ::
|
||||
("txOut" | txOutCodec) ::
|
||||
("redeemScript" | varsizebinarydata)).as[InputInfo]
|
||||
("redeemScript" | lengthDelimited(bytes))).as[InputInfo]
|
||||
|
||||
val txWithInputInfoCodec: Codec[TransactionWithInputInfo] = discriminated[TransactionWithInputInfo].by(uint16)
|
||||
.typecase(0x01, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[CommitTx])
|
||||
|
@ -127,19 +118,10 @@ object ChannelCodecs extends Logging {
|
|||
.typecase(0x09, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[HtlcPenaltyTx])
|
||||
.typecase(0x10, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[ClosingTx])
|
||||
|
||||
// this is a backward compatible codec (we used to store the sig as DER encoded), now we store it as 64-bytes
|
||||
val sig64OrDERCodec: Codec[ByteVector64] = Codec[ByteVector64](
|
||||
(value: ByteVector64) => bytes(64).encode(value),
|
||||
(wire: BitVector) => bytes.decode(wire).map(_.map {
|
||||
case bin64 if bin64.size == 64 => ByteVector64(bin64)
|
||||
case der => Crypto.der2compact(der)
|
||||
})
|
||||
)
|
||||
|
||||
val htlcTxAndSigsCodec: Codec[HtlcTxAndSigs] = (
|
||||
("txinfo" | txWithInputInfoCodec) ::
|
||||
("localSig" | variableSizeBytes(uint16, sig64OrDERCodec)) :: // we store as variable length for historical purposes (we used to store as DER encoded)
|
||||
("remoteSig" | variableSizeBytes(uint16, sig64OrDERCodec))).as[HtlcTxAndSigs]
|
||||
("localSig" | lengthDelimited(bytes64)) :: // we store as variable length for historical purposes (we used to store as DER encoded)
|
||||
("remoteSig" | lengthDelimited(bytes64))).as[HtlcTxAndSigs]
|
||||
|
||||
val publishableTxsCodec: Codec[PublishableTxs] = (
|
||||
("commitTx" | (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[CommitTx]) ::
|
||||
|
@ -156,7 +138,7 @@ object ChannelCodecs extends Logging {
|
|||
("txid" | bytes32) ::
|
||||
("remotePerCommitmentPoint" | publicKey)).as[RemoteCommit]
|
||||
|
||||
val updateMessageCodec: Codec[UpdateMessage] = lightningMessageCodec.narrow(f => Attempt.successful(f.asInstanceOf[UpdateMessage]), g => g)
|
||||
val updateMessageCodec: Codec[UpdateMessage] = lengthDelimited(lightningMessageCodec.narrow[UpdateMessage](f => Attempt.successful(f.asInstanceOf[UpdateMessage]), g => g))
|
||||
|
||||
val localChangesCodec: Codec[LocalChanges] = (
|
||||
("proposed" | listOfN(uint16, updateMessageCodec)) ::
|
||||
|
@ -170,9 +152,9 @@ object ChannelCodecs extends Logging {
|
|||
|
||||
val waitingForRevocationCodec: Codec[WaitingForRevocation] = (
|
||||
("nextRemoteCommit" | remoteCommitCodec) ::
|
||||
("sent" | commitSigCodec) ::
|
||||
("sent" | lengthDelimited(commitSigCodec)) ::
|
||||
("sentAfterLocalCommitIndex" | uint64overflow) ::
|
||||
("reSignAsap" | bool)).as[WaitingForRevocation]
|
||||
("reSignAsap" | bool8)).as[WaitingForRevocation]
|
||||
|
||||
val localCodec: Codec[Origin.Local] = (
|
||||
("id" | uuid) ::
|
||||
|
@ -185,33 +167,19 @@ object ChannelCodecs extends Logging {
|
|||
("amountIn" | millisatoshi) ::
|
||||
("amountOut" | millisatoshi)).as[Origin.Relayed]
|
||||
|
||||
// this is for backward compatibility to handle legacy payments that didn't have identifiers
|
||||
val UNKNOWN_UUID = UUID.fromString("00000000-0000-0000-0000-000000000000")
|
||||
|
||||
val trampolineRelayedCodec: Codec[Origin.TrampolineRelayed] = (
|
||||
listOfN(uint16, bytes32 ~ int64) ::
|
||||
("sender" | provide(Option.empty[ActorRef]))
|
||||
).as[Origin.TrampolineRelayed]
|
||||
|
||||
val originCodec: Codec[Origin] = discriminated[Origin].by(uint16)
|
||||
.typecase(0x03, localCodec) // backward compatible
|
||||
.typecase(0x01, provide(Origin.Local(UNKNOWN_UUID, None)))
|
||||
.typecase(0x02, relayedCodec)
|
||||
.typecase(0x03, localCodec)
|
||||
.typecase(0x04, trampolineRelayedCodec)
|
||||
|
||||
val originsListCodec: Codec[List[(Long, Origin)]] = listOfN(uint16, int64 ~ originCodec)
|
||||
val originsMapCodec: Codec[Map[Long, Origin]] = mapCodec(int64, originCodec)
|
||||
|
||||
val originsMapCodec: Codec[Map[Long, Origin]] = Codec[Map[Long, Origin]](
|
||||
(map: Map[Long, Origin]) => originsListCodec.encode(map.toList),
|
||||
(wire: BitVector) => originsListCodec.decode(wire).map(_.map(_.toMap))
|
||||
)
|
||||
|
||||
val spentListCodec: Codec[List[(OutPoint, ByteVector32)]] = listOfN(uint16, outPointCodec ~ bytes32)
|
||||
|
||||
val spentMapCodec: Codec[Map[OutPoint, ByteVector32]] = Codec[Map[OutPoint, ByteVector32]](
|
||||
(map: Map[OutPoint, ByteVector32]) => spentListCodec.encode(map.toList),
|
||||
(wire: BitVector) => spentListCodec.decode(wire).map(_.map(_.toMap))
|
||||
)
|
||||
val spentMapCodec: Codec[Map[OutPoint, ByteVector32]] = mapCodec(outPointCodec, bytes32)
|
||||
|
||||
val commitmentsCodec: Codec[Commitments] = (
|
||||
("channelVersion" | channelVersionCodec) >>:~ { channelVersion =>
|
||||
|
@ -225,19 +193,19 @@ object ChannelCodecs extends Logging {
|
|||
("localNextHtlcId" | uint64overflow) ::
|
||||
("remoteNextHtlcId" | uint64overflow) ::
|
||||
("originChannels" | originsMapCodec) ::
|
||||
("remoteNextCommitInfo" | either(bool, waitingForRevocationCodec, publicKey)) ::
|
||||
("remoteNextCommitInfo" | either(bool8, waitingForRevocationCodec, publicKey)) ::
|
||||
("commitInput" | inputInfoCodec) ::
|
||||
("remotePerCommitmentSecrets" | ShaChain.shaChainCodec) ::
|
||||
("remotePerCommitmentSecrets" | byteAligned(ShaChain.shaChainCodec)) ::
|
||||
("channelId" | bytes32)
|
||||
}).as[Commitments]
|
||||
|
||||
val closingTxProposedCodec: Codec[ClosingTxProposed] = (
|
||||
("unsignedTx" | txCodec) ::
|
||||
("localClosingSigned" | closingSignedCodec)).as[ClosingTxProposed]
|
||||
("localClosingSigned" | lengthDelimited(closingSignedCodec))).as[ClosingTxProposed]
|
||||
|
||||
val localCommitPublishedCodec: Codec[LocalCommitPublished] = (
|
||||
("commitTx" | txCodec) ::
|
||||
("claimMainDelayedOutputTx" | optional(bool, txCodec)) ::
|
||||
("claimMainDelayedOutputTx" | optional(bool8, txCodec)) ::
|
||||
("htlcSuccessTxs" | listOfN(uint16, txCodec)) ::
|
||||
("htlcTimeoutTxs" | listOfN(uint16, txCodec)) ::
|
||||
("claimHtlcDelayedTx" | listOfN(uint16, txCodec)) ::
|
||||
|
@ -245,115 +213,68 @@ object ChannelCodecs extends Logging {
|
|||
|
||||
val remoteCommitPublishedCodec: Codec[RemoteCommitPublished] = (
|
||||
("commitTx" | txCodec) ::
|
||||
("claimMainOutputTx" | optional(bool, txCodec)) ::
|
||||
("claimMainOutputTx" | optional(bool8, txCodec)) ::
|
||||
("claimHtlcSuccessTxs" | listOfN(uint16, txCodec)) ::
|
||||
("claimHtlcTimeoutTxs" | listOfN(uint16, txCodec)) ::
|
||||
("spent" | spentMapCodec)).as[RemoteCommitPublished]
|
||||
|
||||
val revokedCommitPublishedCodec: Codec[RevokedCommitPublished] = (
|
||||
("commitTx" | txCodec) ::
|
||||
("claimMainOutputTx" | optional(bool, txCodec)) ::
|
||||
("mainPenaltyTx" | optional(bool, txCodec)) ::
|
||||
("claimMainOutputTx" | optional(bool8, txCodec)) ::
|
||||
("mainPenaltyTx" | optional(bool8, txCodec)) ::
|
||||
("htlcPenaltyTxs" | listOfN(uint16, txCodec)) ::
|
||||
("claimHtlcDelayedPenaltyTxs" | listOfN(uint16, txCodec)) ::
|
||||
("spent" | spentMapCodec)).as[RevokedCommitPublished]
|
||||
|
||||
// this is a decode-only codec compatible with versions 997acee and below, with placeholders for new fields
|
||||
val DATA_WAIT_FOR_FUNDING_CONFIRMED_COMPAT_01_Codec: Codec[DATA_WAIT_FOR_FUNDING_CONFIRMED] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("fundingTx" | provide[Option[Transaction]](None)) ::
|
||||
("waitingSince" | provide(System.currentTimeMillis.milliseconds.toSeconds)) ::
|
||||
("deferred" | optional(bool, fundingLockedCodec)) ::
|
||||
("lastSent" | either(bool, fundingCreatedCodec, fundingSignedCodec))).as[DATA_WAIT_FOR_FUNDING_CONFIRMED].decodeOnly
|
||||
|
||||
val DATA_WAIT_FOR_FUNDING_CONFIRMED_Codec: Codec[DATA_WAIT_FOR_FUNDING_CONFIRMED] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("fundingTx" | optional(bool, txCodec)) ::
|
||||
("fundingTx" | optional(bool8, txCodec)) ::
|
||||
("waitingSince" | int64) ::
|
||||
("deferred" | optional(bool, fundingLockedCodec)) ::
|
||||
("lastSent" | either(bool, fundingCreatedCodec, fundingSignedCodec))).as[DATA_WAIT_FOR_FUNDING_CONFIRMED]
|
||||
("deferred" | optional(bool8, lengthDelimited(fundingLockedCodec))) ::
|
||||
("lastSent" | either(bool8, lengthDelimited(fundingCreatedCodec), lengthDelimited(fundingSignedCodec)))).as[DATA_WAIT_FOR_FUNDING_CONFIRMED]
|
||||
|
||||
val DATA_WAIT_FOR_FUNDING_LOCKED_Codec: Codec[DATA_WAIT_FOR_FUNDING_LOCKED] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("shortChannelId" | shortchannelid) ::
|
||||
("lastSent" | fundingLockedCodec)).as[DATA_WAIT_FOR_FUNDING_LOCKED]
|
||||
|
||||
// All channel_announcement's written prior to supporting unknown trailing fields had the same fixed size, because
|
||||
// those are the announcements that *we* created and we always used an empty features field, which was the only
|
||||
// variable-length field.
|
||||
val noUnknownFieldsChannelAnnouncementSizeCodec: Codec[Int] = provide(430)
|
||||
|
||||
// We used to ignore unknown trailing fields, and assume that channel_update size was known. This is not true anymore,
|
||||
// so we need to tell the codec where to stop, otherwise all the remaining part of the data will be decoded as unknown
|
||||
// fields. Fortunately, we can easily tell what size the channel_update will be.
|
||||
val noUnknownFieldsChannelUpdateSizeCodec: Codec[Int] = peek( // we need to take a peek at a specific byte to know what size the message will be, and then rollback to read the full message
|
||||
ignore(8 * (64 + 32 + 8 + 4)) ~> // we skip the first fields: signature + chain_hash + short_channel_id + timestamp
|
||||
byte // this is the messageFlags byte
|
||||
)
|
||||
.map(messageFlags => if ((messageFlags & 1) != 0) 136 else 128) // depending on the value of option_channel_htlc_max, size will be 128B or 136B
|
||||
.decodeOnly // this is for compat, we only need to decode
|
||||
|
||||
// this is a decode-only codec compatible with versions 9afb26e and below
|
||||
val DATA_NORMAL_COMPAT_03_Codec: Codec[DATA_NORMAL] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("shortChannelId" | shortchannelid) ::
|
||||
("buried" | bool) ::
|
||||
("channelAnnouncement" | optional(bool, variableSizeBytes(noUnknownFieldsChannelAnnouncementSizeCodec, channelAnnouncementCodec))) ::
|
||||
("channelUpdate" | variableSizeBytes(noUnknownFieldsChannelUpdateSizeCodec, channelUpdateCodec)) ::
|
||||
("localShutdown" | optional(bool, shutdownCodec)) ::
|
||||
("remoteShutdown" | optional(bool, shutdownCodec))).as[DATA_NORMAL].decodeOnly
|
||||
("lastSent" | lengthDelimited(fundingLockedCodec))).as[DATA_WAIT_FOR_FUNDING_LOCKED]
|
||||
|
||||
val DATA_NORMAL_Codec: Codec[DATA_NORMAL] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("shortChannelId" | shortchannelid) ::
|
||||
("buried" | bool) ::
|
||||
("channelAnnouncement" | optional(bool, variableSizeBytes(uint16, channelAnnouncementCodec))) ::
|
||||
("channelUpdate" | variableSizeBytes(uint16, channelUpdateCodec)) ::
|
||||
("localShutdown" | optional(bool, shutdownCodec)) ::
|
||||
("remoteShutdown" | optional(bool, shutdownCodec))).as[DATA_NORMAL]
|
||||
("buried" | bool8) ::
|
||||
("channelAnnouncement" | optional(bool8, lengthDelimited(channelAnnouncementCodec))) ::
|
||||
("channelUpdate" | lengthDelimited(channelUpdateCodec)) ::
|
||||
("localShutdown" | optional(bool8, lengthDelimited(shutdownCodec))) ::
|
||||
("remoteShutdown" | optional(bool8, lengthDelimited(shutdownCodec)))).as[DATA_NORMAL]
|
||||
|
||||
val DATA_SHUTDOWN_Codec: Codec[DATA_SHUTDOWN] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("localShutdown" | shutdownCodec) ::
|
||||
("remoteShutdown" | shutdownCodec)).as[DATA_SHUTDOWN]
|
||||
("localShutdown" | lengthDelimited(shutdownCodec)) ::
|
||||
("remoteShutdown" | lengthDelimited(shutdownCodec))).as[DATA_SHUTDOWN]
|
||||
|
||||
val DATA_NEGOTIATING_Codec: Codec[DATA_NEGOTIATING] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("localShutdown" | shutdownCodec) ::
|
||||
("remoteShutdown" | shutdownCodec) ::
|
||||
("closingTxProposed" | listOfN(uint16, listOfN(uint16, closingTxProposedCodec))) ::
|
||||
("bestUnpublishedClosingTx_opt" | optional(bool, txCodec))).as[DATA_NEGOTIATING]
|
||||
|
||||
// this is a decode-only codec compatible with versions 818199e and below, with placeholders for new fields
|
||||
val DATA_CLOSING_COMPAT_06_Codec: Codec[DATA_CLOSING] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("fundingTx" | provide[Option[Transaction]](None)) ::
|
||||
("waitingSince" | provide(System.currentTimeMillis.milliseconds.toSeconds)) ::
|
||||
("mutualCloseProposed" | listOfN(uint16, txCodec)) ::
|
||||
("mutualClosePublished" | listOfN(uint16, txCodec)) ::
|
||||
("localCommitPublished" | optional(bool, localCommitPublishedCodec)) ::
|
||||
("remoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
|
||||
("nextRemoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
|
||||
("futureRemoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
|
||||
("revokedCommitPublished" | listOfN(uint16, revokedCommitPublishedCodec))).as[DATA_CLOSING].decodeOnly
|
||||
("localShutdown" | lengthDelimited(shutdownCodec)) ::
|
||||
("remoteShutdown" | lengthDelimited(shutdownCodec)) ::
|
||||
("closingTxProposed" | listOfN(uint16, listOfN(uint16, lengthDelimited(closingTxProposedCodec)))) ::
|
||||
("bestUnpublishedClosingTx_opt" | optional(bool8, txCodec))).as[DATA_NEGOTIATING]
|
||||
|
||||
val DATA_CLOSING_Codec: Codec[DATA_CLOSING] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("fundingTx" | optional(bool, txCodec)) ::
|
||||
("fundingTx" | optional(bool8, txCodec)) ::
|
||||
("waitingSince" | int64) ::
|
||||
("mutualCloseProposed" | listOfN(uint16, txCodec)) ::
|
||||
("mutualClosePublished" | listOfN(uint16, txCodec)) ::
|
||||
("localCommitPublished" | optional(bool, localCommitPublishedCodec)) ::
|
||||
("remoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
|
||||
("nextRemoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
|
||||
("futureRemoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
|
||||
("localCommitPublished" | optional(bool8, localCommitPublishedCodec)) ::
|
||||
("remoteCommitPublished" | optional(bool8, remoteCommitPublishedCodec)) ::
|
||||
("nextRemoteCommitPublished" | optional(bool8, remoteCommitPublishedCodec)) ::
|
||||
("futureRemoteCommitPublished" | optional(bool8, remoteCommitPublishedCodec)) ::
|
||||
("revokedCommitPublished" | listOfN(uint16, revokedCommitPublishedCodec))).as[DATA_CLOSING]
|
||||
|
||||
val DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT_Codec: Codec[DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("remoteChannelReestablish" | channelReestablishCodec)).as[DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT]
|
||||
|
||||
|
||||
/**
|
||||
* Order matters!!
|
||||
*
|
||||
|
@ -365,16 +286,25 @@ object ChannelCodecs extends Logging {
|
|||
*
|
||||
* More info here: https://github.com/scodec/scodec/issues/122
|
||||
*/
|
||||
val stateDataCodec: Codec[HasCommitments] = ("version" | constant(0x00)) ~> discriminated[HasCommitments].by(uint16)
|
||||
.typecase(0x10, DATA_NORMAL_Codec)
|
||||
.typecase(0x09, DATA_CLOSING_Codec)
|
||||
.typecase(0x08, DATA_WAIT_FOR_FUNDING_CONFIRMED_Codec)
|
||||
.typecase(0x01, DATA_WAIT_FOR_FUNDING_CONFIRMED_COMPAT_01_Codec)
|
||||
.typecase(0x02, DATA_WAIT_FOR_FUNDING_LOCKED_Codec)
|
||||
.typecase(0x03, DATA_NORMAL_COMPAT_03_Codec)
|
||||
.typecase(0x04, DATA_SHUTDOWN_Codec)
|
||||
.typecase(0x05, DATA_NEGOTIATING_Codec)
|
||||
.typecase(0x06, DATA_CLOSING_COMPAT_06_Codec)
|
||||
.typecase(0x07, DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT_Codec)
|
||||
val stateDataCodec: Codec[HasCommitments] = discriminated[HasCommitments].by(byte)
|
||||
.typecase(1, discriminated[HasCommitments].by(uint16)
|
||||
.typecase(0x20, DATA_WAIT_FOR_FUNDING_CONFIRMED_Codec)
|
||||
.typecase(0x21, DATA_WAIT_FOR_FUNDING_LOCKED_Codec)
|
||||
.typecase(0x22, DATA_NORMAL_Codec)
|
||||
.typecase(0x23, DATA_SHUTDOWN_Codec)
|
||||
.typecase(0x24, DATA_NEGOTIATING_Codec)
|
||||
.typecase(0x25, DATA_CLOSING_Codec)
|
||||
.typecase(0x26, DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT_Codec))
|
||||
.typecase(0, discriminated[HasCommitments].by(uint16)
|
||||
.typecase(0x10, LegacyChannelCodecs.DATA_NORMAL_Codec)
|
||||
.typecase(0x09, LegacyChannelCodecs.DATA_CLOSING_Codec)
|
||||
.typecase(0x08, LegacyChannelCodecs.DATA_WAIT_FOR_FUNDING_CONFIRMED_Codec)
|
||||
.typecase(0x01, LegacyChannelCodecs.DATA_WAIT_FOR_FUNDING_CONFIRMED_COMPAT_01_Codec)
|
||||
.typecase(0x02, LegacyChannelCodecs.DATA_WAIT_FOR_FUNDING_LOCKED_Codec)
|
||||
.typecase(0x03, LegacyChannelCodecs.DATA_NORMAL_COMPAT_03_Codec)
|
||||
.typecase(0x04, LegacyChannelCodecs.DATA_SHUTDOWN_Codec)
|
||||
.typecase(0x05, LegacyChannelCodecs.DATA_NEGOTIATING_Codec)
|
||||
.typecase(0x06, LegacyChannelCodecs.DATA_CLOSING_COMPAT_06_Codec)
|
||||
.typecase(0x07, LegacyChannelCodecs.DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT_Codec))
|
||||
|
||||
}
|
||||
|
|
|
@ -99,6 +99,10 @@ object CommonCodecs {
|
|||
|
||||
val varsizebinarydata: Codec[ByteVector] = variableSizeBytes(uint16, bytes)
|
||||
|
||||
def mapCodec[K, V](keyCodec: Codec[K], valueCodec: Codec[V]): Codec[Map[K, V]] = listOfN(uint16, keyCodec ~ valueCodec).xmap(_.toMap, _.toList)
|
||||
|
||||
def setCodec[T](codec: Codec[T]): Codec[Set[T]] = listOfN(uint16, codec).xmap(_.toSet, _.toList)
|
||||
|
||||
val listofsignatures: Codec[List[ByteVector64]] = listOfN(uint16, bytes64)
|
||||
|
||||
val ipv4address: Codec[Inet4Address] = bytes(4).xmap(b => InetAddress.getByAddress(b.toArray).asInstanceOf[Inet4Address], a => ByteVector(a.getAddress))
|
||||
|
|
|
@ -0,0 +1,356 @@
|
|||
/*
|
||||
* Copyright 2019 ACINQ SAS
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package fr.acinq.eclair.wire
|
||||
|
||||
import java.util.UUID
|
||||
|
||||
import akka.actor.ActorRef
|
||||
import fr.acinq.bitcoin.DeterministicWallet.{ExtendedPrivateKey, KeyPath}
|
||||
import fr.acinq.bitcoin.{ByteVector32, ByteVector64, Crypto, OutPoint, Transaction, TxOut}
|
||||
import fr.acinq.eclair.channel._
|
||||
import fr.acinq.eclair.crypto.ShaChain
|
||||
import fr.acinq.eclair.payment.relay.Origin
|
||||
import fr.acinq.eclair.transactions.Transactions._
|
||||
import fr.acinq.eclair.transactions._
|
||||
import fr.acinq.eclair.wire.CommonCodecs._
|
||||
import fr.acinq.eclair.wire.LightningMessageCodecs._
|
||||
import grizzled.slf4j.Logging
|
||||
import scodec.bits.BitVector
|
||||
import scodec.codecs._
|
||||
import scodec.{Attempt, Codec}
|
||||
|
||||
import scala.concurrent.duration._
|
||||
|
||||
/**
|
||||
* Those codecs are here solely for backward compatibility reasons.
|
||||
*
|
||||
* Created by PM on 02/06/2017.
|
||||
*/
|
||||
private[wire] object LegacyChannelCodecs extends Logging {
|
||||
|
||||
val keyPathCodec: Codec[KeyPath] = ("path" | listOfN(uint16, uint32)).xmap[KeyPath](l => new KeyPath(l), keyPath => keyPath.path.toList).as[KeyPath].decodeOnly
|
||||
|
||||
val extendedPrivateKeyCodec: Codec[ExtendedPrivateKey] = (
|
||||
("secretkeybytes" | bytes32) ::
|
||||
("chaincode" | bytes32) ::
|
||||
("depth" | uint16) ::
|
||||
("path" | keyPathCodec) ::
|
||||
("parent" | int64)).as[ExtendedPrivateKey].decodeOnly
|
||||
|
||||
val channelVersionCodec: Codec[ChannelVersion] = discriminatorWithDefault[ChannelVersion](
|
||||
discriminator = discriminated[ChannelVersion].by(byte)
|
||||
.typecase(0x01, bits(ChannelVersion.LENGTH_BITS).as[ChannelVersion])
|
||||
// NB: 0x02 and 0x03 are *reserved* for backward compatibility reasons
|
||||
,
|
||||
fallback = provide(ChannelVersion.ZEROES) // README: DO NOT CHANGE THIS !! old channels don't have a channel version
|
||||
// field and don't support additional features which is why all bits are set to 0.
|
||||
)
|
||||
|
||||
def localParamsCodec(channelVersion: ChannelVersion): Codec[LocalParams] = (
|
||||
("nodeId" | publicKey) ::
|
||||
("channelPath" | keyPathCodec) ::
|
||||
("dustLimit" | satoshi) ::
|
||||
("maxHtlcValueInFlightMsat" | uint64) ::
|
||||
("channelReserve" | satoshi) ::
|
||||
("htlcMinimum" | millisatoshi) ::
|
||||
("toSelfDelay" | cltvExpiryDelta) ::
|
||||
("maxAcceptedHtlcs" | uint16) ::
|
||||
("isFunder" | bool) ::
|
||||
("defaultFinalScriptPubKey" | varsizebinarydata) ::
|
||||
("localPaymentBasepoint" | optional(provide(channelVersion.hasStaticRemotekey), publicKey)) ::
|
||||
("features" | combinedFeaturesCodec)).as[LocalParams].decodeOnly
|
||||
|
||||
val remoteParamsCodec: Codec[RemoteParams] = (
|
||||
("nodeId" | publicKey) ::
|
||||
("dustLimit" | satoshi) ::
|
||||
("maxHtlcValueInFlightMsat" | uint64) ::
|
||||
("channelReserve" | satoshi) ::
|
||||
("htlcMinimum" | millisatoshi) ::
|
||||
("toSelfDelay" | cltvExpiryDelta) ::
|
||||
("maxAcceptedHtlcs" | uint16) ::
|
||||
("fundingPubKey" | publicKey) ::
|
||||
("revocationBasepoint" | publicKey) ::
|
||||
("paymentBasepoint" | publicKey) ::
|
||||
("delayedPaymentBasepoint" | publicKey) ::
|
||||
("htlcBasepoint" | publicKey) ::
|
||||
("features" | combinedFeaturesCodec)).as[RemoteParams].decodeOnly
|
||||
|
||||
val htlcCodec: Codec[DirectedHtlc] = discriminated[DirectedHtlc].by(bool)
|
||||
.typecase(true, updateAddHtlcCodec.as[IncomingHtlc])
|
||||
.typecase(false, updateAddHtlcCodec.as[OutgoingHtlc])
|
||||
|
||||
def setCodec[T](codec: Codec[T]): Codec[Set[T]] = Codec[Set[T]](
|
||||
(elems: Set[T]) => listOfN(uint16, codec).encode(elems.toList),
|
||||
(wire: BitVector) => listOfN(uint16, codec).decode(wire).map(_.map(_.toSet))
|
||||
)
|
||||
|
||||
val commitmentSpecCodec: Codec[CommitmentSpec] = (
|
||||
("htlcs" | setCodec(htlcCodec)) ::
|
||||
("feeratePerKw" | uint32) ::
|
||||
("toLocal" | millisatoshi) ::
|
||||
("toRemote" | millisatoshi)).as[CommitmentSpec].decodeOnly
|
||||
|
||||
val outPointCodec: Codec[OutPoint] = variableSizeBytes(uint16, bytes.xmap(d => OutPoint.read(d.toArray), d => OutPoint.write(d)))
|
||||
|
||||
val txOutCodec: Codec[TxOut] = variableSizeBytes(uint16, bytes.xmap(d => TxOut.read(d.toArray), d => TxOut.write(d)))
|
||||
|
||||
val txCodec: Codec[Transaction] = variableSizeBytes(uint16, bytes.xmap(d => Transaction.read(d.toArray), d => Transaction.write(d)))
|
||||
|
||||
val inputInfoCodec: Codec[InputInfo] = (
|
||||
("outPoint" | outPointCodec) ::
|
||||
("txOut" | txOutCodec) ::
|
||||
("redeemScript" | varsizebinarydata)).as[InputInfo].decodeOnly
|
||||
|
||||
val txWithInputInfoCodec: Codec[TransactionWithInputInfo] = discriminated[TransactionWithInputInfo].by(uint16)
|
||||
.typecase(0x01, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[CommitTx])
|
||||
.typecase(0x02, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec) :: ("paymentHash" | bytes32)).as[HtlcSuccessTx])
|
||||
.typecase(0x03, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[HtlcTimeoutTx])
|
||||
.typecase(0x04, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[ClaimHtlcSuccessTx])
|
||||
.typecase(0x05, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[ClaimHtlcTimeoutTx])
|
||||
.typecase(0x06, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[ClaimP2WPKHOutputTx])
|
||||
.typecase(0x07, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[ClaimDelayedOutputTx])
|
||||
.typecase(0x08, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[MainPenaltyTx])
|
||||
.typecase(0x09, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[HtlcPenaltyTx])
|
||||
.typecase(0x10, (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[ClosingTx])
|
||||
|
||||
// this is a backward compatible codec (we used to store the sig as DER encoded), now we store it as 64-bytes
|
||||
val sig64OrDERCodec: Codec[ByteVector64] = Codec[ByteVector64](
|
||||
(value: ByteVector64) => bytes(64).encode(value),
|
||||
(wire: BitVector) => bytes.decode(wire).map(_.map {
|
||||
case bin64 if bin64.size == 64 => ByteVector64(bin64)
|
||||
case der => Crypto.der2compact(der)
|
||||
})
|
||||
)
|
||||
|
||||
val htlcTxAndSigsCodec: Codec[HtlcTxAndSigs] = (
|
||||
("txinfo" | txWithInputInfoCodec) ::
|
||||
("localSig" | variableSizeBytes(uint16, sig64OrDERCodec)) :: // we store as variable length for historical purposes (we used to store as DER encoded)
|
||||
("remoteSig" | variableSizeBytes(uint16, sig64OrDERCodec))).as[HtlcTxAndSigs].decodeOnly
|
||||
|
||||
val publishableTxsCodec: Codec[PublishableTxs] = (
|
||||
("commitTx" | (("inputInfo" | inputInfoCodec) :: ("tx" | txCodec)).as[CommitTx]) ::
|
||||
("htlcTxsAndSigs" | listOfN(uint16, htlcTxAndSigsCodec))).as[PublishableTxs].decodeOnly
|
||||
|
||||
val localCommitCodec: Codec[LocalCommit] = (
|
||||
("index" | uint64overflow) ::
|
||||
("spec" | commitmentSpecCodec) ::
|
||||
("publishableTxs" | publishableTxsCodec)).as[LocalCommit].decodeOnly
|
||||
|
||||
val remoteCommitCodec: Codec[RemoteCommit] = (
|
||||
("index" | uint64overflow) ::
|
||||
("spec" | commitmentSpecCodec) ::
|
||||
("txid" | bytes32) ::
|
||||
("remotePerCommitmentPoint" | publicKey)).as[RemoteCommit].decodeOnly
|
||||
|
||||
val updateMessageCodec: Codec[UpdateMessage] = lightningMessageCodec.narrow(f => Attempt.successful(f.asInstanceOf[UpdateMessage]), g => g)
|
||||
|
||||
val localChangesCodec: Codec[LocalChanges] = (
|
||||
("proposed" | listOfN(uint16, updateMessageCodec)) ::
|
||||
("signed" | listOfN(uint16, updateMessageCodec)) ::
|
||||
("acked" | listOfN(uint16, updateMessageCodec))).as[LocalChanges].decodeOnly
|
||||
|
||||
val remoteChangesCodec: Codec[RemoteChanges] = (
|
||||
("proposed" | listOfN(uint16, updateMessageCodec)) ::
|
||||
("acked" | listOfN(uint16, updateMessageCodec)) ::
|
||||
("signed" | listOfN(uint16, updateMessageCodec))).as[RemoteChanges].decodeOnly
|
||||
|
||||
val waitingForRevocationCodec: Codec[WaitingForRevocation] = (
|
||||
("nextRemoteCommit" | remoteCommitCodec) ::
|
||||
("sent" | commitSigCodec) ::
|
||||
("sentAfterLocalCommitIndex" | uint64overflow) ::
|
||||
("reSignAsap" | bool)).as[WaitingForRevocation].decodeOnly
|
||||
|
||||
val localCodec: Codec[Origin.Local] = (
|
||||
("id" | uuid) ::
|
||||
("sender" | provide(Option.empty[ActorRef]))
|
||||
).as[Origin.Local]
|
||||
|
||||
val relayedCodec: Codec[Origin.Relayed] = (
|
||||
("originChannelId" | bytes32) ::
|
||||
("originHtlcId" | int64) ::
|
||||
("amountIn" | millisatoshi) ::
|
||||
("amountOut" | millisatoshi)).as[Origin.Relayed]
|
||||
|
||||
// this is for backward compatibility to handle legacy payments that didn't have identifiers
|
||||
val UNKNOWN_UUID = UUID.fromString("00000000-0000-0000-0000-000000000000")
|
||||
|
||||
val trampolineRelayedCodec: Codec[Origin.TrampolineRelayed] = (
|
||||
listOfN(uint16, bytes32 ~ int64) ::
|
||||
("sender" | provide(Option.empty[ActorRef]))
|
||||
).as[Origin.TrampolineRelayed]
|
||||
|
||||
val originCodec: Codec[Origin] = discriminated[Origin].by(uint16)
|
||||
.typecase(0x03, localCodec) // backward compatible
|
||||
.typecase(0x01, provide(Origin.Local(UNKNOWN_UUID, None)))
|
||||
.typecase(0x02, relayedCodec)
|
||||
.typecase(0x04, trampolineRelayedCodec)
|
||||
|
||||
val originsListCodec: Codec[List[(Long, Origin)]] = listOfN(uint16, int64 ~ originCodec)
|
||||
|
||||
val originsMapCodec: Codec[Map[Long, Origin]] = Codec[Map[Long, Origin]](
|
||||
(map: Map[Long, Origin]) => originsListCodec.encode(map.toList),
|
||||
(wire: BitVector) => originsListCodec.decode(wire).map(_.map(_.toMap))
|
||||
)
|
||||
|
||||
val spentListCodec: Codec[List[(OutPoint, ByteVector32)]] = listOfN(uint16, outPointCodec ~ bytes32)
|
||||
|
||||
val spentMapCodec: Codec[Map[OutPoint, ByteVector32]] = Codec[Map[OutPoint, ByteVector32]](
|
||||
(map: Map[OutPoint, ByteVector32]) => spentListCodec.encode(map.toList),
|
||||
(wire: BitVector) => spentListCodec.decode(wire).map(_.map(_.toMap))
|
||||
)
|
||||
|
||||
val commitmentsCodec: Codec[Commitments] = (
|
||||
("channelVersion" | channelVersionCodec) >>:~ { channelVersion =>
|
||||
("localParams" | localParamsCodec(channelVersion)) ::
|
||||
("remoteParams" | remoteParamsCodec) ::
|
||||
("channelFlags" | byte) ::
|
||||
("localCommit" | localCommitCodec) ::
|
||||
("remoteCommit" | remoteCommitCodec) ::
|
||||
("localChanges" | localChangesCodec) ::
|
||||
("remoteChanges" | remoteChangesCodec) ::
|
||||
("localNextHtlcId" | uint64overflow) ::
|
||||
("remoteNextHtlcId" | uint64overflow) ::
|
||||
("originChannels" | originsMapCodec) ::
|
||||
("remoteNextCommitInfo" | either(bool, waitingForRevocationCodec, publicKey)) ::
|
||||
("commitInput" | inputInfoCodec) ::
|
||||
("remotePerCommitmentSecrets" | ShaChain.shaChainCodec) ::
|
||||
("channelId" | bytes32)
|
||||
}).as[Commitments].decodeOnly
|
||||
|
||||
val closingTxProposedCodec: Codec[ClosingTxProposed] = (
|
||||
("unsignedTx" | txCodec) ::
|
||||
("localClosingSigned" | closingSignedCodec)).as[ClosingTxProposed].decodeOnly
|
||||
|
||||
val localCommitPublishedCodec: Codec[LocalCommitPublished] = (
|
||||
("commitTx" | txCodec) ::
|
||||
("claimMainDelayedOutputTx" | optional(bool, txCodec)) ::
|
||||
("htlcSuccessTxs" | listOfN(uint16, txCodec)) ::
|
||||
("htlcTimeoutTxs" | listOfN(uint16, txCodec)) ::
|
||||
("claimHtlcDelayedTx" | listOfN(uint16, txCodec)) ::
|
||||
("spent" | spentMapCodec)).as[LocalCommitPublished].decodeOnly
|
||||
|
||||
val remoteCommitPublishedCodec: Codec[RemoteCommitPublished] = (
|
||||
("commitTx" | txCodec) ::
|
||||
("claimMainOutputTx" | optional(bool, txCodec)) ::
|
||||
("claimHtlcSuccessTxs" | listOfN(uint16, txCodec)) ::
|
||||
("claimHtlcTimeoutTxs" | listOfN(uint16, txCodec)) ::
|
||||
("spent" | spentMapCodec)).as[RemoteCommitPublished].decodeOnly
|
||||
|
||||
val revokedCommitPublishedCodec: Codec[RevokedCommitPublished] = (
|
||||
("commitTx" | txCodec) ::
|
||||
("claimMainOutputTx" | optional(bool, txCodec)) ::
|
||||
("mainPenaltyTx" | optional(bool, txCodec)) ::
|
||||
("htlcPenaltyTxs" | listOfN(uint16, txCodec)) ::
|
||||
("claimHtlcDelayedPenaltyTxs" | listOfN(uint16, txCodec)) ::
|
||||
("spent" | spentMapCodec)).as[RevokedCommitPublished].decodeOnly
|
||||
|
||||
// this is a decode-only codec compatible with versions 997acee and below, with placeholders for new fields
|
||||
val DATA_WAIT_FOR_FUNDING_CONFIRMED_COMPAT_01_Codec: Codec[DATA_WAIT_FOR_FUNDING_CONFIRMED] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("fundingTx" | provide[Option[Transaction]](None)) ::
|
||||
("waitingSince" | provide(System.currentTimeMillis.milliseconds.toSeconds)) ::
|
||||
("deferred" | optional(bool, fundingLockedCodec)) ::
|
||||
("lastSent" | either(bool, fundingCreatedCodec, fundingSignedCodec))).as[DATA_WAIT_FOR_FUNDING_CONFIRMED].decodeOnly
|
||||
|
||||
val DATA_WAIT_FOR_FUNDING_CONFIRMED_Codec: Codec[DATA_WAIT_FOR_FUNDING_CONFIRMED] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("fundingTx" | optional(bool, txCodec)) ::
|
||||
("waitingSince" | int64) ::
|
||||
("deferred" | optional(bool, fundingLockedCodec)) ::
|
||||
("lastSent" | either(bool, fundingCreatedCodec, fundingSignedCodec))).as[DATA_WAIT_FOR_FUNDING_CONFIRMED].decodeOnly
|
||||
|
||||
val DATA_WAIT_FOR_FUNDING_LOCKED_Codec: Codec[DATA_WAIT_FOR_FUNDING_LOCKED] = (
|
||||
("commitments" | commitmentsCodec) ::
|
||||
("shortChannelId" | shortchannelid) ::
|
||||
("lastSent" | fundingLockedCodec)).as[DATA_WAIT_FOR_FUNDING_LOCKED].decodeOnly
|
||||
|
||||
// All channel_announcement's written prior to supporting unknown trailing fields had the same fixed size, because
|
||||
// those are the announcements that *we* created and we always used an empty features field, which was the only
|
||||
// variable-length field.
|
||||
val noUnknownFieldsChannelAnnouncementSizeCodec: Codec[Int] = provide(430)
|
||||
|
||||
// We used to ignore unknown trailing fields, and assume that channel_update size was known. This is not true anymore,
|
||||
// so we need to tell the codec where to stop, otherwise all the remaining part of the data will be decoded as unknown
|
||||
// fields. Fortunately, we can easily tell what size the channel_update will be.
|
||||
val noUnknownFieldsChannelUpdateSizeCodec: Codec[Int] = peek( // we need to take a peek at a specific byte to know what size the message will be, and then rollback to read the full message
|
||||
ignore(8 * (64 + 32 + 8 + 4)) ~> // we skip the first fields: signature + chain_hash + short_channel_id + timestamp
|
||||
byte // this is the messageFlags byte
|
||||
)
|
||||
.map(messageFlags => if ((messageFlags & 1) != 0) 136 else 128) // depending on the value of option_channel_htlc_max, size will be 128B or 136B
|
||||
.decodeOnly // this is for compat, we only need to decode

  // this is a decode-only codec compatible with versions 9afb26e and below
  val DATA_NORMAL_COMPAT_03_Codec: Codec[DATA_NORMAL] = (
    ("commitments" | commitmentsCodec) ::
      ("shortChannelId" | shortchannelid) ::
      ("buried" | bool) ::
      ("channelAnnouncement" | optional(bool, variableSizeBytes(noUnknownFieldsChannelAnnouncementSizeCodec, channelAnnouncementCodec))) ::
      ("channelUpdate" | variableSizeBytes(noUnknownFieldsChannelUpdateSizeCodec, channelUpdateCodec)) ::
      ("localShutdown" | optional(bool, shutdownCodec)) ::
      ("remoteShutdown" | optional(bool, shutdownCodec))).as[DATA_NORMAL].decodeOnly

  val DATA_NORMAL_Codec: Codec[DATA_NORMAL] = (
    ("commitments" | commitmentsCodec) ::
      ("shortChannelId" | shortchannelid) ::
      ("buried" | bool) ::
      ("channelAnnouncement" | optional(bool, variableSizeBytes(uint16, channelAnnouncementCodec))) ::
      ("channelUpdate" | variableSizeBytes(uint16, channelUpdateCodec)) ::
      ("localShutdown" | optional(bool, shutdownCodec)) ::
      ("remoteShutdown" | optional(bool, shutdownCodec))).as[DATA_NORMAL].decodeOnly

  val DATA_SHUTDOWN_Codec: Codec[DATA_SHUTDOWN] = (
    ("commitments" | commitmentsCodec) ::
      ("localShutdown" | shutdownCodec) ::
      ("remoteShutdown" | shutdownCodec)).as[DATA_SHUTDOWN].decodeOnly

  val DATA_NEGOTIATING_Codec: Codec[DATA_NEGOTIATING] = (
    ("commitments" | commitmentsCodec) ::
      ("localShutdown" | shutdownCodec) ::
      ("remoteShutdown" | shutdownCodec) ::
      ("closingTxProposed" | listOfN(uint16, listOfN(uint16, closingTxProposedCodec))) ::
      ("bestUnpublishedClosingTx_opt" | optional(bool, txCodec))).as[DATA_NEGOTIATING].decodeOnly

  // this is a decode-only codec compatible with versions 818199e and below, with placeholders for new fields
  val DATA_CLOSING_COMPAT_06_Codec: Codec[DATA_CLOSING] = (
    ("commitments" | commitmentsCodec) ::
      ("fundingTx" | provide[Option[Transaction]](None)) ::
      ("waitingSince" | provide(System.currentTimeMillis.milliseconds.toSeconds)) ::
      ("mutualCloseProposed" | listOfN(uint16, txCodec)) ::
      ("mutualClosePublished" | listOfN(uint16, txCodec)) ::
      ("localCommitPublished" | optional(bool, localCommitPublishedCodec)) ::
      ("remoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
      ("nextRemoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
      ("futureRemoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
      ("revokedCommitPublished" | listOfN(uint16, revokedCommitPublishedCodec))).as[DATA_CLOSING].decodeOnly

  val DATA_CLOSING_Codec: Codec[DATA_CLOSING] = (
    ("commitments" | commitmentsCodec) ::
      ("fundingTx" | optional(bool, txCodec)) ::
      ("waitingSince" | int64) ::
      ("mutualCloseProposed" | listOfN(uint16, txCodec)) ::
      ("mutualClosePublished" | listOfN(uint16, txCodec)) ::
      ("localCommitPublished" | optional(bool, localCommitPublishedCodec)) ::
      ("remoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
      ("nextRemoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
      ("futureRemoteCommitPublished" | optional(bool, remoteCommitPublishedCodec)) ::
      ("revokedCommitPublished" | listOfN(uint16, revokedCommitPublishedCodec))).as[DATA_CLOSING].decodeOnly

  val DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT_Codec: Codec[DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT] = (
    ("commitments" | commitmentsCodec) ::
      ("remoteChannelReestablish" | channelReestablishCodec)).as[DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT].decodeOnly

}
|
|
@ -64,7 +64,7 @@ class JsonSerializersSpec extends AnyFunSuite with Logging {
|
|||
defaultFinalScriptPubKey = randomBytes(10 + Random.nextInt(200)),
|
||||
isFunder = Random.nextBoolean(),
|
||||
features = Features(randomBytes(256)),
|
||||
localPaymentBasepoint = None)
|
||||
staticPaymentBasepoint = None)
|
||||
|
||||
logger.info(write(localParams))
|
||||
|
||||
|
|
|
@ -133,7 +133,9 @@ object TestConstants {
|
|||
searchHeuristicsEnabled = false,
|
||||
searchRatioCltv = 0.0,
|
||||
searchRatioChannelAge = 0.0,
|
||||
searchRatioChannelCapacity = 0.0
|
||||
searchRatioChannelCapacity = 0.0,
|
||||
mppMinPartAmount = 15000000 msat,
|
||||
mppMaxParts = 10
|
||||
),
|
||||
socksProxy_opt = None,
|
||||
maxPaymentAttempts = 5,
|
||||
|
@ -217,7 +219,9 @@ object TestConstants {
|
|||
searchHeuristicsEnabled = false,
|
||||
searchRatioCltv = 0.0,
|
||||
searchRatioChannelAge = 0.0,
|
||||
searchRatioChannelCapacity = 0.0
|
||||
searchRatioChannelCapacity = 0.0,
|
||||
mppMinPartAmount = 15000000 msat,
|
||||
mppMaxParts = 10
|
||||
),
|
||||
socksProxy_opt = None,
|
||||
maxPaymentAttempts = 5,
|
||||
|
|
|
@ -16,10 +16,9 @@
|
|||
|
||||
package fr.acinq.eclair.blockchain
|
||||
|
||||
import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey}
|
||||
import fr.acinq.bitcoin.{Base58, ByteVector32, Crypto, OutPoint, Satoshi, Transaction, TxIn, TxOut}
|
||||
import fr.acinq.bitcoin.Crypto.PublicKey
|
||||
import fr.acinq.bitcoin.{ByteVector32, Crypto, OutPoint, Satoshi, Transaction, TxIn, TxOut}
|
||||
import fr.acinq.eclair.LongToBtcAmount
|
||||
import scodec.bits.ByteVector
|
||||
import scodec.bits._
|
||||
|
||||
import scala.concurrent.Future
|
||||
|
@ -31,14 +30,14 @@ class TestWallet extends EclairWallet {
|
|||
|
||||
var rolledback = Set.empty[Transaction]
|
||||
|
||||
override def getBalance: Future[Satoshi] = ???
|
||||
override def getBalance: Future[OnChainBalance] = Future.successful(OnChainBalance(1105 sat, 561 sat))
|
||||
|
||||
override def getReceiveAddress: Future[String] = Future.successful("bcrt1qwcv8naajwn8fjhu8z59q9e6ucrqr068rlcenux")
|
||||
|
||||
override def getReceivePubkey(receiveAddress: Option[String] = None): Future[Crypto.PublicKey] = Future.successful(PublicKey(hex"028feba10d0eafd0fad8fe20e6d9206e6bd30242826de05c63f459a00aced24b12"))
|
||||
|
||||
override def makeFundingTx(pubkeyScript: ByteVector, amount: Satoshi, feeRatePerKw: Long): Future[MakeFundingTxResponse] =
|
||||
Future.successful(TestWallet.makeDummyFundingTx(pubkeyScript, amount, feeRatePerKw))
|
||||
Future.successful(TestWallet.makeDummyFundingTx(pubkeyScript, amount))
|
||||
|
||||
override def commit(tx: Transaction): Future[Boolean] = Future.successful(true)
|
||||
|
||||
|
@ -52,11 +51,12 @@ class TestWallet extends EclairWallet {
|
|||
|
||||
object TestWallet {
|
||||
|
||||
def makeDummyFundingTx(pubkeyScript: ByteVector, amount: Satoshi, feeRatePerKw: Long): MakeFundingTxResponse = {
|
||||
def makeDummyFundingTx(pubkeyScript: ByteVector, amount: Satoshi): MakeFundingTxResponse = {
|
||||
val fundingTx = Transaction(version = 2,
|
||||
txIn = TxIn(OutPoint(ByteVector32(ByteVector.fill(32)(1)), 42), signatureScript = Nil, sequence = TxIn.SEQUENCE_FINAL) :: Nil,
|
||||
txOut = TxOut(amount, pubkeyScript) :: Nil,
|
||||
lockTime = 0)
|
||||
MakeFundingTxResponse(fundingTx, 0, 420 sat)
|
||||
}
|
||||
|
||||
}
|
|
@ -17,16 +17,14 @@
|
|||
package fr.acinq.eclair.blockchain.electrum.db.sqlite
|
||||
|
||||
import fr.acinq.bitcoin.{Block, BlockHeader, OutPoint, Satoshi, Transaction, TxIn, TxOut}
|
||||
import fr.acinq.eclair.{TestConstants, randomBytes, randomBytes32}
|
||||
import fr.acinq.eclair.blockchain.electrum.ElectrumClient
|
||||
import fr.acinq.eclair.blockchain.electrum.ElectrumClient.GetMerkleResponse
|
||||
import fr.acinq.eclair.blockchain.electrum.ElectrumWallet.PersistentData
|
||||
import fr.acinq.eclair.blockchain.electrum.db.sqlite.SqliteWalletDb.version
|
||||
import fr.acinq.eclair.wire.ChannelCodecs.txCodec
|
||||
import fr.acinq.eclair.{TestConstants, randomBytes, randomBytes32}
|
||||
import fr.acinq.eclair.wire.CommonCodecs.setCodec
|
||||
import org.scalatest.funsuite.AnyFunSuite
|
||||
import scodec.Codec
|
||||
import scodec.bits.BitVector
|
||||
import scodec.codecs.{constant, listOfN, provide, uint16}
|
||||
|
||||
import scala.util.Random
|
||||
|
||||
|
@ -105,9 +103,9 @@ class SqliteWalletDbSpec extends AnyFunSuite {
|
|||
}
|
||||
|
||||
test("read old persistent data") {
|
||||
import scodec.codecs._
|
||||
import SqliteWalletDb._
|
||||
import fr.acinq.eclair.wire.ChannelCodecs._
|
||||
import scodec.codecs._
|
||||
|
||||
val oldPersistentDataCodec: Codec[PersistentData] = (
|
||||
("version" | constant(BitVector.fromInt(version))) ::
|
||||
|
@ -119,7 +117,7 @@ class SqliteWalletDbSpec extends AnyFunSuite {
|
|||
("history" | historyCodec) ::
|
||||
("proofs" | proofsCodec) ::
|
||||
("pendingTransactions" | listOfN(uint16, txCodec)) ::
|
||||
("locks" | setCodec(txCodec))).as[PersistentData]
|
||||
("locks" | setCodec(txCodec))).as[PersistentData]
|
||||
|
||||
for (i <- 0 until 50) {
|
||||
val data = randomPersistentData
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Copyright 2020 ACINQ SAS
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package fr.acinq.eclair.blockchain.fee
|
||||
|
||||
import akka.util.Timeout
|
||||
import fr.acinq.eclair.TestConstants
|
||||
import fr.acinq.eclair.db.sqlite.SqliteFeeratesDb
|
||||
import org.scalatest.funsuite.AnyFunSuite
|
||||
|
||||
import scala.concurrent.ExecutionContext.Implicits.global
|
||||
import scala.concurrent.duration._
|
||||
import scala.concurrent.{Await, Future}
|
||||
|
||||
|
||||
class DbFeeProviderSpec extends AnyFunSuite {
|
||||
|
||||
val feerates1: FeeratesPerKB = FeeratesPerKB(100, 200, 300, 400, 500, 600, 700)
|
||||
|
||||
test("db fee provider saves feerates in database") {
|
||||
val sqlite = TestConstants.sqliteInMemory()
|
||||
val db = new SqliteFeeratesDb(sqlite)
|
||||
val provider = new DbFeeProvider(db, new ConstantFeeProvider(feerates1))
|
||||
|
||||
assert(db.getFeerates().isEmpty)
|
||||
assert(Await.result(provider.getFeerates, Timeout(30 seconds).duration) == feerates1)
|
||||
assert(db.getFeerates().get == feerates1)
|
||||
}
|
||||
}
|
|
@ -4,8 +4,9 @@ import org.scalatest.funsuite.AnyFunSuite
|
|||
|
||||
class ChannelTypesSpec extends AnyFunSuite {
|
||||
test("standard channel features include deterministic channel key path") {
|
||||
assert(!ChannelVersion.ZEROES.isSet(ChannelVersion.USE_PUBKEY_KEYPATH_BIT))
|
||||
assert(ChannelVersion.STANDARD.isSet(ChannelVersion.USE_PUBKEY_KEYPATH_BIT))
|
||||
assert(ChannelVersion.STATIC_REMOTEKEY.isSet(ChannelVersion.USE_PUBKEY_KEYPATH_BIT))
|
||||
assert(!ChannelVersion.ZEROES.hasPubkeyKeyPath)
|
||||
assert(ChannelVersion.STANDARD.hasPubkeyKeyPath)
|
||||
assert(ChannelVersion.STATIC_REMOTEKEY.hasStaticRemotekey)
|
||||
assert(ChannelVersion.STATIC_REMOTEKEY.hasPubkeyKeyPath)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -92,8 +92,8 @@ trait StateTestsHelperMethods extends TestKitBase with FixtureTestSuite with Par
|
|||
val channelFlags = if (tags.contains("channels_public")) ChannelFlags.AnnounceChannel else ChannelFlags.Empty
|
||||
val pushMsat = if (tags.contains("no_push_msat")) 0.msat else TestConstants.pushMsat
|
||||
val (aliceParams, bobParams) = if(tags.contains("static_remotekey")) {
|
||||
(Alice.channelParams.copy(features = Features(Set(ActivatedFeature(StaticRemoteKey, Optional))), localPaymentBasepoint = Some(Helpers.getWalletPaymentBasepoint(wallet))),
|
||||
Bob.channelParams.copy(features = Features(Set(ActivatedFeature(StaticRemoteKey, Optional))), localPaymentBasepoint = Some(Helpers.getWalletPaymentBasepoint(wallet))))
|
||||
(Alice.channelParams.copy(features = Features(Set(ActivatedFeature(StaticRemoteKey, Optional))), staticPaymentBasepoint = Some(Helpers.getWalletPaymentBasepoint(wallet))),
|
||||
Bob.channelParams.copy(features = Features(Set(ActivatedFeature(StaticRemoteKey, Optional))), staticPaymentBasepoint = Some(Helpers.getWalletPaymentBasepoint(wallet))))
|
||||
} else {
|
||||
(Alice.channelParams, Bob.channelParams)
|
||||
}
|
||||
|
|
|
@ -25,16 +25,16 @@ import fr.acinq.eclair.channel.{ChannelErrorOccurred, LocalError, NetworkFeePaid
|
|||
import fr.acinq.eclair.db.sqlite.SqliteAuditDb
|
||||
import fr.acinq.eclair.db.sqlite.SqliteUtils.{getVersion, using}
|
||||
import fr.acinq.eclair.payment._
|
||||
import fr.acinq.eclair.wire.ChannelCodecs
|
||||
import org.scalatest.Tag
|
||||
import org.scalatest.funsuite.AnyFunSuite
|
||||
|
||||
import scala.compat.Platform
|
||||
import scala.concurrent.duration._
|
||||
import scala.util.Random
|
||||
|
||||
class SqliteAuditDbSpec extends AnyFunSuite {
|
||||
|
||||
val ZERO_UUID = UUID.fromString("00000000-0000-0000-0000-000000000000")
|
||||
|
||||
test("init sqlite 2 times in a row") {
|
||||
val sqlite = TestConstants.sqliteInMemory()
|
||||
val db1 = new SqliteAuditDb(sqlite)
|
||||
|
@ -45,7 +45,7 @@ class SqliteAuditDbSpec extends AnyFunSuite {
|
|||
val sqlite = TestConstants.sqliteInMemory()
|
||||
val db = new SqliteAuditDb(sqlite)
|
||||
|
||||
val e1 = PaymentSent(ChannelCodecs.UNKNOWN_UUID, randomBytes32, randomBytes32, 40000 msat, randomKey.publicKey, PaymentSent.PartialPayment(ChannelCodecs.UNKNOWN_UUID, 42000 msat, 1000 msat, randomBytes32, None) :: Nil)
|
||||
val e1 = PaymentSent(ZERO_UUID, randomBytes32, randomBytes32, 40000 msat, randomKey.publicKey, PaymentSent.PartialPayment(ZERO_UUID, 42000 msat, 1000 msat, randomBytes32, None) :: Nil)
|
||||
val pp2a = PaymentReceived.PartialPayment(42000 msat, randomBytes32)
|
||||
val pp2b = PaymentReceived.PartialPayment(42100 msat, randomBytes32)
|
||||
val e2 = PaymentReceived(randomBytes32, pp2a :: pp2b :: Nil)
|
||||
|
@ -107,14 +107,13 @@ class SqliteAuditDbSpec extends AnyFunSuite {
|
|||
db.add(TrampolinePaymentRelayed(randomBytes32, Seq(PaymentRelayed.Part(25000 msat, randomBytes32)), Seq(PaymentRelayed.Part(20000 msat, c4))))
|
||||
db.add(TrampolinePaymentRelayed(randomBytes32, Seq(PaymentRelayed.Part(46000 msat, randomBytes32)), Seq(PaymentRelayed.Part(16000 msat, c2), PaymentRelayed.Part(10000 msat, c4), PaymentRelayed.Part(14000 msat, c4))))
|
||||
|
||||
db.add(NetworkFeePaid(null, n1, c1, Transaction(0, Seq.empty, Seq.empty, 0), 100 sat, "funding"))
|
||||
db.add(NetworkFeePaid(null, n2, c2, Transaction(0, Seq.empty, Seq.empty, 0), 200 sat, "funding"))
|
||||
db.add(NetworkFeePaid(null, n2, c2, Transaction(0, Seq.empty, Seq.empty, 0), 300 sat, "mutual"))
|
||||
db.add(NetworkFeePaid(null, n3, c3, Transaction(0, Seq.empty, Seq.empty, 0), 400 sat, "funding"))
|
||||
db.add(NetworkFeePaid(null, n4, c4, Transaction(0, Seq.empty, Seq.empty, 0), 500 sat, "funding"))
|
||||
|
||||
assert(db.stats.toSet === Set(
|
||||
Stats(channelId = c1, avgPaymentAmount = 42 sat, paymentCount = 3, relayFee = 4 sat, networkFee = 100 sat),
|
||||
Stats(channelId = c1, avgPaymentAmount = 42 sat, paymentCount = 3, relayFee = 4 sat, networkFee = 0 sat),
|
||||
Stats(channelId = c2, avgPaymentAmount = 40 sat, paymentCount = 2, relayFee = 4 sat, networkFee = 500 sat),
|
||||
Stats(channelId = c3, avgPaymentAmount = 0 sat, paymentCount = 0, relayFee = 0 sat, networkFee = 400 sat),
|
||||
Stats(channelId = c4, avgPaymentAmount = 30 sat, paymentCount = 2, relayFee = 9 sat, networkFee = 500 sat)
|
||||
|
@ -204,7 +203,7 @@ class SqliteAuditDbSpec extends AnyFunSuite {
|
|||
}
|
||||
|
||||
// existing rows in the 'sent' table will use id=00000000-0000-0000-0000-000000000000 as default
|
||||
assert(migratedDb.listSent(0, (System.currentTimeMillis.milliseconds + 1.minute).toMillis) === Seq(ps.copy(id = ChannelCodecs.UNKNOWN_UUID, parts = Seq(ps.parts.head.copy(id = ChannelCodecs.UNKNOWN_UUID)))))
|
||||
assert(migratedDb.listSent(0, (System.currentTimeMillis.milliseconds + 1.minute).toMillis) === Seq(ps.copy(id = ZERO_UUID, parts = Seq(ps.parts.head.copy(id = ZERO_UUID)))))
|
||||
|
||||
val postMigrationDb = new SqliteAuditDb(connection)
|
||||
|
||||
|
@ -217,7 +216,7 @@ class SqliteAuditDbSpec extends AnyFunSuite {
|
|||
postMigrationDb.add(e2)
|
||||
|
||||
// the old record will have the UNKNOWN_UUID but the new ones will have their actual id
|
||||
val expected = Seq(ps.copy(id = ChannelCodecs.UNKNOWN_UUID, parts = Seq(ps.parts.head.copy(id = ChannelCodecs.UNKNOWN_UUID))), ps1)
|
||||
val expected = Seq(ps.copy(id = ZERO_UUID, parts = Seq(ps.parts.head.copy(id = ZERO_UUID))), ps1)
|
||||
assert(postMigrationDb.listSent(0, (System.currentTimeMillis.milliseconds + 1.minute).toMillis) === expected)
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,47 @@
|
|||
/*
|
||||
* Copyright 2020 ACINQ SAS
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package fr.acinq.eclair.db
|
||||
|
||||
import fr.acinq.eclair._
|
||||
import fr.acinq.eclair.blockchain.fee.FeeratesPerKB
|
||||
import fr.acinq.eclair.db.sqlite.SqliteFeeratesDb
|
||||
import org.scalatest.funsuite.AnyFunSuite
|
||||
|
||||
class SqliteFeeratesDbSpec extends AnyFunSuite {
|
||||
|
||||
test("init sqlite 2 times in a row") {
|
||||
val sqlite = TestConstants.sqliteInMemory()
|
||||
val db1 = new SqliteFeeratesDb(sqlite)
|
||||
val db2 = new SqliteFeeratesDb(sqlite)
|
||||
}
|
||||
|
||||
test("add/get feerates") {
|
||||
val sqlite = TestConstants.sqliteInMemory()
|
||||
val db = new SqliteFeeratesDb(sqlite)
|
||||
val feerate = FeeratesPerKB(
|
||||
block_1 = 150000,
|
||||
blocks_2 = 120000,
|
||||
blocks_6 = 100000,
|
||||
blocks_12 = 90000,
|
||||
blocks_36 = 70000,
|
||||
blocks_72 = 50000,
|
||||
blocks_144 = 20000)
|
||||
|
||||
db.addOrUpdateFeerates(feerate)
|
||||
assert(db.getFeerates().get == feerate)
|
||||
}
|
||||
}
|
|
@ -297,9 +297,9 @@ class PeerSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike with StateTe
|
|||
val info = probe.expectMsgType[RES_GETINFO]
|
||||
assert(info.state == WAIT_FOR_ACCEPT_CHANNEL)
|
||||
val inputInit = info.data.asInstanceOf[DATA_WAIT_FOR_ACCEPT_CHANNEL].initFunder
|
||||
assert(inputInit.channelVersion.isSet(ChannelVersion.USE_STATIC_REMOTEKEY_BIT))
|
||||
assert(inputInit.localParams.localPaymentBasepoint.isDefined)
|
||||
assert(inputInit.localParams.defaultFinalScriptPubKey === Script.write(Script.pay2wpkh(inputInit.localParams.localPaymentBasepoint.get)))
|
||||
assert(inputInit.channelVersion.hasStaticRemotekey)
|
||||
assert(inputInit.localParams.staticPaymentBasepoint.isDefined)
|
||||
assert(inputInit.localParams.defaultFinalScriptPubKey === Script.write(Script.pay2wpkh(inputInit.localParams.staticPaymentBasepoint.get)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -283,7 +283,7 @@ class MultiPartPaymentLifecycleSpec extends TestKitBaseClass with FixtureAnyFunS
|
|||
|
||||
test("split fees between child payments") { f =>
|
||||
import f._
|
||||
val routeParams = RouteParams(randomize = false, 100 msat, 0.05, 20, CltvExpiryDelta(144), None)
|
||||
val routeParams = RouteParams(randomize = false, 100 msat, 0.05, 20, CltvExpiryDelta(144), None, MultiPartParams(10000 msat, 5))
|
||||
val payment = SendMultiPartPayment(randomBytes32, e, 3000 * 1000 msat, expiry, 3, routeParams = Some(routeParams))
|
||||
initPayment(f, payment, emptyStats.copy(capacity = Stats.generate(Seq(1000), d => Satoshi(d.toLong))), localChannels())
|
||||
waitUntilAmountSent(f, 3000 * 1000 msat)
|
||||
|
@ -494,7 +494,7 @@ class MultiPartPaymentLifecycleSpec extends TestKitBaseClass with FixtureAnyFunS
|
|||
// We have a total of 6500 satoshis across all channels. We try to send lower amounts to take fees into account.
|
||||
val toSend = ((1 + Random.nextInt(3500)) * 1000).msat
|
||||
val networkStats = emptyStats.copy(capacity = Stats.generate(Seq(400 + Random.nextInt(1600)), d => Satoshi(d.toLong)))
|
||||
val routeParams = RouteParams(randomize = true, Random.nextInt(1000).msat, Random.nextInt(10).toDouble / 100, 20, CltvExpiryDelta(144), None)
|
||||
val routeParams = RouteParams(randomize = true, Random.nextInt(1000).msat, Random.nextInt(10).toDouble / 100, 20, CltvExpiryDelta(144), None, MultiPartParams(10000 msat, 5))
|
||||
val request = SendMultiPartPayment(randomBytes32, e, toSend, CltvExpiry(561), 1, Nil, Some(routeParams))
|
||||
val fuzzParams = s"(sending $toSend with network capacity ${networkStats.capacity.percentile75.toMilliSatoshi}, fee base ${routeParams.maxFeeBase} and fee percentage ${routeParams.maxFeePct})"
|
||||
val (remaining, payments) = splitPayment(f.nodeParams, toSend, testChannels.channels, Some(networkStats), request, randomize = true)
|
||||
|
|
|
@ -32,7 +32,7 @@ import fr.acinq.eclair.payment.send.MultiPartPaymentLifecycle.SendMultiPartPayme
|
|||
import fr.acinq.eclair.payment.send.PaymentInitiator._
|
||||
import fr.acinq.eclair.payment.send.PaymentLifecycle.{SendPayment, SendPaymentToRoute}
|
||||
import fr.acinq.eclair.payment.send.{PaymentError, PaymentInitiator}
|
||||
import fr.acinq.eclair.router.Router.{NodeHop, RouteParams}
|
||||
import fr.acinq.eclair.router.Router.{MultiPartParams, NodeHop, RouteParams}
|
||||
import fr.acinq.eclair.wire.Onion.{FinalLegacyPayload, FinalTlvPayload}
|
||||
import fr.acinq.eclair.wire.OnionTlv.{AmountToForward, OutgoingCltv}
|
||||
import fr.acinq.eclair.wire.{Onion, OnionCodecs, OnionTlv, TrampolineFeeInsufficient, _}
|
||||
|
@ -122,7 +122,7 @@ class PaymentInitiatorSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike
|
|||
test("forward legacy payment") { f =>
|
||||
import f._
|
||||
val hints = Seq(Seq(ExtraHop(b, channelUpdate_bc.shortChannelId, feeBase = 10 msat, feeProportionalMillionths = 1, cltvExpiryDelta = CltvExpiryDelta(12))))
|
||||
val routeParams = RouteParams(randomize = true, 15 msat, 1.5, 5, CltvExpiryDelta(561), None)
|
||||
val routeParams = RouteParams(randomize = true, 15 msat, 1.5, 5, CltvExpiryDelta(561), None, MultiPartParams(10000 msat, 5))
|
||||
sender.send(initiator, SendPaymentRequest(finalAmount, paymentHash, c, 1, CltvExpiryDelta(42), assistedRoutes = hints, routeParams = Some(routeParams)))
|
||||
val id1 = sender.expectMsgType[UUID]
|
||||
payFsm.expectMsg(SendPaymentConfig(id1, id1, None, paymentHash, finalAmount, c, Upstream.Local(id1), None, storeInDb = true, publishEvent = true, Nil))
|
||||
|
|
|
@ -209,7 +209,7 @@ class PaymentLifecycleSpec extends BaseRouterSpec {
|
|||
val payFixture = createPaymentLifecycle()
|
||||
import payFixture._
|
||||
|
||||
val request = SendPayment(d, FinalLegacyPayload(defaultAmountMsat, defaultExpiry), 5, routeParams = Some(RouteParams(randomize = false, maxFeeBase = 100 msat, maxFeePct = 0.0, routeMaxLength = 20, routeMaxCltv = CltvExpiryDelta(2016), ratios = None)))
|
||||
val request = SendPayment(d, FinalLegacyPayload(defaultAmountMsat, defaultExpiry), 5, routeParams = Some(RouteParams(randomize = false, 100 msat, 0.0, 20, CltvExpiryDelta(2016), None, MultiPartParams(10000 msat, 5))))
|
||||
sender.send(paymentFSM, request)
|
||||
val routeRequest = routerForwarder.expectMsgType[RouteRequest]
|
||||
val Transition(_, WAITING_FOR_REQUEST, WAITING_FOR_ROUTE) = monitor.expectMsgClass(classOf[Transition[_]])
|
||||
|
|
|
@ -27,12 +27,12 @@ import fr.acinq.eclair.router.Router._
|
|||
import fr.acinq.eclair.transactions.Transactions
|
||||
import fr.acinq.eclair.wire._
|
||||
import fr.acinq.eclair.{CltvExpiryDelta, Features, LongToBtcAmount, MilliSatoshi, ShortChannelId, ToMilliSatoshiConversion, randomKey}
|
||||
import org.scalatest.ParallelTestExecution
|
||||
import org.scalatest.funsuite.AnyFunSuite
|
||||
import org.scalatest.{ParallelTestExecution, Tag}
|
||||
import scodec.bits._
|
||||
|
||||
import scala.collection.immutable.SortedMap
|
||||
import scala.util.{Failure, Success}
|
||||
import scala.util.{Failure, Random, Success}
|
||||
|
||||
/**
|
||||
* Created by PM on 31/05/2016.
|
||||
|
@ -925,9 +925,10 @@ class RouteCalculationSpec extends AnyFunSuite with ParallelTestExecution {
|
|||
)
|
||||
|
||||
val g = DirectedGraph.makeGraph(updates)
|
||||
val params = RouteParams(randomize = false, maxFeeBase = 21000 msat, maxFeePct = 0.03, routeMaxCltv = CltvExpiryDelta(1008), routeMaxLength = 6, ratios = Some(
|
||||
WeightRatios(cltvDeltaFactor = 0.15, ageFactor = 0.35, capacityFactor = 0.5)
|
||||
))
|
||||
val params = DEFAULT_ROUTE_PARAMS.copy(
|
||||
routeMaxCltv = CltvExpiryDelta(1008),
|
||||
ratios = Some(WeightRatios(cltvDeltaFactor = 0.15, ageFactor = 0.35, capacityFactor = 0.5))
|
||||
)
|
||||
val thisNode = PublicKey(hex"036d65409c41ab7380a43448f257809e7496b52bf92057c09c4f300cbd61c50d96")
|
||||
val targetNode = PublicKey(hex"024655b768ef40951b20053a5c4b951606d4d86085d51238f2c67c7dec29c792ca")
|
||||
val amount = 351000 msat
|
||||
|
@ -957,6 +958,583 @@ class RouteCalculationSpec extends AnyFunSuite with ParallelTestExecution {
|
|||
assert(!Graph.validatePath(Seq(ab, bc, cd), 250 msat)) // above balance (AB)
|
||||
}
|
||||
|
||||
test("calculate multipart route to neighbor (many channels, known balance)") {
|
||||
val amount = 65000 msat
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(15000 msat)),
|
||||
makeEdge(2L, a, b, 15 msat, 10, minHtlc = 1 msat, balance_opt = Some(25000 msat)),
|
||||
makeEdge(3L, a, b, 1 msat, 50, minHtlc = 1 msat, balance_opt = Some(20000 msat)),
|
||||
makeEdge(4L, a, b, 100 msat, 20, minHtlc = 1 msat, balance_opt = Some(10000 msat))
|
||||
))
|
||||
// We set max-parts to 3, but it should be ignored when sending to a direct neighbor.
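// Worked example: the four balances sum to 15000 + 25000 + 20000 + 10000 = 70000 msat and no combination of three
// channels reaches the 65000 msat amount, so all four parts are required; honoring max-parts = 3 here would make the
// route search fail, which is why the limit must not apply to direct neighbors.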
val routeParams = DEFAULT_ROUTE_PARAMS.copy(mpp = MultiPartParams(2500 msat, 3))
|
||||
|
||||
{
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, routeParams = routeParams, currentBlockHeight = 400000)
|
||||
assert(routes.length === 4, routes)
|
||||
assert(routes.forall(_.length == 1), routes)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
}
|
||||
{
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, routeParams = routeParams.copy(randomize = true), currentBlockHeight = 400000)
|
||||
assert(routes.length >= 4, routes)
|
||||
assert(routes.forall(_.length == 1), routes)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
}
|
||||
}
|
||||
|
||||
test("calculate multipart route to neighbor (single channel, known balance)") {
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(25000 msat)),
|
||||
makeEdge(2L, a, c, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(50000 msat)),
|
||||
makeEdge(3L, c, b, 1 msat, 0, minHtlc = 1 msat),
|
||||
makeEdge(4L, a, d, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(45000 msat))
|
||||
))
|
||||
|
||||
val amount = 25000 msat
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(routes.length === 1, routes)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
assert(route2Ids(routes.head) === 1L :: Nil)
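// The direct A -> B channel has exactly 25000 msat of balance, so the full amount fits in a single part and the
// two-hop A -> C -> B path is not used.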
}
|
||||
|
||||
test("calculate multipart route to neighbor (many channels, some balance unknown)") {
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(15000 msat)),
|
||||
makeEdge(2L, a, b, 15 msat, 10, minHtlc = 1 msat, balance_opt = Some(25000 msat)),
|
||||
makeEdge(3L, a, b, 1 msat, 50, minHtlc = 1 msat, balance_opt = None, capacity = 20 sat),
|
||||
makeEdge(4L, a, b, 100 msat, 20, minHtlc = 1 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(5L, a, d, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(45000 msat))
|
||||
))
|
||||
|
||||
val amount = 65000 msat
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(routes.length === 4, routes)
|
||||
assert(routes.forall(_.length == 1), routes)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
}
|
||||
|
||||
test("calculate multipart route to neighbor (many channels, some empty)") {
|
||||
val amount = 35000 msat
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(15000 msat)),
|
||||
makeEdge(2L, a, b, 15 msat, 10, minHtlc = 1 msat, balance_opt = Some(0 msat)),
|
||||
makeEdge(3L, a, b, 1 msat, 50, minHtlc = 1 msat, balance_opt = None, capacity = 15 sat),
|
||||
makeEdge(4L, a, b, 100 msat, 20, minHtlc = 1 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(5L, a, d, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(45000 msat))
|
||||
))
|
||||
|
||||
{
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(routes.length === 3, routes)
|
||||
assert(routes.forall(_.length == 1), routes)
|
||||
checkIgnoredChannels(routes, 2L)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
}
|
||||
{
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS.copy(randomize = true), currentBlockHeight = 400000)
|
||||
assert(routes.length >= 3, routes)
|
||||
assert(routes.forall(_.length == 1), routes)
|
||||
checkIgnoredChannels(routes, 2L)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
}
|
||||
}
|
||||
|
||||
test("calculate multipart route to neighbor (ignored channels)") {
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(15000 msat)),
|
||||
makeEdge(2L, a, b, 15 msat, 10, minHtlc = 1 msat, balance_opt = Some(25000 msat)),
|
||||
makeEdge(3L, a, b, 1 msat, 50, minHtlc = 1 msat, balance_opt = None, capacity = 50 sat),
|
||||
makeEdge(4L, a, b, 100 msat, 20, minHtlc = 1 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(5L, a, b, 1 msat, 10, minHtlc = 1 msat, balance_opt = None, capacity = 10 sat),
|
||||
makeEdge(6L, a, d, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(45000 msat))
|
||||
))
|
||||
|
||||
val amount = 20000 msat
|
||||
val ignoredEdges = Set(ChannelDesc(ShortChannelId(2L), a, b), ChannelDesc(ShortChannelId(3L), a, b))
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, ignoredEdges = ignoredEdges, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(routes.forall(_.length == 1), routes)
|
||||
checkIgnoredChannels(routes, 2L, 3L)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
}
|
||||
|
||||
test("calculate multipart route to neighbor (pending htlcs ignored for local channels)") {
|
||||
val edge_ab_1 = makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(15000 msat))
|
||||
val edge_ab_2 = makeEdge(2L, a, b, 15 msat, 10, minHtlc = 1 msat, balance_opt = Some(25000 msat))
|
||||
val edge_ab_3 = makeEdge(3L, a, b, 1 msat, 50, minHtlc = 1 msat, balance_opt = None, capacity = 15 sat)
|
||||
val g = DirectedGraph(List(
|
||||
edge_ab_1,
|
||||
edge_ab_2,
|
||||
edge_ab_3,
|
||||
makeEdge(4L, a, d, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(45000 msat))
|
||||
))
|
||||
|
||||
val amount = 50000 msat
|
||||
// These pending HTLCs will have already been taken into account in the edge's `balance_opt` field: findMultiPartRoute
|
||||
// should ignore this information.
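// Worked example: the known balances (15000 + 25000 msat) plus the 15 sat channel with unknown balance give
// 55000 msat for a 50000 msat payment. If the 10000 + 5000 msat pending HTLCs were deducted a second time, only
// 40000 msat would remain and the search would wrongly return RouteNotFound.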
val pendingHtlcs = Seq(Route(10000 msat, ChannelHop(a, b, edge_ab_1.update) :: Nil), Route(5000 msat, ChannelHop(a, b, edge_ab_2.update) :: Nil))
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, pendingHtlcs = pendingHtlcs, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(routes.forall(_.length == 1), routes)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
}
|
||||
|
||||
test("calculate multipart route to neighbor (restricted htlc_maximum_msat)") {
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 25 msat, 15, minHtlc = 1 msat, maxHtlc = Some(5000 msat), balance_opt = Some(18000 msat)),
|
||||
makeEdge(2L, a, b, 15 msat, 10, minHtlc = 1 msat, maxHtlc = Some(5000 msat), balance_opt = Some(23000 msat)),
|
||||
makeEdge(3L, a, b, 1 msat, 50, minHtlc = 1 msat, maxHtlc = Some(5000 msat), balance_opt = Some(21000 msat)),
|
||||
makeEdge(4L, a, d, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(45000 msat))
|
||||
))
|
||||
|
||||
val amount = 50000 msat
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(routes.forall(_.length == 1), routes)
|
||||
assert(routes.length >= 10, routes)
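// Worked example: every channel caps an HTLC at 5000 msat, so sending 50000 msat requires at least
// 50000 / 5000 = 10 parts, hence the lower bound above.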
assert(routes.forall(_.amount <= 5000.msat), routes)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
}
|
||||
|
||||
test("calculate multipart route to neighbor (restricted htlc_minimum_msat)") {
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 25 msat, 15, minHtlc = 2500 msat, balance_opt = Some(18000 msat)),
|
||||
makeEdge(2L, a, b, 15 msat, 10, minHtlc = 2500 msat, balance_opt = Some(7000 msat)),
|
||||
makeEdge(3L, a, b, 1 msat, 50, minHtlc = 2500 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(4L, a, d, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(45000 msat))
|
||||
))
|
||||
|
||||
val amount = 30000 msat
|
||||
val routeParams = DEFAULT_ROUTE_PARAMS.copy(mpp = MultiPartParams(2500 msat, 5))
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 1 msat, routeParams = routeParams, currentBlockHeight = 400000)
|
||||
assert(routes.forall(_.length == 1), routes)
|
||||
assert(routes.length == 3, routes)
|
||||
checkRouteAmounts(routes, amount, 0 msat)
|
||||
}
|
||||
|
||||
test("calculate multipart route to neighbor (through remote channels)") {
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 25 msat, 15, minHtlc = 1000 msat, balance_opt = Some(18000 msat)),
|
||||
makeEdge(2L, a, b, 15 msat, 10, minHtlc = 1000 msat, balance_opt = Some(7000 msat)),
|
||||
makeEdge(3L, a, c, 1000 msat, 10000, minHtlc = 1000 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(4L, c, b, 10 msat, 1000, minHtlc = 1000 msat),
|
||||
makeEdge(5L, a, d, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(25000 msat))
|
||||
))
|
||||
|
||||
val amount = 30000 msat
|
||||
val maxFeeTooLow = findMultiPartRoute(g, a, b, amount, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(maxFeeTooLow === Failure(RouteNotFound))
|
||||
|
||||
val Success(routes) = findMultiPartRoute(g, a, b, amount, 20 msat, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(routes.forall(_.length <= 2), routes)
|
||||
assert(routes.length == 3, routes)
|
||||
checkRouteAmounts(routes, amount, 20 msat)
|
||||
}
|
||||
|
||||
test("cannot find multipart route to neighbor (not enough balance)") {
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 0 msat, 0, minHtlc = 1 msat, balance_opt = Some(15000 msat)),
|
||||
makeEdge(2L, a, b, 0 msat, 0, minHtlc = 1 msat, balance_opt = Some(5000 msat)),
|
||||
makeEdge(3L, a, b, 0 msat, 0, minHtlc = 1 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(4L, a, d, 0 msat, 0, minHtlc = 1 msat, balance_opt = Some(45000 msat))
|
||||
))
|
||||
|
||||
{
|
||||
val result = findMultiPartRoute(g, a, b, 40000 msat, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(result === Failure(RouteNotFound))
|
||||
}
|
||||
{
|
||||
val result = findMultiPartRoute(g, a, b, 40000 msat, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS.copy(randomize = true), currentBlockHeight = 400000)
|
||||
assert(result === Failure(RouteNotFound))
|
||||
}
|
||||
}
|
||||
|
||||
test("cannot find multipart route to neighbor (not enough capacity)") {
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 0 msat, 0, minHtlc = 1 msat, capacity = 1500 sat),
|
||||
makeEdge(2L, a, b, 0 msat, 0, minHtlc = 1 msat, capacity = 2000 sat),
|
||||
makeEdge(3L, a, b, 0 msat, 0, minHtlc = 1 msat, capacity = 1200 sat),
|
||||
makeEdge(4L, a, d, 0 msat, 0, minHtlc = 1 msat, capacity = 4500 sat)
|
||||
))
|
||||
|
||||
val result = findMultiPartRoute(g, a, b, 5000000 msat, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(result === Failure(RouteNotFound))
|
||||
}
|
||||
|
||||
test("cannot find multipart route to neighbor (restricted htlc_minimum_msat)") {
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 25 msat, 15, minHtlc = 5000 msat, balance_opt = Some(6000 msat)),
|
||||
makeEdge(2L, a, b, 15 msat, 10, minHtlc = 5000 msat, balance_opt = Some(7000 msat)),
|
||||
makeEdge(3L, a, d, 0 msat, 0, minHtlc = 5000 msat, balance_opt = Some(9000 msat))
|
||||
))
|
||||
|
||||
{
|
||||
val result = findMultiPartRoute(g, a, b, 10000 msat, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(result === Failure(RouteNotFound))
|
||||
}
|
||||
{
|
||||
val result = findMultiPartRoute(g, a, b, 10000 msat, 1 msat, routeParams = DEFAULT_ROUTE_PARAMS.copy(randomize = true), currentBlockHeight = 400000)
|
||||
assert(result === Failure(RouteNotFound))
|
||||
}
|
||||
}
|
||||
|
||||
test("calculate multipart route to remote node (many local channels)") {
|
||||
// +-------+
|
||||
// | |
|
||||
// A ----- C ----- E
|
||||
// | |
|
||||
// +--- B --- D ---+
|
||||
val (amount, maxFee) = (30000 msat, 150 msat)
|
||||
val edge_ab = makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(15000 msat))
|
||||
val g = DirectedGraph(List(
|
||||
edge_ab,
|
||||
makeEdge(2L, b, d, 15 msat, 0, minHtlc = 1 msat, capacity = 25 sat),
|
||||
makeEdge(3L, d, e, 15 msat, 0, minHtlc = 1 msat, capacity = 20 sat),
|
||||
makeEdge(4L, a, c, 1 msat, 50, minHtlc = 1 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(5L, a, c, 1 msat, 50, minHtlc = 1 msat, balance_opt = Some(8000 msat)),
|
||||
makeEdge(6L, c, e, 50 msat, 30, minHtlc = 1 msat, capacity = 20 sat)
|
||||
))
|
||||
|
||||
{
|
||||
val Success(routes) = findMultiPartRoute(g, a, e, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
assert(routes2Ids(routes) === Set(Seq(1L, 2L, 3L), Seq(4L, 6L), Seq(5L, 6L)))
|
||||
}
|
||||
{
|
||||
// Update A - B with unknown balance, capacity should be used instead.
|
||||
val g1 = g.addEdge(edge_ab.copy(capacity = 15 sat, balance_opt = None))
|
||||
val Success(routes) = findMultiPartRoute(g1, a, e, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
assert(routes2Ids(routes) === Set(Seq(1L, 2L, 3L), Seq(4L, 6L), Seq(5L, 6L)))
|
||||
}
|
||||
{
|
||||
// Randomize routes.
|
||||
val Success(routes) = findMultiPartRoute(g, a, e, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS.copy(randomize = true), currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
}
|
||||
{
|
||||
// Update balance A - B to be too low.
|
||||
val g1 = g.addEdge(edge_ab.copy(balance_opt = Some(2000 msat)))
|
||||
val failure = findMultiPartRoute(g1, a, e, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
{
|
||||
// Update capacity A - B to be too low.
|
||||
val g1 = g.addEdge(edge_ab.copy(capacity = 5 sat, balance_opt = None))
|
||||
val failure = findMultiPartRoute(g1, a, e, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
{
|
||||
// Try to find a route with a maxFee that's too low.
|
||||
val maxFeeTooLow = 100 msat
|
||||
val failure = findMultiPartRoute(g, a, e, amount, maxFeeTooLow, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
}
|
||||
|
||||
test("calculate multipart route to remote node (tiny amount)") {
|
||||
// A ----- C ----- E
|
||||
// | |
|
||||
// +--- B --- D ---+
|
||||
// Our balance and the amount we want to send are below the minimum part amount.
|
||||
val routeParams = DEFAULT_ROUTE_PARAMS.copy(mpp = MultiPartParams(5000 msat, 5))
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(1500 msat)),
|
||||
makeEdge(2L, b, d, 15 msat, 0, minHtlc = 1 msat, capacity = 25 sat),
|
||||
makeEdge(3L, d, e, 15 msat, 0, minHtlc = 1 msat, capacity = 20 sat),
|
||||
makeEdge(4L, a, c, 1 msat, 50, minHtlc = 1 msat, balance_opt = Some(1000 msat)),
|
||||
makeEdge(5L, c, e, 50 msat, 30, minHtlc = 1 msat, capacity = 20 sat)
|
||||
))
|
||||
|
||||
{
|
||||
// We can send single-part tiny payments.
|
||||
val (amount, maxFee) = (1400 msat, 30 msat)
|
||||
val Success(routes) = findMultiPartRoute(g, a, e, amount, maxFee, routeParams = routeParams, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
}
|
||||
{
|
||||
// But we don't want to split such tiny amounts.
|
||||
val (amount, maxFee) = (2000 msat, 150 msat)
|
||||
val failure = findMultiPartRoute(g, a, e, amount, maxFee, routeParams = routeParams, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
}
|
||||
|
||||
test("calculate multipart route to remote node (single path)") {
|
||||
val (amount, maxFee) = (100000 msat, 500 msat)
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(500000 msat)),
|
||||
makeEdge(2L, b, c, 10 msat, 30, minHtlc = 1 msat, capacity = 150 sat),
|
||||
makeEdge(3L, c, d, 15 msat, 50, minHtlc = 1 msat, capacity = 150 sat)
|
||||
))
|
||||
|
||||
val Success(routes) = findMultiPartRoute(g, a, d, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
assert(routes.length === 1, "payment shouldn't be split when we have one path with enough capacity")
|
||||
assert(routes2Ids(routes) === Set(Seq(1L, 2L, 3L)))
|
||||
}
|
||||
|
||||
test("calculate multipart route to remote node (single local channel)") {
|
||||
// +--- C ---+
|
||||
// | |
|
||||
// A --- B ------- D --- F
|
||||
// | |
|
||||
// +----- E -------+
|
||||
val (amount, maxFee) = (400000 msat, 250 msat)
|
||||
val edge_ab = makeEdge(1L, a, b, 50 msat, 100, minHtlc = 1 msat, balance_opt = Some(500000 msat))
|
||||
val g = DirectedGraph(List(
|
||||
edge_ab,
|
||||
makeEdge(2L, b, c, 10 msat, 30, minHtlc = 1 msat, capacity = 150 sat),
|
||||
makeEdge(3L, c, d, 15 msat, 50, minHtlc = 1 msat, capacity = 150 sat),
|
||||
makeEdge(4L, b, d, 20 msat, 75, minHtlc = 1 msat, capacity = 180 sat),
|
||||
makeEdge(5L, d, f, 5 msat, 50, minHtlc = 1 msat, capacity = 300 sat),
|
||||
makeEdge(6L, b, e, 15 msat, 80, minHtlc = 1 msat, capacity = 210 sat),
|
||||
makeEdge(7L, e, f, 15 msat, 100, minHtlc = 1 msat, capacity = 200 sat)
|
||||
))
|
||||
|
||||
{
|
||||
val Success(routes) = findMultiPartRoute(g, a, f, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
assert(routes2Ids(routes) === Set(Seq(1L, 2L, 3L, 5L), Seq(1L, 4L, 5L), Seq(1L, 6L, 7L)))
|
||||
}
|
||||
{
|
||||
// Randomize routes.
|
||||
val Success(routes) = findMultiPartRoute(g, a, f, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS.copy(randomize = true), currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
}
|
||||
{
|
||||
// Update A - B with unknown balance, capacity should be used instead.
|
||||
val g1 = g.addEdge(edge_ab.copy(capacity = 500 sat, balance_opt = None))
|
||||
val Success(routes) = findMultiPartRoute(g1, a, f, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
assert(routes2Ids(routes) === Set(Seq(1L, 2L, 3L, 5L), Seq(1L, 4L, 5L), Seq(1L, 6L, 7L)))
|
||||
}
|
||||
{
|
||||
// Update balance A - B to be too low to cover fees.
|
||||
val g1 = g.addEdge(edge_ab.copy(balance_opt = Some(400000 msat)))
|
||||
val failure = findMultiPartRoute(g1, a, f, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
{
|
||||
// Update capacity A - B to be too low to cover fees.
|
||||
val g1 = g.addEdge(edge_ab.copy(capacity = 400 sat, balance_opt = None))
|
||||
val failure = findMultiPartRoute(g1, a, f, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
{
|
||||
// Try to find a route with a maxFee that's too low.
|
||||
val maxFeeTooLow = 100 msat
|
||||
val failure = findMultiPartRoute(g, a, f, amount, maxFeeTooLow, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
}
|
||||
|
||||
test("calculate multipart route to remote node (ignored channels and nodes)") {
|
||||
// +----- B --xxx-- C -----+
|
||||
// | +-------- D --------+ |
|
||||
// | | | |
|
||||
// +---+ (empty) +---+
|
||||
// | A | --------------- | F |
|
||||
// +---+ +---+
|
||||
// | | (not empty) | |
|
||||
// | +-------------------+ |
|
||||
// +---------- E ----------+
|
||||
val (amount, maxFee) = (25000 msat, 5 msat)
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(75000 msat)),
|
||||
makeEdge(2L, b, c, 1 msat, 0, minHtlc = 1 msat, capacity = 150 sat),
|
||||
makeEdge(3L, c, f, 1 msat, 0, minHtlc = 1 msat, capacity = 150 sat),
|
||||
makeEdge(4L, a, d, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(85000 msat)),
|
||||
makeEdge(5L, d, f, 1 msat, 0, minHtlc = 1 msat, capacity = 300 sat),
|
||||
makeEdge(6L, a, f, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(0 msat)),
|
||||
makeEdge(7L, a, f, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(8L, a, e, 1 msat, 0, minHtlc = 1 msat, balance_opt = Some(18000 msat)),
|
||||
makeEdge(9L, e, f, 1 msat, 0, minHtlc = 1 msat, capacity = 15 sat)
|
||||
))
|
||||
|
||||
val ignoredNodes = Set(d)
|
||||
val ignoredChannels = Set(ChannelDesc(ShortChannelId(2L), b, c))
|
||||
val Success(routes) = findMultiPartRoute(g, a, f, amount, maxFee, ignoredEdges = ignoredChannels, ignoredVertices = ignoredNodes, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
assert(routes2Ids(routes) === Set(Seq(7L), Seq(8L, 9L)))
|
||||
}
|
||||
|
||||
test("calculate multipart route to remote node (restricted htlc_minimum_msat and htlc_maximum_msat)") {
|
||||
// +----- B -----+
|
||||
// | |
|
||||
// A----- C ---- E
|
||||
// | |
|
||||
// +----- D -----+
|
||||
val (amount, maxFee) = (15000 msat, 5 msat)
|
||||
val g = DirectedGraph(List(
|
||||
// The A -> B -> E path is impossible because the A -> B balance is lower than the B -> E htlc_minimum_msat.
|
||||
makeEdge(1L, a, b, 1 msat, 0, minHtlc = 500 msat, balance_opt = Some(7000 msat)),
|
||||
makeEdge(2L, b, e, 1 msat, 0, minHtlc = 10000 msat, capacity = 50 sat),
|
||||
makeEdge(3L, a, c, 1 msat, 0, minHtlc = 500 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(4L, c, e, 1 msat, 0, minHtlc = 500 msat, maxHtlc = Some(4000 msat), capacity = 50 sat),
|
||||
makeEdge(5L, a, d, 1 msat, 0, minHtlc = 500 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(6L, d, e, 1 msat, 0, minHtlc = 500 msat, maxHtlc = Some(4000 msat), capacity = 50 sat)
|
||||
))
|
||||
|
||||
val Success(routes) = findMultiPartRoute(g, a, e, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
assert(routes.length >= 4, routes)
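// Worked example: the C -> E and D -> E channels cap each HTLC at 4000 msat, so 15000 msat needs at least
// ceil(15000 / 4000) = 4 parts, hence the lower bound above.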
assert(routes.forall(_.amount <= 4000.msat), routes)
|
||||
assert(routes.forall(_.amount >= 500.msat), routes)
|
||||
checkIgnoredChannels(routes, 1L, 2L)
|
||||
|
||||
val maxFeeTooLow = 3 msat
|
||||
val failure = findMultiPartRoute(g, a, e, amount, maxFeeTooLow, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
|
||||
test("calculate multipart route to remote node (complex graph)") {
|
||||
// +---+ +---+ +---+
|
||||
// | A |-----+ +--->| B |--->| C |
|
||||
// +---+ | | +---+ +---+
|
||||
// ^ | +---+ | |
|
||||
// | +--->| E |---+ |
|
||||
// | | +---+ | |
|
||||
// +---+ | | +---+ |
|
||||
// | D |-----+ +--->| F |<-----+
|
||||
// +---+ +---+
|
||||
val g = DirectedGraph(Seq(
|
||||
makeEdge(1L, d, a, 100 msat, 1000, minHtlc = 1000 msat, balance_opt = Some(80000 msat)),
|
||||
makeEdge(2L, d, e, 100 msat, 1000, minHtlc = 1500 msat, balance_opt = Some(20000 msat)),
|
||||
makeEdge(3L, a, e, 5 msat, 50, minHtlc = 1200 msat, capacity = 100 sat),
|
||||
makeEdge(4L, e, f, 25 msat, 1000, minHtlc = 1300 msat, capacity = 25 sat),
|
||||
makeEdge(5L, e, b, 10 msat, 100, minHtlc = 1100 msat, capacity = 75 sat),
|
||||
makeEdge(6L, b, c, 5 msat, 50, minHtlc = 1000 msat, capacity = 20 sat),
|
||||
makeEdge(7L, c, f, 5 msat, 10, minHtlc = 1500 msat, capacity = 50 sat)
|
||||
))
|
||||
val routeParams = DEFAULT_ROUTE_PARAMS.copy(mpp = MultiPartParams(1500 msat, 10))
|
||||
|
||||
{
|
||||
val (amount, maxFee) = (15000 msat, 50 msat)
|
||||
val Success(routes) = findMultiPartRoute(g, d, f, amount, maxFee, routeParams = routeParams, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
}
|
||||
{
|
||||
val (amount, maxFee) = (25000 msat, 100 msat)
|
||||
val Success(routes) = findMultiPartRoute(g, d, f, amount, maxFee, routeParams = routeParams, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
}
|
||||
{
|
||||
val (amount, maxFee) = (25000 msat, 50 msat)
|
||||
val failure = findMultiPartRoute(g, d, f, amount, maxFee, routeParams = routeParams, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
{
|
||||
val (amount, maxFee) = (40000 msat, 100 msat)
|
||||
val Success(routes) = findMultiPartRoute(g, d, f, amount, maxFee, routeParams = routeParams, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
}
|
||||
{
|
||||
val (amount, maxFee) = (40000 msat, 100 msat)
|
||||
val Success(routes) = findMultiPartRoute(g, d, f, amount, maxFee, routeParams = routeParams.copy(randomize = true), currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
}
|
||||
{
|
||||
val (amount, maxFee) = (40000 msat, 50 msat)
|
||||
val failure = findMultiPartRoute(g, d, f, amount, maxFee, routeParams = routeParams, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
}
|
||||
|
||||
test("calculate multipart route to remote node (with extra edges)") {
|
||||
// +--- B ---+
|
||||
// A D (---) E (---) F
|
||||
// +--- C ---+
|
||||
val (amount, maxFeeE, maxFeeF) = (10000 msat, 50 msat, 100 msat)
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, a, b, 1 msat, 0, minHtlc = 1 msat, maxHtlc = Some(4000 msat), balance_opt = Some(7000 msat)),
|
||||
makeEdge(2L, b, d, 1 msat, 0, minHtlc = 1 msat, capacity = 50 sat),
|
||||
makeEdge(3L, a, c, 1 msat, 0, minHtlc = 1 msat, maxHtlc = Some(4000 msat), balance_opt = Some(6000 msat)),
|
||||
makeEdge(4L, c, d, 1 msat, 0, minHtlc = 1 msat, capacity = 40 sat)
|
||||
))
|
||||
val extraEdges = Set(
|
||||
makeEdge(10L, d, e, 10 msat, 100, minHtlc = 500 msat, capacity = 15 sat),
|
||||
makeEdge(11L, e, f, 5 msat, 100, minHtlc = 500 msat, capacity = 10 sat)
|
||||
)
|
||||
|
||||
val Success(routes1) = findMultiPartRoute(g, a, e, amount, maxFeeE, extraEdges = extraEdges, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes1, amount, maxFeeE)
|
||||
assert(routes1.length >= 3, routes1)
|
||||
assert(routes1.forall(_.amount <= 4000.msat), routes1)
|
||||
|
||||
val Success(routes2) = findMultiPartRoute(g, a, f, amount, maxFeeF, extraEdges = extraEdges, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
checkRouteAmounts(routes2, amount, maxFeeF)
|
||||
assert(routes2.length >= 3, routes2)
|
||||
assert(routes2.forall(_.amount <= 4000.msat), routes2)
|
||||
|
||||
val maxFeeTooLow = 40 msat
|
||||
val failure = findMultiPartRoute(g, a, f, amount, maxFeeTooLow, extraEdges = extraEdges, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(failure === Failure(RouteNotFound))
|
||||
}
|
||||
|
||||
test("calculate multipart route to remote node (pending htlcs)") {
|
||||
// +----- B -----+
|
||||
// | |
|
||||
// A----- C ---- E
|
||||
// | |
|
||||
// +----- D -----+
|
||||
val (amount, maxFee) = (15000 msat, 100 msat)
|
||||
val edge_ab = makeEdge(1L, a, b, 1 msat, 0, minHtlc = 100 msat, balance_opt = Some(5000 msat))
|
||||
val edge_be = makeEdge(2L, b, e, 1 msat, 0, minHtlc = 100 msat, capacity = 5 sat)
|
||||
val g = DirectedGraph(List(
|
||||
// The A -> B -> E route is the most economic one, but we already have a pending HTLC in it.
|
||||
edge_ab,
|
||||
edge_be,
|
||||
makeEdge(3L, a, c, 50 msat, 0, minHtlc = 100 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(4L, c, e, 50 msat, 0, minHtlc = 100 msat, capacity = 25 sat),
|
||||
makeEdge(5L, a, d, 50 msat, 0, minHtlc = 100 msat, balance_opt = Some(10000 msat)),
|
||||
makeEdge(6L, d, e, 50 msat, 0, minHtlc = 100 msat, capacity = 25 sat)
|
||||
))
|
||||
|
||||
val pendingHtlcs = Seq(Route(5000 msat, ChannelHop(a, b, edge_ab.update) :: ChannelHop(b, e, edge_be.update) :: Nil))
|
||||
val Success(routes) = findMultiPartRoute(g, a, e, amount, maxFee, pendingHtlcs = pendingHtlcs, routeParams = DEFAULT_ROUTE_PARAMS, currentBlockHeight = 400000)
|
||||
assert(routes.forall(_.length == 2), routes)
|
||||
checkRouteAmounts(routes, amount, maxFee)
|
||||
checkIgnoredChannels(routes, 1L, 2L)
|
||||
}
|
||||
|
||||
test("calculate multipart route for full amount or fail", Tag("fuzzy")) {
|
||||
// +------------------------------------+
|
||||
// | |
|
||||
// | v
|
||||
// +---+ +---+ +---+
|
||||
// | A |-----+ +--------->| B |--->| C |
|
||||
// +---+ | | +---+ +---+
|
||||
// ^ | +---+ |
|
||||
// | +--->| E |----------+ |
|
||||
// | +---+ | |
|
||||
// | ^ v |
|
||||
// +---+ | +---+ |
|
||||
// | D |------------+ | F |<-----+
|
||||
// +---+ +---+
|
||||
// | ^
|
||||
// | |
|
||||
// +---------------------------+
|
||||
for (_ <- 1 to 100) {
|
||||
val amount = (100 + Random.nextInt(200000)).msat
|
||||
val maxFee = 50.msat.max(amount * 0.03)
|
||||
val g = DirectedGraph(List(
|
||||
makeEdge(1L, d, f, Random.nextInt(250).msat, Random.nextInt(10000), minHtlc = Random.nextInt(100).msat, maxHtlc = Some((20000 + Random.nextInt(80000)).msat), CltvExpiryDelta(Random.nextInt(288)), capacity = (10 + Random.nextInt(100)).sat, balance_opt = Some(Random.nextInt((2 * amount.toLong).toInt).msat)),
|
||||
makeEdge(2L, d, a, Random.nextInt(250).msat, Random.nextInt(10000), minHtlc = Random.nextInt(100).msat, maxHtlc = Some((20000 + Random.nextInt(80000)).msat), CltvExpiryDelta(Random.nextInt(288)), capacity = (10 + Random.nextInt(100)).sat, balance_opt = Some(Random.nextInt((2 * amount.toLong).toInt).msat)),
|
||||
makeEdge(3L, d, e, Random.nextInt(250).msat, Random.nextInt(10000), minHtlc = Random.nextInt(100).msat, maxHtlc = Some((20000 + Random.nextInt(80000)).msat), CltvExpiryDelta(Random.nextInt(288)), capacity = (10 + Random.nextInt(100)).sat, balance_opt = Some(Random.nextInt((2 * amount.toLong).toInt).msat)),
|
||||
makeEdge(4L, a, c, Random.nextInt(250).msat, Random.nextInt(10000), minHtlc = Random.nextInt(100).msat, maxHtlc = Some((20000 + Random.nextInt(80000)).msat), CltvExpiryDelta(Random.nextInt(288)), capacity = (10 + Random.nextInt(100)).sat),
|
||||
makeEdge(5L, a, e, Random.nextInt(250).msat, Random.nextInt(10000), minHtlc = Random.nextInt(100).msat, maxHtlc = Some((20000 + Random.nextInt(80000)).msat), CltvExpiryDelta(Random.nextInt(288)), capacity = (10 + Random.nextInt(100)).sat),
|
||||
makeEdge(6L, e, f, Random.nextInt(250).msat, Random.nextInt(10000), minHtlc = Random.nextInt(100).msat, maxHtlc = Some((20000 + Random.nextInt(80000)).msat), CltvExpiryDelta(Random.nextInt(288)), capacity = (10 + Random.nextInt(100)).sat),
|
||||
makeEdge(7L, e, b, Random.nextInt(250).msat, Random.nextInt(10000), minHtlc = Random.nextInt(100).msat, maxHtlc = Some((20000 + Random.nextInt(80000)).msat), CltvExpiryDelta(Random.nextInt(288)), capacity = (10 + Random.nextInt(100)).sat),
|
||||
makeEdge(8L, b, c, Random.nextInt(250).msat, Random.nextInt(10000), minHtlc = Random.nextInt(100).msat, maxHtlc = Some((20000 + Random.nextInt(80000)).msat), CltvExpiryDelta(Random.nextInt(288)), capacity = (10 + Random.nextInt(100)).sat),
|
||||
makeEdge(9L, c, f, Random.nextInt(250).msat, Random.nextInt(10000), minHtlc = Random.nextInt(100).msat, maxHtlc = Some((20000 + Random.nextInt(80000)).msat), CltvExpiryDelta(Random.nextInt(288)), capacity = (10 + Random.nextInt(100)).sat)
|
||||
))
|
||||
|
||||
findMultiPartRoute(g, d, f, amount, maxFee, routeParams = DEFAULT_ROUTE_PARAMS.copy(randomize = true), currentBlockHeight = 400000) match {
|
||||
case Success(routes) => checkRouteAmounts(routes, amount, maxFee)
|
||||
case Failure(ex) => assert(ex === RouteNotFound)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
object RouteCalculationSpec {

@@ -967,7 +1545,7 @@ object RouteCalculationSpec {
val DEFAULT_MAX_FEE = 100000 msat
val DEFAULT_CAPACITY = 100000 sat

val DEFAULT_ROUTE_PARAMS = RouteParams(randomize = false, maxFeeBase = 21000 msat, maxFeePct = 0.03, routeMaxCltv = CltvExpiryDelta(2016), routeMaxLength = 6, ratios = None)
val DEFAULT_ROUTE_PARAMS = RouteParams(randomize = false, 21000 msat, 0.03, 6, CltvExpiryDelta(2016), None, MultiPartParams(1000 msat, 10))

val DUMMY_SIG = Transactions.PlaceHolderSig

@@ -1008,12 +1586,25 @@ object RouteCalculationSpec {
htlcMaximumMsat = maxHtlc
)

def hops2Ids(hops: Seq[ChannelHop]) = hops.map(hop => hop.lastUpdate.shortChannelId.toLong)
def hops2Ids(hops: Seq[ChannelHop]): Seq[Long] = hops.map(hop => hop.lastUpdate.shortChannelId.toLong)

def route2Ids(route: Route) = hops2Ids(route.hops)
def route2Ids(route: Route): Seq[Long] = hops2Ids(route.hops)

def route2Edges(route: Route) = route.hops.map(hop => GraphEdge(ChannelDesc(hop.lastUpdate.shortChannelId, hop.nodeId, hop.nextNodeId), hop.lastUpdate, 0 sat, None))
def routes2Ids(routes: Seq[Route]): Set[Seq[Long]] = routes.map(route2Ids).toSet

def route2Nodes(route: Route) = route.hops.map(hop => (hop.nodeId, hop.nextNodeId))
def route2Edges(route: Route): Seq[GraphEdge] = route.hops.map(hop => GraphEdge(ChannelDesc(hop.lastUpdate.shortChannelId, hop.nodeId, hop.nextNodeId), hop.lastUpdate, 0 sat, None))

def route2Nodes(route: Route): Seq[(PublicKey, PublicKey)] = route.hops.map(hop => (hop.nodeId, hop.nextNodeId))

def checkIgnoredChannels(routes: Seq[Route], shortChannelIds: Long*): Unit = {
shortChannelIds.foreach(shortChannelId => routes.foreach(route => {
assert(route.hops.forall(_.lastUpdate.shortChannelId.toLong != shortChannelId), route)
}))
}

def checkRouteAmounts(routes: Seq[Route], totalAmount: MilliSatoshi, maxFee: MilliSatoshi): Unit = {
assert(routes.map(_.amount).sum == totalAmount, routes)
assert(routes.map(_.fee).sum <= maxFee, routes)
}

}
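
For illustration only: the fuzzy test above relies on the invariant enforced by checkRouteAmounts, i.e. however a payment is split across partial routes, the per-route amounts must sum to the requested total and the summed fees must stay within the fee budget. A minimal standalone sketch of that invariant with hypothetical values, assuming the MilliSatoshi DSL from fr.acinq.eclair._:

import fr.acinq.eclair._

// Three hypothetical partial routes of a single multipart payment.
val partialAmounts = Seq(40000 msat, 35000 msat, 25000 msat)
val partialFees = Seq(120 msat, 95 msat, 60 msat)

// Same checks as checkRouteAmounts: amounts add up to the total, fees stay under the budget.
assert(partialAmounts.sum == (100000 msat))
assert(partialFees.sum <= (500 msat))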
@@ -81,10 +81,7 @@ trait TestVectorsSpec extends AnyFunSuite with Logging {
val funding_pubkey = funding_privkey.publicKey
val per_commitment_point = PublicKey(hex"025f7117a78150fe2ef97db7cfc83bd57b2e2c0d0dd25eaf467a4a1c2a45ce1486")
val htlc_privkey = Generators.derivePrivKey(payment_basepoint_secret, per_commitment_point)
val payment_privkey = channelVersion.isSet(USE_STATIC_REMOTEKEY_BIT) match {
case true => payment_basepoint_secret
case false => htlc_privkey
}
val payment_privkey = if (channelVersion.hasStaticRemotekey) payment_basepoint_secret else htlc_privkey
val delayed_payment_privkey = Generators.derivePrivKey(delayed_payment_basepoint_secret, per_commitment_point)
val revocation_pubkey = PublicKey(hex"0212a140cd0c6539d07cd08dfe09984dec3251ea808b892efeac3ede9402bf2b19")
val feerate_per_kw = 15000

@@ -101,10 +98,7 @@ trait TestVectorsSpec extends AnyFunSuite with Logging {
val funding_privkey = PrivateKey(hex"1552dfba4f6cf29a62a0af13c8d6981d36d0ef8d61ba10fb0fe90da7634d7e1301")
val funding_pubkey = funding_privkey.publicKey
val htlc_privkey = Generators.derivePrivKey(payment_basepoint_secret, Local.per_commitment_point)
val payment_privkey = channelVersion.isSet(USE_STATIC_REMOTEKEY_BIT) match {
case true => payment_basepoint_secret
case false => htlc_privkey
}
val payment_privkey = if (channelVersion.hasStaticRemotekey) payment_basepoint_secret else htlc_privkey
}

val coinbaseTx = Transaction.read("01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff03510101ffffffff0100f2052a010000001976a9143ca33c2e4446f4a305f23c80df8ad1afdcf652f988ac00000000")

@@ -512,5 +506,5 @@ class DefaultCommitmentTestVectorSpec extends TestVectorsSpec {

class StaticRemoteKeyTestVectorSpec extends TestVectorsSpec {
override def filename: String = "/bolt3-tx-test-vectors-static-remotekey-format.txt"
override def channelVersion: ChannelVersion = ChannelVersion.USE_STATIC_REMOTEKEY
override def channelVersion: ChannelVersion = ChannelVersion.STATIC_REMOTEKEY
}
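
The two hunks above replace a boolean match with a single conditional when selecting the payment key. A standalone sketch of that selection rule, with a hypothetical helper name (the real code works on ChannelVersion and keys derived by Generators):

import fr.acinq.bitcoin.Crypto.PrivateKey

// Sketch only: with option_static_remotekey the payment key is the static payment basepoint
// secret; otherwise it is the HTLC key derived from the per-commitment point.
def selectPaymentKey(hasStaticRemotekey: Boolean, paymentBasepointSecret: PrivateKey, derivedHtlcKey: PrivateKey): PrivateKey =
  if (hasStaticRemotekey) paymentBasepointSecret else derivedHtlcKey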
@@ -33,15 +33,15 @@ import fr.acinq.eclair.router.Announcements
import fr.acinq.eclair.transactions.Transactions.{CommitTx, InputInfo, TransactionWithInputInfo}
import fr.acinq.eclair.transactions._
import fr.acinq.eclair.wire.ChannelCodecs._
import fr.acinq.eclair.{TestConstants, UInt64, randomBytes, randomBytes32, randomKey, _}
import fr.acinq.eclair.wire.CommonCodecs.setCodec
import fr.acinq.eclair.{TestConstants, UInt64, randomBytes32, randomKey, _}
import org.json4s.JsonAST._
import org.json4s.jackson.Serialization
import org.json4s.{CustomKeySerializer, CustomSerializer}
import org.scalatest.funsuite.AnyFunSuite
import scodec.bits._
import scodec.{Attempt, DecodeResult}
import scodec.{Attempt, Codec, DecodeResult}

import scala.compat.Platform
import scala.concurrent.duration._
import scala.io.Source
import scala.util.Random

@@ -68,7 +68,9 @@ class ChannelCodecsSpec extends AnyFunSuite {
assert(keyPath === decoded.value)
}

test("encode/decode channel version in a backward compatible way") {
test("encode/decode channel version in a backward compatible way (legacy)") {
val codec = LegacyChannelCodecs.channelVersionCodec

// before we had a commitment version, public keys were stored first (they started with 0x02 and 0x03)
val legacy02 = hex"02a06ea3081f0f7a8ce31eb4f0822d10d2da120d5a1b1451f0727f51c7372f0f9b"
val legacy03 = hex"03d5c030835d6a6248b2d1d4cac60813838011b995a66b6f78dcc9fb8b5c40c3f3"

@@ -76,41 +78,37 @@ class ChannelCodecsSpec extends AnyFunSuite {
val current03 = hex"010000000103d5c030835d6a6248b2d1d4cac60813838011b995a66b6f78dcc9fb8b5c40c3f3"
val current04 = hex"010000000303d5c030835d6a6248b2d1d4cac60813838011b995a66b6f78dcc9fb8b5c40c3f3"

assert(channelVersionCodec.decode(legacy02.bits) === Attempt.successful(DecodeResult(ChannelVersion.ZEROES, legacy02.bits)))
assert(channelVersionCodec.decode(legacy03.bits) === Attempt.successful(DecodeResult(ChannelVersion.ZEROES, legacy03.bits)))
assert(channelVersionCodec.decode(current02.bits) === Attempt.successful(DecodeResult(ChannelVersion.STANDARD, current02.drop(5).bits)))
assert(channelVersionCodec.decode(current03.bits) === Attempt.successful(DecodeResult(ChannelVersion.STANDARD, current03.drop(5).bits)))
assert(channelVersionCodec.decode(current04.bits) === Attempt.successful(DecodeResult(ChannelVersion.STATIC_REMOTEKEY, current04.drop(5).bits)))
assert(codec.decode(legacy02.bits) === Attempt.successful(DecodeResult(ChannelVersion.ZEROES, legacy02.bits)))
assert(codec.decode(legacy03.bits) === Attempt.successful(DecodeResult(ChannelVersion.ZEROES, legacy03.bits)))
assert(codec.decode(current02.bits) === Attempt.successful(DecodeResult(ChannelVersion.STANDARD, current02.drop(5).bits)))
assert(codec.decode(current03.bits) === Attempt.successful(DecodeResult(ChannelVersion.STANDARD, current03.drop(5).bits)))
assert(codec.decode(current04.bits) === Attempt.successful(DecodeResult(ChannelVersion.STATIC_REMOTEKEY, current04.drop(5).bits)))

assert(channelVersionCodec.encode(ChannelVersion.STANDARD) === Attempt.successful(hex"0100000001".bits))
assert(channelVersionCodec.encode(ChannelVersion.STATIC_REMOTEKEY) === Attempt.successful(hex"0100000003".bits))
assert(codec.encode(ChannelVersion.STANDARD) === Attempt.successful(hex"0100000001".bits))
assert(codec.encode(ChannelVersion.STATIC_REMOTEKEY) === Attempt.successful(hex"0100000003".bits))
}

test("encode/decode channel version") {
val current02 = hex"0000000102a06ea3081f0f7a8ce31eb4f0822d10d2da120d5a1b1451f0727f51c7372f0f9b"
val current03 = hex"0000000103d5c030835d6a6248b2d1d4cac60813838011b995a66b6f78dcc9fb8b5c40c3f3"
val current04 = hex"0000000303d5c030835d6a6248b2d1d4cac60813838011b995a66b6f78dcc9fb8b5c40c3f3"

assert(channelVersionCodec.decode(current02.bits) === Attempt.successful(DecodeResult(ChannelVersion.STANDARD, current02.drop(4).bits)))
assert(channelVersionCodec.decode(current03.bits) === Attempt.successful(DecodeResult(ChannelVersion.STANDARD, current03.drop(4).bits)))
assert(channelVersionCodec.decode(current04.bits) === Attempt.successful(DecodeResult(ChannelVersion.STATIC_REMOTEKEY, current04.drop(4).bits)))

assert(channelVersionCodec.encode(ChannelVersion.STANDARD) === Attempt.successful(hex"00000001".bits))
assert(channelVersionCodec.encode(ChannelVersion.STATIC_REMOTEKEY) === Attempt.successful(hex"00000003".bits))
}
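
The two tests above pin down the on-disk layouts: the legacy codec prefixes the version with a 0x01 discriminator byte (hence current02.drop(5)), while the new codec reads the 4 version bytes directly (drop(4)). A minimal scodec sketch of the new layout, assuming nothing beyond the stock bits combinator:

import scodec.Codec
import scodec.bits.BitVector
import scodec.codecs.bits

// Sketch only (not the project's codec): a channel version is a plain 32-bit field in the new format.
val channelVersionBitsSketch: Codec[BitVector] = bits(32)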

test("encode/decode localparams") {
def roundtrip(localParams: LocalParams, codec: Codec[LocalParams]) = {
val encoded = codec.encode(localParams).require
val decoded = codec.decode(encoded).require
assert(localParams === decoded.value)
}

val o = LocalParams(
nodeId = randomKey.publicKey,
fundingKeyPath = DeterministicWallet.KeyPath(Seq(42L)),
dustLimit = Satoshi(Random.nextInt(Int.MaxValue)),
maxHtlcValueInFlightMsat = UInt64(Random.nextInt(Int.MaxValue)),
channelReserve = Satoshi(Random.nextInt(Int.MaxValue)),
htlcMinimum = MilliSatoshi(Random.nextInt(Int.MaxValue)),
toSelfDelay = CltvExpiryDelta(Random.nextInt(Short.MaxValue)),
maxAcceptedHtlcs = Random.nextInt(Short.MaxValue),
defaultFinalScriptPubKey = randomBytes(10 + Random.nextInt(200)),
localPaymentBasepoint = None,
isFunder = Random.nextBoolean(),
features = TestConstants.Alice.nodeParams.features)
val encoded = localParamsCodec(ChannelVersion.ZEROES).encode(o).require
val decoded = localParamsCodec(ChannelVersion.ZEROES).decode(encoded).require.value
assert(decoded.localPaymentBasepoint.isEmpty)
assert(o === decoded)

// Backwards-compatibility: decode localparams with global features.
val withGlobalFeatures = hex"033b1d42aa7c6a1a3502cbcfe4d2787e9f96237465cd1ba675f50cadf0be17092500010000002a0000000026cb536b00000000568a2768000000004f182e8d0000000040dd1d3d10e3040d00422f82d368b09056d1dcb2d67c4e8cae516abbbc8932f2b7d8f93b3be8e8cc6b64bb164563d567189bad0e07e24e821795aaef2dcbb9e5c1ad579961680202b38de5dd5426c524c7523b1fcdcf8c600d47f4b96a6dd48516b8e0006e81c83464b2800db0f3f63ceeb23a81511d159bae9ad07d10c0d144ba2da6f0cff30e7154eb48c908e9000101000001044500"
val withGlobalFeaturesDecoded = localParamsCodec(ChannelVersion.STANDARD).decode(withGlobalFeatures.bits).require.value
assert(withGlobalFeaturesDecoded.features.toByteVector === hex"0a8a")

val o1 = LocalParams(
nodeId = randomKey.publicKey,
fundingKeyPath = DeterministicWallet.KeyPath(Seq(42L)),
dustLimit = Satoshi(Random.nextInt(Int.MaxValue)),
@@ -120,13 +118,20 @@ class ChannelCodecsSpec extends AnyFunSuite {
toSelfDelay = CltvExpiryDelta(Random.nextInt(Short.MaxValue)),
maxAcceptedHtlcs = Random.nextInt(Short.MaxValue),
defaultFinalScriptPubKey = Script.write(Script.pay2wpkh(PrivateKey(randomBytes32).publicKey)),
localPaymentBasepoint = Some(PrivateKey(randomBytes32).publicKey),
staticPaymentBasepoint = None,
isFunder = Random.nextBoolean(),
features = Features(randomBytes(256)))
val encoded1 = localParamsCodec(ChannelVersion.STATIC_REMOTEKEY).encode(o1).require
val decoded1 = localParamsCodec(ChannelVersion.STATIC_REMOTEKEY).decode(encoded1).require.value
assert(decoded1.localPaymentBasepoint.isDefined)
assert(o1 === decoded1)
val o1 = o.copy(staticPaymentBasepoint = Some(PrivateKey(randomBytes32).publicKey))

roundtrip(o, localParamsCodec(ChannelVersion.ZEROES))
roundtrip(o1, localParamsCodec(ChannelVersion.STATIC_REMOTEKEY))
}

test("backward compatibility local params with global features") {
// Backwards-compatibility: decode localparams with global features.
val withGlobalFeatures = hex"033b1d42aa7c6a1a3502cbcfe4d2787e9f96237465cd1ba675f50cadf0be17092500010000002a0000000026cb536b00000000568a2768000000004f182e8d0000000040dd1d3d10e3040d00422f82d368b09056d1dcb2d67c4e8cae516abbbc8932f2b7d8f93b3be8e8cc6b64bb164563d567189bad0e07e24e821795aaef2dcbb9e5c1ad579961680202b38de5dd5426c524c7523b1fcdcf8c600d47f4b96a6dd48516b8e0006e81c83464b2800db0f3f63ceeb23a81511d159bae9ad07d10c0d144ba2da6f0cff30e7154eb48c908e9000101000001044500"
val withGlobalFeaturesDecoded = LegacyChannelCodecs.localParamsCodec(ChannelVersion.STANDARD).decode(withGlobalFeatures.bits).require.value
assert(withGlobalFeaturesDecoded.features.toByteVector === hex"0a8a")
}

test("encode/decode remoteparams") {

@@ -169,6 +174,9 @@ class ChannelCodecsSpec extends AnyFunSuite {
}

test("backward compatibility of htlc codec") {

val codec = LegacyChannelCodecs.htlcCodec

// these encoded HTLCs were produced by a previous version of the codec (at commit 8932785e001ddfe32839b3f83468ea19cf00b289)
val encodedHtlc1 = hex"89d5618930919bd77c07ea3931e7791010a9637c3d06e091b2dad38f21bbcffa00000000384ffa48000000003dbf35101f8724fbd36096ea5f462be20d20bc2f93ebc2c8a3f00b7a78c5158b5a0e9f6b1666923e800f1d073e2adcfba5904f1b8234af1c43a6e84a862a044d15f33addf8d41b3cfb7f96d815d2248322aeadd0ce7bacbcc44e611f66c35c439423c099e678c077203d5fc415ec37b798c6c74c9eed0806e6cb20f2b613855772c086cee60642b3b9c3919627c877a62a57dcf6ce003bedc53a8b7a0d7aa91b2304ef8b512fe1a9a043e410d7cd3009edffcb5d4c05cfb545aced4afea8bbe26c5ff492602edf9d4eb731541e60e48fd1ae5e33b04a614346fb16e09ccd9bcb8907fe9fc287757ea9280a03462299e950a274c1dc53fbae8c421e67d7de35709eda0f11bcd417c0f215667e8b8ccae1035d0281214af25bf690102b180e5d4b57323d02ab5cee5d3669b4300539d02eff553143f085cd70e5428b7af3262418aa7664d56c3fd29c00a2f88a6a5ee9685f45b6182c45d492b2170b092e4b5891247bcffe82623b86637bec291cca1dc729f5747d842ecdf2fc24eaf95c522cbebe9a841e7cff837e715b689f0b366b92a2850875636962ba42863ab6df12ee938ada6e6ae795f8b4fbe81adea478caa9899fed0d6ccdf7a2173b69b2d3ff1b93c82c08c4da63b426d2f94912109997e8ee5830c5ffe3b60c97438ae1521a2956e73a9a60f16dc13a5e6565904e04bf66ceda3db693fc7a0c6ad4f7dc8cb7f1ef54527c11589b7c35ce5b20e7f23a0ab107a406fa747435ff08096a7533a8ab7f5d3630d5c20c9161101f922c76079497e00e3ca62bce033b2bb065ea1733c50b5a06492d2b46715812003f29a8754b5dc1649082893e7be76550c58d98e81556e4ddf20a244f363bc23e756c95224335d0eeccd3da06a9161c4c72ae3d93afe902a806eadd2167d15c04cf3028fc61d0843bd270fd702a2c5af889ab5bc79a294847914f8dd409a9b990a96397d9046c385ca9810fb7c7b2c61491c67264257a601be7fe8c47a859b56af41caf06be7ea1cdb540719fc3bc2603675b79fd36a6f2911043b78da9f186d2a01f1209d0d91508e8ebecce09fd72823d0c166542f6d059fa8725d9d719a2532289c88f7a291a6bbe01f5b1f83cc2232d716f7dfc6a103fb8637d759aab939aaa278cffe04a64f4142564113080276bee7d3ec62e3f887838e3821f0dd713337972df994160edc29ccb9b9630c41a9ec7c994cbef2501a610e1c3684e697df230fd6f6f10526c9446e8307a1fb7e4988cdf7fc8aa32c8a09206113d8247aaae42e3942c0ffd291d67837d2c88231c85882667582eca1d2566134c4ee1301de8e1637f09467b473ba3e353992488048bd34b26dcc6f6f474751b7ac5bbad468c64eda2aeabfe6a92150a4faab142229d7934c4a24427441850d0deae5db802b02940435f39ceaa85e2d3d2269510881ab26926c3167487aa138d38b9cf650f59f0aa0b84297479271c2009cde61e5c58c26bf8a15aba86869af83941ec14972d93b6ae4a6ecf6584238150a61487d6bd394db40a10d710fd2d065850e52ea6536a74d88947448221c1ce493fecbf2070998e04d5263935488c2935f2d3afed4d0fc7472c03e652f928e6a18f78029043f219f652d992e104529149a978e5c660c0081fe6a179dbe62dcb597f3b4e497c6049b0255f8f306e4b18c97c339c98270abf86a4eb1af93b14d880eeda203bb3ba5b6e3113d0e003f8e55f3d446bd4dcda686b357ca0adf1fe25390767a40ff086a9258d04c19b0474488aaafac321f087d2bd0dc0e056ad9f5b5afa5f3d82bc3f18b33de9044529637fed05879f6bd440f331c06008dd38c2fb822c22fc4201e97f9ef9fc351807c045dece147d19fd01a68604c3cb6b5e0db1b4d1ebe387670021067d94206fbdc9ed33ac1f49d87f961cb5d44f48805e55f8637ca3de4ec9dd969944ed61de45970b7ef96d9f313a41de1cae380e0fe4b56729f275e2a0a87403c90e80"
val encodedHtlc2 = hex"09d5618930919bd77c07ea3931e7791010a9637c3d06e091b2dad38f21bbcffa00000000384ffa48000000003dbf35101f8724fbd36096ea5f462be20d20bc2f93ebc2c8a3f00b7a78c5158b5a0e9f6b1666923e800f1d073e2adcfba5904f1b8234af1c43a6e84a862a044d15f33addf8d41b3cfb7f96d815d2248322aeadd0ce7bacbcc44e611f66c35c439423c099e678c077203d5fc415ec37b798c6c74c9eed0806e6cb20f2b613855772c086cee60642b3b9c3919627c877a62a57dcf6ce003bedc53a8b7a0d7aa91b2304ef8b512fe1a9a043e410d7cd3009edffcb5d4c05cfb545aced4afea8bbe26c5ff492602edf9d4eb731541e60e48fd1ae5e33b04a614346fb16e09ccd9bcb8907fe9fc287757ea9280a03462299e950a274c1dc53fbae8c421e67d7de35709eda0f11bcd417c0f215667e8b8ccae1035d0281214af25bf690102b180e5d4b57323d02ab5cee5d3669b4300539d02eff553143f085cd70e5428b7af3262418aa7664d56c3fd29c00a2f88a6a5ee9685f45b6182c45d492b2170b092e4b5891247bcffe82623b86637bec291cca1dc729f5747d842ecdf2fc24eaf95c522cbebe9a841e7cff837e715b689f0b366b92a2850875636962ba42863ab6df12ee938ada6e6ae795f8b4fbe81adea478caa9899fed0d6ccdf7a2173b69b2d3ff1b93c82c08c4da63b426d2f94912109997e8ee5830c5ffe3b60c97438ae1521a2956e73a9a60f16dc13a5e6565904e04bf66ceda3db693fc7a0c6ad4f7dc8cb7f1ef54527c11589b7c35ce5b20e7f23a0ab107a406fa747435ff08096a7533a8ab7f5d3630d5c20c9161101f922c76079497e00e3ca62bce033b2bb065ea1733c50b5a06492d2b46715812003f29a8754b5dc1649082893e7be76550c58d98e81556e4ddf20a244f363bc23e756c95224335d0eeccd3da06a9161c4c72ae3d93afe902a806eadd2167d15c04cf3028fc61d0843bd270fd702a2c5af889ab5bc79a294847914f8dd409a9b990a96397d9046c385ca9810fb7c7b2c61491c67264257a601be7fe8c47a859b56af41caf06be7ea1cdb540719fc3bc2603675b79fd36a6f2911043b78da9f186d2a01f1209d0d91508e8ebecce09fd72823d0c166542f6d059fa8725d9d719a2532289c88f7a291a6bbe01f5b1f83cc2232d716f7dfc6a103fb8637d759aab939aaa278cffe04a64f4142564113080276bee7d3ec62e3f887838e3821f0dd713337972df994160edc29ccb9b9630c41a9ec7c994cbef2501a610e1c3684e697df230fd6f6f10526c9446e8307a1fb7e4988cdf7fc8aa32c8a09206113d8247aaae42e3942c0ffd291d67837d2c88231c85882667582eca1d2566134c4ee1301de8e1637f09467b473ba3e353992488048bd34b26dcc6f6f474751b7ac5bbad468c64eda2aeabfe6a92150a4faab142229d7934c4a24427441850d0deae5db802b02940435f39ceaa85e2d3d2269510881ab26926c3167487aa138d38b9cf650f59f0aa0b84297479271c2009cde61e5c58c26bf8a15aba86869af83941ec14972d93b6ae4a6ecf6584238150a61487d6bd394db40a10d710fd2d065850e52ea6536a74d88947448221c1ce493fecbf2070998e04d5263935488c2935f2d3afed4d0fc7472c03e652f928e6a18f78029043f219f652d992e104529149a978e5c660c0081fe6a179dbe62dcb597f3b4e497c6049b0255f8f306e4b18c97c339c98270abf86a4eb1af93b14d880eeda203bb3ba5b6e3113d0e003f8e55f3d446bd4dcda686b357ca0adf1fe25390767a40ff086a9258d04c19b0474488aaafac321f087d2bd0dc0e056ad9f5b5afa5f3d82bc3f18b33de9044529637fed05879f6bd440f331c06008dd38c2fb822c22fc4201e97f9ef9fc351807c045dece147d19fd01a68604c3cb6b5e0db1b4d1ebe387670021067d94206fbdc9ed33ac1f49d87f961cb5d44f48805e55f8637ca3de4ec9dd969944ed61de45970b7ef96d9f313a41de1cae380e0fe4b56729f275e2a0a87403c90e80"

@@ -187,16 +195,16 @@ class ChannelCodecsSpec extends AnyFunSuite {
)
val remaining = bin"0000000" // 7 bits remainder because the direction is encoded with 1 bit and we are dealing with bytes

val DecodeResult(h1, r1) = htlcCodec.decode(encodedHtlc1.toBitVector).require
val DecodeResult(h2, r2) = htlcCodec.decode(encodedHtlc2.toBitVector).require
val DecodeResult(h1, r1) = codec.decode(encodedHtlc1.toBitVector).require
val DecodeResult(h2, r2) = codec.decode(encodedHtlc2.toBitVector).require

assert(h1 == IncomingHtlc(ref))
assert(h2 == OutgoingHtlc(ref))
assert(r1 == remaining)
assert(r2 == remaining)

assert(htlcCodec.encode(h1).require.bytes === encodedHtlc1)
assert(htlcCodec.encode(h2).require.bytes === encodedHtlc2)
assert(codec.encode(h1).require.bytes === encodedHtlc1)
assert(codec.encode(h2).require.bytes === encodedHtlc2)
}
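
The two reference payloads above differ only in their first bit (0x89… vs 0x09…), which is how the codec distinguishes incoming from outgoing HTLCs and why 7 bits of remainder are left over. A minimal scodec sketch of that framing, with hypothetical names:

import scodec.Codec
import scodec.bits.BitVector
import scodec.codecs.{bits, bool}

// Sketch only: one leading direction bit, then the encoded HTLC payload as raw bits.
val directedHtlcSketch: Codec[(Boolean, BitVector)] = bool ~ bits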

test("encode/decode commitment spec") {

@@ -232,7 +240,7 @@ class ChannelCodecsSpec extends AnyFunSuite {
test("encode/decode origin") {
val id = UUID.randomUUID()
assert(originCodec.decodeValue(originCodec.encode(Local(id, Some(ActorSystem("test").deadLetters))).require).require === Local(id, None))
assert(originCodec.decodeValue(hex"0001 0123456789abcdef0123456789abcdef".bits).require === Local(UNKNOWN_UUID, None))
val ZERO_UUID = UUID.fromString("00000000-0000-0000-0000-000000000000")
val relayed = Relayed(randomBytes32, 4324, 12000000 msat, 11000000 msat)
assert(originCodec.decodeValue(originCodec.encode(relayed).require).require === relayed)
val trampolineRelayed = TrampolineRelayed((randomBytes32, 1L) :: (randomBytes32, 1L) :: (randomBytes32, 2L) :: Nil, None)

@@ -311,8 +319,8 @@ class ChannelCodecsSpec extends AnyFunSuite {
assert(System.currentTimeMillis.milliseconds.toSeconds - data_new.asInstanceOf[DATA_WAIT_FOR_FUNDING_CONFIRMED].waitingSince < 3600) // we just set this timestamp to current time
// and re-encode it with the new codec
val bin_new = ByteVector(stateDataCodec.encode(data_new).require.toByteVector.toArray)
// data should now be encoded under the new format, with version=0 and type=8
assert(bin_new.startsWith(hex"000008"))
// data should now be encoded under the new format
assert(bin_new.startsWith(hex"010020"))
// now let's decode it again
val data_new2 = stateDataCodec.decode(bin_new.toBitVector).require.value
// data should match perfectly

@@ -326,8 +334,8 @@ class ChannelCodecsSpec extends AnyFunSuite {
val u2 = hex"A94A853FCDE515F89259E03D10368B1A600B3BF78F6BD5C968469C0816F45EFF7878714DF26B580D5A304334E46816D5AC37B098EBC46C1CE47E37504D052DD643497FD7F826957108F4A30FD9CEC3AEBA79972084E90EAD01EA33090000000013AB9500006E00005D1149290001009000000000000003E8000003E800000001".bits

// check that we decode the correct length, and that we just take a peek without actually consuming data
assert(noUnknownFieldsChannelUpdateSizeCodec.decode(u1) == Attempt.successful(DecodeResult(136, u1)))
assert(noUnknownFieldsChannelUpdateSizeCodec.decode(u2) == Attempt.successful(DecodeResult(128, u2)))
assert(LegacyChannelCodecs.noUnknownFieldsChannelUpdateSizeCodec.decode(u1) == Attempt.successful(DecodeResult(136, u1)))
assert(LegacyChannelCodecs.noUnknownFieldsChannelUpdateSizeCodec.decode(u2) == Attempt.successful(DecodeResult(128, u2)))
}

test("backward compatibility DATA_NORMAL_COMPAT_03_Codec (roundtrip)") {

@@ -354,8 +362,8 @@ class ChannelCodecsSpec extends AnyFunSuite {
val oldnormal = stateDataCodec.decode(oldbin.bits).require.value
// and we encode with the new codec
val newbin = stateDataCodec.encode(oldnormal).require.bytes
// make sure that encoding used the new 0x10 codec
assert(newbin.startsWith(hex"000010"))
// make sure that encoding used the new codec
assert(newbin.startsWith(hex"010022"))
// make sure that roundtrip yields the same data
val newnormal = stateDataCodec.decode(newbin.bits).require.value
assert(newnormal === oldnormal)

@@ -424,7 +432,7 @@ object ChannelCodecsSpec {
toSelfDelay = CltvExpiryDelta(144),
maxAcceptedHtlcs = 50,
defaultFinalScriptPubKey = ByteVector.empty,
localPaymentBasepoint = None,
staticPaymentBasepoint = None,
isFunder = true,
features = Features.empty)

@@ -23,6 +23,7 @@ import fr.acinq.eclair.payment.PaymentRequest
import FormParamExtractors._
import JsonSupport.serialization
import JsonSupport.json4sJacksonFormats
import fr.acinq.eclair.ApiTypes.ChannelIdentifier
import shapeless.HNil
import spray.http.{ContentTypes, HttpEntity, HttpResponse, StatusCodes}
import spray.httpx.marshalling.Marshaller

@@ -37,7 +38,9 @@ trait ExtraDirectives extends Directives {

// named and typed URL parameters used across several routes
val shortChannelIdFormParam_opt = "shortChannelId".as[Option[ShortChannelId]](shortChannelIdUnmarshaller)
val shortChannelIdsFormParam_opt = "shortChannelIds".as[Option[List[ShortChannelId]]](shortChannelIdsUnmarshaller)
val channelIdFormParam_opt = "channelId".as[Option[ByteVector32]](sha256HashUnmarshaller)
val channelIdsFormParam_opt = "channelIds".as[Option[List[ByteVector32]]](sha256HashesUnmarshaller)
val nodeIdFormParam_opt = "nodeId".as[Option[PublicKey]](publicKeyUnmarshaller)
val paymentHashFormParam_opt = "paymentHash".as[Option[ByteVector32]](sha256HashUnmarshaller)
val fromFormParam_opt = "from".as[Long]

@@ -54,11 +57,20 @@ trait ExtraDirectives extends Directives {
}

import shapeless.::
def withChannelIdentifier: Directive1[Either[ByteVector32, ShortChannelId]] = formFields(channelIdFormParam_opt, shortChannelIdFormParam_opt).hflatMap {
case None :: None :: HNil => reject(MalformedFormFieldRejection("channelId/shortChannelId", "Must specify either the channelId or shortChannelId"))
def withChannelIdentifier: Directive1[ChannelIdentifier] = formFields(channelIdFormParam_opt, shortChannelIdFormParam_opt).hflatMap {
case Some(channelId) :: None :: HNil => provide(Left(channelId))
case None :: Some(shortChannelId) :: HNil => provide(Right(shortChannelId))
case _ => reject(MalformedFormFieldRejection("channelId/shortChannelId", "Must specify either the channelId or shortChannelId"))
case _ => reject(MalformedFormFieldRejection("channelId/shortChannelId", "Must specify either the channelId or shortChannelId (not both)"))
}

def withChannelsIdentifier: Directive1[List[ChannelIdentifier]] = formFields(channelIdFormParam_opt, channelIdsFormParam_opt, shortChannelIdFormParam_opt, shortChannelIdsFormParam_opt).hflatMap {
case None :: None :: None :: None :: HNil => reject(MalformedFormFieldRejection("channelId(s)/shortChannelId(s)", "Must specify channelId, channelIds, shortChannelId or shortChannelIds"))
case channelId_opt :: channelIds_opt :: shortChannelId_opt :: shortChannelIds_opt :: HNil =>
val channelId: List[ChannelIdentifier] = channelId_opt.map(cid => Left(cid)).toList
val channelIds: List[ChannelIdentifier] = channelIds_opt.map(_.map(cid => Left(cid))).toList.flatten
val shortChannelId: List[ChannelIdentifier] = shortChannelId_opt.map(scid => Right(scid)).toList
val shortChannelIds: List[ChannelIdentifier] = shortChannelIds_opt.map(_.map(scid => Right(scid))).toList.flatten
provide((channelId ++ channelIds ++ shortChannelId ++ shortChannelIds).distinct)
}

}
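
A usage illustration of withChannelsIdentifier with hypothetical field values: the four optional form fields are merged into a single de-duplicated list, channel ids wrapped in Left and short channel ids in Right, since ChannelIdentifier is Either[ByteVector32, ShortChannelId]. A standalone sketch of that merge:

import fr.acinq.bitcoin.ByteVector32
import fr.acinq.eclair.ApiTypes.ChannelIdentifier
import fr.acinq.eclair.ShortChannelId

// Hypothetical inputs: one channelId plus a comma-separated shortChannelIds list, as the API would receive them.
val channelId_opt: Option[ByteVector32] = Some(ByteVector32.One)
val channelIds_opt: Option[List[ByteVector32]] = None
val shortChannelId_opt: Option[ShortChannelId] = None
val shortChannelIds_opt: Option[List[ShortChannelId]] = Some(List(ShortChannelId("565779x2711x0"), ShortChannelId("565779x2711x1")))

// Same merge as the directive above: wrap, concatenate, deduplicate.
val identifiers: List[ChannelIdentifier] =
  (channelId_opt.map(cid => Left(cid)).toList ++
    channelIds_opt.map(_.map(cid => Left(cid))).toList.flatten ++
    shortChannelId_opt.map(scid => Right(scid)).toList ++
    shortChannelIds_opt.map(_.map(scid => Right(scid))).toList.flatten).distinct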
@@ -46,6 +46,10 @@ object FormParamExtractors {
ByteVector32.fromValidHex(bin)
}

implicit val sha256HashesUnmarshaller: Deserializer[Option[String], Option[List[ByteVector32]]] = strictDeserializer { bin =>
bin.split(",").map(ByteVector32.fromValidHex).toList
}

implicit val bolt11Unmarshaller: Deserializer[Option[String], Option[PaymentRequest]] = strictDeserializer { rawRequest =>
PaymentRequest.read(rawRequest)
}

@@ -54,6 +58,10 @@ object FormParamExtractors {
ShortChannelId(str)
}

implicit val shortChannelIdsUnmarshaller: Deserializer[Option[String], Option[List[ShortChannelId]]] = strictDeserializer { str =>
str.split(",").map(ShortChannelId(_)).toList
}

implicit val javaUUIDUnmarshaller: Deserializer[Option[String], Option[UUID]] = strictDeserializer { str =>
UUID.fromString(str)
}

@@ -89,7 +97,6 @@ object FormParamExtractors {
MilliSatoshi(str.toLong)
}


def strictDeserializer[T](f: String => T): Deserializer[Option[String], Option[T]] = Deserializer.fromFunction2Converter {
case Some(str) => Some(f(str))
case None => None
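
A usage illustration of the new comma-separated unmarshallers above (hypothetical input): the raw form value is split on commas and each element parsed individually, mirroring shortChannelIdsUnmarshaller.

import fr.acinq.eclair.ShortChannelId

// "shortChannelIds=565779x2711x0,565779x2711x1" would be parsed into this list.
val shortChannelIds: List[ShortChannelId] = "565779x2711x0,565779x2711x1".split(",").map(ShortChannelId(_)).toList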
@@ -22,27 +22,26 @@ import akka.util.Timeout
import com.google.common.net.HostAndPort
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.bitcoin.{ByteVector32, Satoshi}
import fr.acinq.eclair.{CltvExpiryDelta, Eclair, MilliSatoshi}
import fr.acinq.eclair.api.FormParamExtractors._
import fr.acinq.eclair.io.NodeURI
import fr.acinq.eclair.payment.{PaymentReceived, PaymentRequest, _}
import fr.acinq.eclair.payment.PaymentRequest
import fr.acinq.eclair.{CltvExpiryDelta, Eclair, MilliSatoshi}
import grizzled.slf4j.Logging
import scodec.bits.ByteVector
import spray.http.CacheDirectives.public
import spray.http.{HttpMethods, StatusCodes}
import spray.http.CacheDirectives.{public, _}
import spray.http.HttpHeaders._
import spray.http.CacheDirectives._
import spray.http.{HttpMethods, StatusCodes}
import spray.routing.authentication.{BasicAuth, UserPass}
import spray.routing.{ExceptionHandler, HttpServiceActor, MalformedFormFieldRejection, Route}
import spray.routing.{ExceptionHandler, MalformedFormFieldRejection, Route}

import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

case class ErrorResponse(error: String)

trait Service extends ExtraDirectives with Logging {

import JsonSupport.{json4sFormats, serialization, json4sMarshaller}
import JsonSupport.json4sMarshaller

implicit val ec = ExecutionContext.global
implicit val timeout = Timeout(30 seconds)

@@ -74,7 +73,7 @@ trait Service extends ExtraDirectives with Logging {
authenticate(BasicAuth(userPassAuthenticator _, realm = "Access restricted")) { _ =>
post {
path("getinfo") {
complete(eclairApi.getInfoResponse())
complete(eclairApi.getInfo())
} ~
path("connect") {
formFields("uri".as[Option[NodeURI]]) { uri =>

@@ -97,22 +96,22 @@ trait Service extends ExtraDirectives with Logging {
}
} ~
path("updaterelayfee") {
withChannelIdentifier { channelIdentifier =>
withChannelsIdentifier { channels =>
formFields("feeBaseMsat".as[Option[MilliSatoshi]](millisatoshiUnmarshaller), "feeProportionalMillionths".as[Option[Long]]) { (feeBase, feeProportional) =>
complete(eclairApi.updateRelayFee(channelIdentifier :: Nil, feeBase.get, feeProportional.get))
complete(eclairApi.updateRelayFee(channels, feeBase.get, feeProportional.get))
}
}
} ~
path("close") {
withChannelIdentifier { channelIdentifier =>
withChannelsIdentifier { channels =>
formFields("scriptPubKey".as[Option[ByteVector]](binaryDataUnmarshaller)) { scriptPubKey_opt =>
complete(eclairApi.close(channelIdentifier :: Nil, scriptPubKey_opt))
complete(eclairApi.close(channels, scriptPubKey_opt))
}
}
} ~
path("forceclose") {
withChannelIdentifier { channelIdentifier =>
complete(eclairApi.forceClose(channelIdentifier :: Nil))
withChannelsIdentifier { channels =>
complete(eclairApi.forceClose(channels))
}
} ~
path("peers") {

@@ -134,6 +133,9 @@ trait Service extends ExtraDirectives with Logging {
path("allchannels") {
complete(eclairApi.allChannels())
} ~
path("networkstats") {
complete(eclairApi.networkStats())
} ~
path("allupdates") {
formFields(nodeIdFormParam_opt) { nodeId_opt =>
complete(eclairApi.allUpdates(nodeId_opt))

@@ -176,6 +178,10 @@ trait Service extends ExtraDirectives with Logging {
(amountMsat, recipientAmountMsat_opt, invoice, finalCltvExpiry, route, externalId_opt, parentId_opt, trampolineSecret_opt, trampolineFeesMsat_opt, trampolineCltvExpiry_opt, trampolineNodes_opt) =>
complete(eclairApi.sendToRoute(amountMsat.get, recipientAmountMsat_opt, externalId_opt, parentId_opt, invoice.get, CltvExpiryDelta(finalCltvExpiry), route.get, trampolineSecret_opt, trampolineFeesMsat_opt, trampolineCltvExpiry_opt.map(CltvExpiryDelta), trampolineNodes_opt.getOrElse(Nil)))
}
} ~ path("sendonchain") {
formFields("address".as[String], "amountSatoshis".as[Option[Satoshi]](satoshiUnmarshaller), "confirmationTarget".as[Long]) { (address, amount_opt, confirmationTarget) =>
complete(eclairApi.sendOnChain(address, amount_opt.get, confirmationTarget))
}
} ~
path("getsentinfo") {
formFields("id".as[Option[UUID]]) { id =>

@@ -226,6 +232,17 @@ trait Service extends ExtraDirectives with Logging {
} ~
path("usablebalances") {
complete(eclairApi.usableBalances())
} ~
path("onchainbalance") {
complete(eclairApi.onChainBalance())
} ~
path("getnewaddress") {
complete(eclairApi.newAddress())
} ~
path("onchaintransactions") {
formFields("count".as[Int].?, "skip".as[Int].?) { (count_opt, skip_opt) =>
complete(eclairApi.onChainTransactions(count_opt.getOrElse(10), skip_opt.getOrElse(0)))
}
}
}
}

@@ -1 +1 @@
{"version":"1.0.0-SNAPSHOT-e3f1ec0","nodeId":"03af0ed6052cf28d670665549bc86f4b721c9fdb309d40c58f5811f63966e005d0","alias":"alice","color":"#000102","features":{"activated":[{"name":"option_data_loss_protect","support":"mandatory"},{"name":"gossip_queries_ex","support":"optional"}],"unknown":[]},"chainHash":"06226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f","blockHeight":9999,"publicAddresses":["localhost:9731"]}
{"version":"1.0.0-SNAPSHOT-e3f1ec0","nodeId":"03af0ed6052cf28d670665549bc86f4b721c9fdb309d40c58f5811f63966e005d0","alias":"alice","color":"#000102","features":{"activated":[{"name":"option_data_loss_protect","support":"mandatory"},{"name":"gossip_queries_ex","support":"optional"}],"unknown":[]},"chainHash":"06226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f","network":"regtest","blockHeight":9999,"publicAddresses":["localhost:9731"]}
@@ -22,6 +22,7 @@ import akka.util.Timeout
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.bitcoin.{Block, ByteVector32}
import fr.acinq.eclair.ApiTypes.ChannelIdentifier
import fr.acinq.eclair.FeatureSupport.{Mandatory, Optional}
import fr.acinq.eclair.Features.{ChannelRangeQueriesExtended, OptionDataLossProtect}
import fr.acinq.eclair.channel.ChannelCommandResponse
import fr.acinq.eclair.channel.ChannelCommandResponse.ChannelClosed

@@ -30,7 +31,7 @@ import fr.acinq.eclair.io.Peer.PeerInfo
import fr.acinq.eclair.payment._
import fr.acinq.eclair.payment.relay.Relayer.UsableBalance
import fr.acinq.eclair.payment.send.PaymentInitiator.SendPaymentToRouteResponse
import fr.acinq.eclair.wire.NodeAddress
import fr.acinq.eclair.wire.{Color, NodeAddress}
import fr.acinq.eclair.{CltvExpiryDelta, Eclair, MilliSatoshi, _}
import org.mockito.scalatest.IdiomaticMockito
import org.scalatest.matchers.should.Matchers

@@ -171,15 +172,16 @@ class ApiServiceSpec extends AnyFunSuiteLike with ScalatestRouteTest with RouteT
val mockEclair = mock[Eclair]
val service = new MockService(mockEclair)

mockEclair.getInfoResponse()(any[Timeout]) returns Future.successful(GetInfoResponse(
mockEclair.getInfo()(any[Timeout]) returns Future.successful(GetInfoResponse(
version = "1.0.0-SNAPSHOT-e3f1ec0",
color = Color(0.toByte, 1.toByte, 2.toByte).toString,
features = Features(Set(ActivatedFeature(OptionDataLossProtect, Mandatory), ActivatedFeature(ChannelRangeQueriesExtended, Optional))),
nodeId = aliceNodeId,
alias = "alice",
chainHash = ByteVector32(hex"06226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f"),
network = "regtest",
blockHeight = 9999,
publicAddresses = NodeAddress.fromParts("localhost", 9731).get :: Nil,
version = "1.0.0-SNAPSHOT-e3f1ec0",
color = "#000102",
features = Features(Set(ActivatedFeature(OptionDataLossProtect, FeatureSupport.Mandatory), ActivatedFeature(ChannelRangeQueriesExtended, FeatureSupport.Optional)))
publicAddresses = NodeAddress.fromParts("localhost", 9731).get :: Nil
))

Post("/getinfo") ~>

@@ -189,8 +191,8 @@ class ApiServiceSpec extends AnyFunSuiteLike with ScalatestRouteTest with RouteT
assert(handled)
assert(status == OK)
val resp = responseAs[String]
assert(resp.toString.contains(aliceNodeId.toString))
mockEclair.getInfoResponse()(any[Timeout]).wasCalled(once)
assert(resp.contains(aliceNodeId.toString))
mockEclair.getInfo()(any[Timeout]).wasCalled(once)
matchTestJson("getinfo", resp)
}
}