Mirror of https://github.com/ACINQ/eclair.git (synced 2025-03-13 19:37:35 +01:00)

Commit 74ca047535: Merge branch 'master' into android

42 changed files with 918 additions and 546 deletions
@ -1,40 +0,0 @@

## JSON-RPC API

:warning: Note this interface is being deprecated.

method | params | description
-------|--------|------------
getinfo | | return basic node information (id, chain hash, current block height)
connect | nodeId, host, port | open a secure connection to a lightning node
connect | uri | open a secure connection to a lightning node
open | nodeId, fundingSatoshis, pushMsat = 0, feerateSatPerByte = ?, channelFlags = 0x01 | open a channel with another lightning node, by default push = 0, feerate for the funding tx targets 6 blocks, and channel is announced
updaterelayfee | channelId, feeBaseMsat, feeProportionalMillionths | update relay fee for payments going through this channel
peers | | list existing local peers
channels | | list existing local channels
channels | nodeId | list existing local channels opened with a particular nodeId
channel | channelId | retrieve detailed information about a given channel
channelstats | | retrieve statistics about channel usage (fees, number and average amount of payments)
allnodes | | list all known nodes
allchannels | | list all known channels
allupdates | | list all channel updates
allupdates | nodeId | list all channel updates for this nodeId
receive | description | generate a payment request without a required amount (can be useful for donations)
receive | amountMsat, description | generate a payment request for a given amount
receive | amountMsat, description, expirySeconds | generate a payment request for a given amount that expires after the given number of seconds
parseinvoice | paymentRequest | returns node, amount and payment hash in a payment request
findroute | paymentRequest | returns nodes and channels of the route for this payment request if there is any
findroute | paymentRequest, amountMsat | returns nodes and channels of the route for this payment request and amount, if there is any
findroute | nodeId, amountMsat | returns nodes and channels of the route to the nodeId, if there is any
send | amountMsat, paymentHash, nodeId | send a payment to a lightning node
send | paymentRequest | send a payment to a lightning node using a BOLT11 payment request
send | paymentRequest, amountMsat | send a payment to a lightning node using a BOLT11 payment request and a custom amount
checkpayment | paymentHash | returns true if the payment has been received, false otherwise
checkpayment | paymentRequest | returns true if the payment has been received, false otherwise
close | channelId | close a channel
close | channelId, scriptPubKey | close a channel and send the funds to the given scriptPubKey
forceclose | channelId | force-close a channel by publishing the local commitment tx (careful: this is more expensive than a regular close and will incur a delay before funds are spendable)
audit | | list all sent/received/relayed payments
audit | from, to | list sent/received/relayed payments in that interval (from <= timestamp < to)
networkfees | | list all network fees paid to the miners, by transaction
networkfees | from, to | list network fees paid to the miners, by transaction, in that interval (from <= timestamp < to)
help | | display available methods
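For context, calls to this deprecated interface are plain JSON-over-HTTP with basic authentication against the configured API password. A minimal sketch; the endpoint, password and the exact request envelope are assumptions and may differ between eclair versions:

```scala
import java.net.URI
import java.net.http.{HttpClient, HttpRequest, HttpResponse}
import java.util.Base64

object JsonRpcGetInfo extends App {
  // Assumed values: adapt to your eclair.conf (eclair.api.binding-ip / port / password).
  val endpoint = "http://127.0.0.1:8080"
  val password = "secret"
  val auth = Base64.getEncoder.encodeToString(s":$password".getBytes("UTF-8"))

  // JSON-RPC style body: method name plus positional params (assumed shape).
  val body = """{"jsonrpc":"2.0","id":1,"method":"getinfo","params":[]}"""

  val request = HttpRequest.newBuilder(URI.create(endpoint))
    .header("Content-Type", "application/json")
    .header("Authorization", s"Basic $auth")
    .POST(HttpRequest.BodyPublishers.ofString(body))
    .build()

  val response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString())
  println(response.body()) // node id, chain hash, current block height
}
```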
@ -32,8 +32,6 @@ Eclair offers a feature rich HTTP API that enables application developers to eas

For more information please visit the [API documentation website](https://acinq.github.io/eclair).

:warning: You can still use the old API by setting the `eclair.api.use-old-api=true` parameter, but it is now deprecated and will soon be removed. The old documentation is still available [here](OLD-API-DOCS.md).

## Installation

### Configuring Bitcoin Core

@ -60,7 +58,7 @@ Eclair is developed in [Scala](https://www.scala-lang.org/), a powerful function
* eclair-node, which is a headless application that you can run on servers and desktops, and control from the command line
* eclair-node-gui, which also includes a JavaFX GUI

To run Eclair, you first need to install Java; we recommend that you use [OpenJDK 11](https://jdk.java.net/11/). Eclair will also run on Oracle JDK 1.8, Oracle JDK 11, and other versions of OpenJDK, but we don't recommend using them.
To run Eclair, you first need to install Java; we recommend that you use [OpenJDK 11](https://adoptopenjdk.net/?variant=openjdk11&jvmVariant=hotspot). Eclair will also run on Oracle JDK 1.8, Oracle JDK 11, and other versions of OpenJDK, but we don't recommend using them.

Then download our latest [release](https://github.com/ACINQ/eclair/releases) and, depending on whether or not you want a GUI, run the following command:
* with GUI:
@ -13,7 +13,6 @@ eclair {
    binding-ip = "127.0.0.1"
    port = 8080
    password = "" // password for basic auth, must be non-empty if the json-rpc api is enabled
    use-old-api = false
  }

  watcher-type = "electrum"
@ -23,19 +23,20 @@ import akka.pattern._
import akka.util.Timeout
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.bitcoin.{ByteVector32, MilliSatoshi, Satoshi}
import fr.acinq.eclair.TimestampQueryFilters._
import fr.acinq.eclair.channel.Register.{Forward, ForwardShortId}
import fr.acinq.eclair.channel._
import fr.acinq.eclair.db.{IncomingPayment, NetworkFee, OutgoingPayment, Stats}
import fr.acinq.eclair.io.Peer.{GetPeerInfo, PeerInfo}
import fr.acinq.eclair.io.{NodeURI, Peer, Switchboard}
import fr.acinq.eclair.io.{NodeURI, Peer}
import fr.acinq.eclair.payment.PaymentLifecycle._
import fr.acinq.eclair.payment._
import fr.acinq.eclair.router.{ChannelDesc, RouteRequest, RouteResponse, Router}
import fr.acinq.eclair.wire.{ChannelAnnouncement, ChannelUpdate, NodeAddress, NodeAnnouncement}
import scodec.bits.ByteVector

import scala.concurrent.Future
import scala.concurrent.duration._
import fr.acinq.eclair.payment.{PaymentReceived, PaymentRelayed, PaymentRequest, PaymentSent}
import fr.acinq.eclair.wire.{ChannelAnnouncement, ChannelUpdate, NodeAddress, NodeAnnouncement}
import TimestampQueryFilters._

case class GetInfoResponse(nodeId: PublicKey, alias: String, chainHash: ByteVector32, blockHeight: Int, publicAddresses: Seq[NodeAddress])

@ -105,6 +106,7 @@ trait Eclair {

  def getInfoResponse()(implicit timeout: Timeout): Future[GetInfoResponse]

  def usableBalances()(implicit timeout: Timeout): Future[Iterable[UsableBalances]]
}

class EclairImpl(appKit: Kit) extends Eclair {

@ -269,4 +271,5 @@ class EclairImpl(appKit: Kit) extends Eclair {
      publicAddresses = appKit.nodeParams.publicAddresses)
  )

  override def usableBalances()(implicit timeout: Timeout): Future[Iterable[UsableBalances]] = (appKit.relayer ? GetUsableBalances).mapTo[Iterable[UsableBalances]]
}
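The new `usableBalances()` entry point simply asks the relayer for `GetUsableBalances` and returns one `UsableBalances` entry per outgoing channel. A minimal caller sketch, assuming you already have a running node and its `Eclair` API instance (the helper name and timeout are assumptions):

```scala
import akka.util.Timeout
import fr.acinq.eclair.Eclair
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

// Hypothetical helper: print per-channel usable balances from a running node's Eclair API instance.
def printUsableBalances(eclair: Eclair): Unit = {
  implicit val timeout: Timeout = Timeout(30.seconds)
  eclair.usableBalances().foreach { balances =>
    balances.foreach { b =>
      // canSendMsat/canReceiveMsat come from the commitments, isPublic from the channel flags
      println(s"${b.shortChannelId}: canSend=${b.canSendMsat} msat canReceive=${b.canReceiveMsat} msat public=${b.isPublic}")
    }
  }
}
```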
@ -108,6 +108,7 @@ object NodeParams {
    seedPath.exists() match {
      case true => ByteVector(Files.toByteArray(seedPath))
      case false =>
        datadir.mkdirs()
        val seed = randomBytes32
        Files.write(seed.toArray, seedPath)
        seed
@ -91,7 +91,7 @@ class ElectrumClient(serverAddress: InetSocketAddress, ssl: SSL)(implicit val ec
    val channelOpenFuture = b.connect(serverAddress.getHostName, serverAddress.getPort)

    def errorHandler(t: Throwable) = {
    def errorHandler(t: Throwable): Unit = {
      log.info("server={} connection error (reason={})", serverAddress, t.getMessage)
      self ! Close
    }
@ -169,6 +169,7 @@ class ElectrumClient(serverAddress: InetSocketAddress, ssl: SSL)(implicit val ec
    val json = ("method" -> request.method) ~ ("params" -> request.params.map {
      case s: String => new JString(s)
      case b: ByteVector32 => new JString(b.toHex)
      case b: Boolean => new JBool(b)
      case t: Int => new JInt(t)
      case t: Long => new JLong(t)
      case t: Double => new JDouble(t)
@ -182,8 +183,6 @@ class ElectrumClient(serverAddress: InetSocketAddress, ssl: SSL)(implicit val ec
  /**
    * Forwards incoming messages to the underlying actor
    *
    * @param actor
    */
  class ActorHandler(actor: ActorRef) extends ChannelInboundHandlerAdapter {

@ -220,7 +219,7 @@ class ElectrumClient(serverAddress: InetSocketAddress, ssl: SSL)(implicit val ec
    case PingResponse => ()

    case Close =>
      statusListeners.map(_ ! ElectrumDisconnected)
      statusListeners.foreach(_ ! ElectrumDisconnected)
      context.stop(self)

    case _ => log.warning("server={} unhandled message {}", serverAddress, message)
@ -282,7 +281,7 @@ class ElectrumClient(serverAddress: InetSocketAddress, ssl: SSL)(implicit val ec
    case Right(json: JsonRPCResponse) =>
      val (height, header) = parseBlockHeader(json.result)
      log.debug("connected to server={}, tip={} height={}", serverAddress, header.hash, height)
      statusListeners.map(_ ! ElectrumReady(height, header, serverAddress))
      statusListeners.foreach(_ ! ElectrumReady(height, header, serverAddress))
      context become connected(ctx, height, header, Map())

    case AddStatusListener(actor) => statusListeners += actor
@ -322,11 +321,11 @@ class ElectrumClient(serverAddress: InetSocketAddress, ssl: SSL)(implicit val ec
      }
      context become connected(ctx, height, tip, requests - json.id)

    case Left(response: HeaderSubscriptionResponse) => headerSubscriptions.map(_ ! response)
    case Left(response: HeaderSubscriptionResponse) => headerSubscriptions.foreach(_ ! response)

    case Left(response: AddressSubscriptionResponse) => addressSubscriptions.get(response.address).map(listeners => listeners.map(_ ! response))
    case Left(response: AddressSubscriptionResponse) => addressSubscriptions.get(response.address).foreach(listeners => listeners.foreach(_ ! response))

    case Left(response: ScriptHashSubscriptionResponse) => scriptHashSubscriptions.get(response.scriptHash).map(listeners => listeners.map(_ ! response))
    case Left(response: ScriptHashSubscriptionResponse) => scriptHashSubscriptions.get(response.scriptHash).foreach(listeners => listeners.foreach(_ ! response))

    case HeaderSubscriptionResponse(height, newtip) =>
      log.info("server={} new tip={}", serverAddress, newtip)
@ -381,6 +380,9 @@ object ElectrumClient {
  case class BroadcastTransaction(tx: Transaction) extends Request
  case class BroadcastTransactionResponse(tx: Transaction, error: Option[Error]) extends Response

  case class GetTransactionIdFromPosition(height: Int, tx_pos: Int, merkle: Boolean = false) extends Request
  case class GetTransactionIdFromPositionResponse(txid: ByteVector32, height: Int, tx_pos: Int, merkle: Seq[ByteVector32]) extends Response

  case class GetTransaction(txid: ByteVector32) extends Request
  case class GetTransactionResponse(tx: Transaction) extends Response

@ -533,10 +535,11 @@ object ElectrumClient {
    case AddressSubscription(address, _) => JsonRPCRequest(id = reqId, method = "blockchain.address.subscribe", params = address :: Nil)
    case ScriptHashSubscription(scriptHash, _) => JsonRPCRequest(id = reqId, method = "blockchain.scripthash.subscribe", params = scriptHash.toString() :: Nil)
    case BroadcastTransaction(tx) => JsonRPCRequest(id = reqId, method = "blockchain.transaction.broadcast", params = Transaction.write(tx).toHex :: Nil)
    case GetTransactionIdFromPosition(height, tx_pos, merkle) => JsonRPCRequest(id = reqId, method = "blockchain.transaction.id_from_pos", params = height :: tx_pos :: merkle :: Nil)
    case GetTransaction(txid) => JsonRPCRequest(id = reqId, method = "blockchain.transaction.get", params = txid :: Nil)
    case HeaderSubscription(_) => JsonRPCRequest(id = reqId, method = "blockchain.headers.subscribe", params = Nil)
    case GetHeader(height) => JsonRPCRequest(id = reqId, method = "blockchain.block.header", params = height :: Nil)
    case GetHeaders(start_height, count, cp_height) => JsonRPCRequest(id = reqId, method = "blockchain.block.headers", params = start_height :: count :: Nil)
    case GetHeaders(start_height, count, _) => JsonRPCRequest(id = reqId, method = "blockchain.block.headers", params = start_height :: count :: Nil)
    case GetMerkle(txid, height) => JsonRPCRequest(id = reqId, method = "blockchain.transaction.get_merkle", params = txid :: height :: Nil)
  }

@ -548,7 +551,7 @@ object ElectrumClient {
        case _ => ServerError(request, error)
      }
      case None => (request: @unchecked) match {
        case s: ServerVersion =>
        case _: ServerVersion =>
          val JArray(jitems) = json.result
          val JString(clientName) = jitems(0)
          val JString(protocolVersion) = jitems(1)
@ -590,6 +593,14 @@ object ElectrumClient {
            UnspentItem(ByteVector32.fromValidHex(tx_hash), tx_pos, value, height)
          })
          ScriptHashListUnspentResponse(scripthash, items)
        case GetTransactionIdFromPosition(height, tx_pos, false) =>
          val JString(tx_hash) = json.result
          GetTransactionIdFromPositionResponse(ByteVector32.fromValidHex(tx_hash), height, tx_pos, Nil)
        case GetTransactionIdFromPosition(height, tx_pos, true) =>
          val JString(tx_hash) = json.result \ "tx_hash"
          val JArray(hashes) = json.result \ "merkle"
          val leaves = hashes collect { case JString(value) => ByteVector32.fromValidHex(value) }
          GetTransactionIdFromPositionResponse(ByteVector32.fromValidHex(tx_hash), height, tx_pos, leaves)
        case GetTransaction(_) =>
          val JString(hex) = json.result
          GetTransactionResponse(Transaction.read(hex))
@ -614,16 +625,15 @@ object ElectrumClient {
        case GetHeader(height) =>
          val JString(hex) = json.result
          GetHeaderResponse(height, BlockHeader.read(hex))
        case GetHeaders(start_height, count, cp_height) =>
          val count = intField(json.result, "count")
        case GetHeaders(start_height, _, _) =>
          val max = intField(json.result, "max")
          val JString(hex) = json.result \ "hex"
          val bin = ByteVector.fromValidHex(hex).toArray
          val blockHeaders = bin.grouped(80).map(BlockHeader.read).toList
          GetHeadersResponse(start_height, blockHeaders, max)
        case GetMerkle(txid, height) =>
        case GetMerkle(txid, _) =>
          val JArray(hashes) = json.result \ "merkle"
          val leaves = hashes collect { case JString(value) => ByteVector32.fromValidHex((value)) }
          val leaves = hashes collect { case JString(value) => ByteVector32.fromValidHex(value) }
          val blockHeight = intField(json.result, "block_height")
          val JInt(pos) = json.result \ "pos"
          GetMerkleResponse(txid, leaves, blockHeight, pos.toInt)
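For orientation, the client above is an actor speaking the Electrum protocol: callers register with `AddStatusListener` and then exchange the `Request`/`Response` case classes shown in this diff. A minimal, hypothetical subscriber sketch; the actor wiring and package path are assumptions:

```scala
import akka.actor.{Actor, ActorLogging, ActorRef}
import fr.acinq.eclair.blockchain.electrum.ElectrumClient._

// Hypothetical listener: waits for the client to be ready, then asks for a single block header.
class HeaderWatcher(client: ActorRef) extends Actor with ActorLogging {
  client ! AddStatusListener(self)

  def receive: Receive = {
    case ElectrumReady(height, tip, _) =>
      log.info(s"connected at height=$height tip=${tip.hash}")
      client ! GetHeader(height)
    case GetHeaderResponse(height, header) =>
      log.info(s"header at $height: ${header.hash}")
    case ElectrumDisconnected =>
      log.warning("electrum client disconnected")
  }
}
```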
@ -739,7 +739,7 @@ object ElectrumWallet {
  }

  /**
    * @param scriptHash the script hash
    *
    * @return the ids of transactions that belong to our wallet history for this script hash but that we don't have
    *         and have no pending requests for.
    */
@ -136,7 +136,7 @@ class SqliteWalletDb(sqlite: Connection) extends WalletDb {
object SqliteWalletDb {

  import fr.acinq.eclair.wire.ChannelCodecs._
  import fr.acinq.eclair.wire.LightningMessageCodecs._
  import fr.acinq.eclair.wire.CommonCodecs._
  import scodec.Codec
  import scodec.bits.BitVector
  import scodec.codecs._
@ -1644,7 +1644,10 @@ class Channel(val nodeParams: NodeParams, val wallet: EclairWallet, remoteNodeId
    case Event(Status.Failure(_: CannotAffordFees), _) => stay

    // funding tx was confirmed in time, let's just ignore this
    case Event(BITCOIN_FUNDING_TIMEOUT, d: HasCommitments) => stay
    case Event(BITCOIN_FUNDING_TIMEOUT, _: HasCommitments) => stay

    // peer doesn't cancel the timer
    case Event(TickChannelOpenTimeout, _) => stay

    case Event(WatchEventSpent(BITCOIN_FUNDING_SPENT, tx), d: HasCommitments) if tx.txid == d.commitments.localCommit.publishableTxs.commitTx.tx.txid =>
      log.warning(s"processing local commit spent in catch-all handler")
@ -71,12 +71,18 @@ case class Commitments(localParams: LocalParams, remoteParams: RemoteParams,

  def addRemoteProposal(proposal: UpdateMessage): Commitments = Commitments.addRemoteProposal(this, proposal)

  def announceChannel: Boolean = (channelFlags & 0x01) != 0
  val announceChannel: Boolean = (channelFlags & 0x01) != 0

  def availableBalanceForSendMsat: Long = {
  lazy val availableBalanceForSendMsat: Long = {
    val reduced = CommitmentSpec.reduce(remoteCommit.spec, remoteChanges.acked, localChanges.proposed)
    val feesMsat = if (localParams.isFunder) Transactions.commitTxFee(Satoshi(remoteParams.dustLimitSatoshis), reduced).amount * 1000 else 0
    reduced.toRemoteMsat - remoteParams.channelReserveSatoshis * 1000 - feesMsat
    math.max(reduced.toRemoteMsat - remoteParams.channelReserveSatoshis * 1000 - feesMsat, 0)
  }

  lazy val availableBalanceForReceiveMsat: Long = {
    val reduced = CommitmentSpec.reduce(localCommit.spec, localChanges.acked, remoteChanges.proposed)
    val feesMsat = if (localParams.isFunder) 0 else Transactions.commitTxFee(Satoshi(localParams.dustLimitSatoshis), reduced).amount * 1000
    math.max(reduced.toRemoteMsat - localParams.channelReserveSatoshis * 1000 - feesMsat, 0)
  }
}
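A small worked sketch of the send-side formula above, with made-up numbers (all figures are assumptions, amounts in millisatoshi unless noted):

```scala
// Hypothetical figures, mirroring availableBalanceForSendMsat above.
val toRemoteMsat = 1000000000L        // our balance in the reduced remote commitment spec
val channelReserveSatoshis = 10000L   // remoteParams.channelReserveSatoshis
val commitTxFeeMsat = 7240000L        // commit tx fee, only paid by the funder
val isFunder = true

val feesMsat = if (isFunder) commitTxFeeMsat else 0L
val availableForSendMsat = math.max(toRemoteMsat - channelReserveSatoshis * 1000 - feesMsat, 0)
// max(1000000000 - 10000000 - 7240000, 0) = 982760000 msat
```

The notable behavior change is the `math.max(..., 0)` clamp: the balance can no longer go negative when fees and the reserve exceed what is left on our side.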
@ -17,7 +17,7 @@
package fr.acinq.eclair.crypto

import fr.acinq.bitcoin._
import fr.acinq.eclair.wire.LightningMessageCodecs
import fr.acinq.eclair.wire.CommonCodecs
import scodec.Codec

import scala.annotation.tailrec

@ -117,7 +117,7 @@ object ShaChain {
  import scodec.codecs._

  // codec for a single map entry (i.e. Vector[Boolean] -> ByteVector)
  val entryCodec = vectorOfN(uint16, bool) ~ variableSizeBytes(uint16, LightningMessageCodecs.bytes32)
  val entryCodec = vectorOfN(uint16, bool) ~ variableSizeBytes(uint16, CommonCodecs.bytes32)

  // codec for a Map[Vector[Boolean], ByteVector]: write all k -> v pairs using the codec defined above
  val mapCodec: Codec[Map[Vector[Boolean], ByteVector32]] = Codec[Map[Vector[Boolean], ByteVector32]](
@ -38,7 +38,7 @@ import SqliteUtils.ExtendedResultSet._
  }

  override def addOrUpdatePeer(nodeId: Crypto.PublicKey, nodeaddress: NodeAddress): Unit = {
    val data = LightningMessageCodecs.nodeaddress.encode(nodeaddress).require.toByteArray
    val data = CommonCodecs.nodeaddress.encode(nodeaddress).require.toByteArray
    using(sqlite.prepareStatement("UPDATE peers SET data=? WHERE node_id=?")) { update =>
      update.setBytes(1, data)
      update.setBytes(2, nodeId.value.toArray)
@ -65,7 +65,7 @@ import SqliteUtils.ExtendedResultSet._
      var m: Map[PublicKey, NodeAddress] = Map()
      while (rs.next()) {
        val nodeid = PublicKey(rs.getByteVector("node_id"))
        val nodeaddress = LightningMessageCodecs.nodeaddress.decode(BitVector(rs.getBytes("data"))).require.value
        val nodeaddress = CommonCodecs.nodeaddress.decode(BitVector(rs.getBytes("data"))).require.value
        m += (nodeid -> nodeaddress)
      }
      m
@ -47,9 +47,9 @@ class Authenticator(nodeParams: NodeParams) extends Actor with DiagnosticActorLo
        KeyPair(nodeParams.nodeId.value, nodeParams.privateKey.value),
        remoteNodeId_opt.map(_.value),
        connection = connection,
        codec = LightningMessageCodecs.cachedLightningMessageCodec))
        codec = LightningMessageCodecs.lightningMessageCodec))
      context watch transport
      context become (ready(switchboard, authenticating + (transport -> pending)))
      context become ready(switchboard, authenticating + (transport -> pending))

    case HandshakeCompleted(connection, transport, remoteNodeId) if authenticating.contains(transport) =>
      val pendingAuth = authenticating(transport)
@ -29,13 +29,11 @@ import fr.acinq.bitcoin.{ByteVector32, DeterministicWallet, MilliSatoshi, Protoc
import fr.acinq.eclair.blockchain.EclairWallet
import fr.acinq.eclair.channel._
import fr.acinq.eclair.crypto.TransportHandler
import fr.acinq.eclair.secureRandom
import fr.acinq.eclair.router._
import fr.acinq.eclair.wire._
import fr.acinq.eclair.{wire, _}
import fr.acinq.eclair.{secureRandom, wire, _}
import scodec.Attempt
import scodec.bits.ByteVector

import scala.compat.Platform
import scala.concurrent.duration._
import scala.util.Random
@ -88,7 +86,7 @@ class Peer(nodeParams: NodeParams, remoteNodeId: PublicKey, authenticator: Actor
      context.actorOf(Client.props(nodeParams, authenticator, address, remoteNodeId, origin_opt = None))
      log.info(s"reconnecting to $address")
      // exponential backoff retry with a finite max
      setTimer(RECONNECT_TIMER, Reconnect, Math.min(10 + Math.pow(2, d.attempts), 20) seconds, repeat = false)
      setTimer(RECONNECT_TIMER, Reconnect, Math.min(10 + Math.pow(2, d.attempts), 3600) seconds, repeat = false)
      stay using d.copy(attempts = d.attempts + 1)
  }
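The retry delay above grows as 10 + 2^attempts seconds, now capped at 3600 s instead of 20 s. A small sketch of the resulting schedule (the attempt numbers are just an illustration):

```scala
// Delay used before reconnection attempt n, mirroring the setTimer call above.
def reconnectDelaySeconds(attempts: Int): Double =
  Math.min(10 + Math.pow(2, attempts), 3600)

// attempts = 1, 2, 3, 4, 5, 6, ... -> 12, 14, 18, 26, 42, 74, ... seconds, capped at 3600 (1 hour)
(1 to 13).foreach(n => println(s"attempt $n -> ${reconnectDelaySeconds(n)} s"))
```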
@ -207,6 +205,11 @@ class Peer(nodeParams: NodeParams, remoteNodeId: PublicKey, authenticator: Actor
      d.transport ! PoisonPill
      stay

    case Event(unhandledMsg: LightningMessage, d: InitializingData) =>
      // we ack unhandled messages because we don't want to block further reads on the connection
      d.transport ! TransportHandler.ReadAck(unhandledMsg)
      log.warning(s"acking unhandled message $unhandledMsg")
      stay
  }

  when(CONNECTED) {
@ -474,6 +477,12 @@ class Peer(nodeParams: NodeParams, remoteNodeId: PublicKey, authenticator: Actor
      d.channels.values.toSet[ActorRef].foreach(_ ! INPUT_DISCONNECTED) // we deduplicate with toSet because there might be two entries per channel (tmp id and final id)
      self ! h
      goto(DISCONNECTED) using DisconnectedData(d.address_opt, d.channels.collect { case (k: FinalChannelId, v) => (k, v) })

    case Event(unhandledMsg: LightningMessage, d: ConnectedData) =>
      // we ack unhandled messages because we don't want to block further reads on the connection
      d.transport ! TransportHandler.ReadAck(unhandledMsg)
      log.warning(s"acking unhandled message $unhandledMsg")
      stay
  }

  whenUnhandled {
@ -16,8 +16,6 @@
package fr.acinq.eclair.payment

import java.math.BigInteger

import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey}
import fr.acinq.bitcoin.{MilliSatoshi, _}
import fr.acinq.eclair.ShortChannelId
@ -303,7 +301,7 @@ object PaymentRequest {

  object Codecs {

    import fr.acinq.eclair.wire.LightningMessageCodecs._
    import fr.acinq.eclair.wire.CommonCodecs._
    import scodec.bits.BitVector
    import scodec.codecs._
    import scodec.{Attempt, Codec, DecodeResult}
@ -399,6 +397,7 @@ object PaymentRequest {
      case a if a.last == 'n' => Some(MilliSatoshi(a.dropRight(1).toLong * 100L))
      case a if a.last == 'u' => Some(MilliSatoshi(a.dropRight(1).toLong * 100000L))
      case a if a.last == 'm' => Some(MilliSatoshi(a.dropRight(1).toLong * 100000000L))
      case a => Some(MilliSatoshi(a.toLong * 100000000000L))
    }

    def encode(amount: Option[MilliSatoshi]): String = {
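For reference, the multipliers in the decode cases above convert a BOLT11 amount string (expressed in BTC with an optional unit suffix) into millisatoshi. A small worked sketch; the helper name is hypothetical and only the suffixes visible in this hunk are covered:

```scala
// Hypothetical helper mirroring the decode cases above: amount strings come from a BOLT11 invoice.
def decodeAmountMsat(amount: String): Long = amount match {
  case a if a.last == 'n' => a.dropRight(1).toLong * 100L        // nano-BTC: 1 nBTC = 100 msat
  case a if a.last == 'u' => a.dropRight(1).toLong * 100000L     // micro-BTC: 1 uBTC = 100 000 msat
  case a if a.last == 'm' => a.dropRight(1).toLong * 100000000L  // milli-BTC: 1 mBTC = 100 000 000 msat
  case a => a.toLong * 100000000000L                             // whole BTC
}

// "2500u" -> 250 000 000 msat (250 000 sat); "1m" -> 100 000 000 msat (100 000 sat)
```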
@ -47,6 +47,9 @@ case class ForwardFulfill(fulfill: UpdateFulfillHtlc, to: Origin, htlc: UpdateAd
case class ForwardFail(fail: UpdateFailHtlc, to: Origin, htlc: UpdateAddHtlc) extends ForwardMessage
case class ForwardFailMalformed(fail: UpdateFailMalformedHtlc, to: Origin, htlc: UpdateAddHtlc) extends ForwardMessage

case object GetUsableBalances
case class UsableBalances(remoteNodeId: PublicKey, shortChannelId: ShortChannelId, canSendMsat: Long, canReceiveMsat: Long, isPublic: Boolean)

// @formatter:on
@ -69,10 +72,20 @@ class Relayer(nodeParams: NodeParams, register: ActorRef, paymentHandler: ActorR
  override def receive: Receive = main(Map.empty, new mutable.HashMap[PublicKey, mutable.Set[ShortChannelId]] with mutable.MultiMap[PublicKey, ShortChannelId])

  def main(channelUpdates: Map[ShortChannelId, OutgoingChannel], node2channels: mutable.HashMap[PublicKey, mutable.Set[ShortChannelId]] with mutable.MultiMap[PublicKey, ShortChannelId]): Receive = {
    case GetUsableBalances =>
      sender ! channelUpdates.values
        .filter(o => Announcements.isEnabled(o.channelUpdate.channelFlags))
        .map(o => UsableBalances(
          remoteNodeId = o.nextNodeId,
          shortChannelId = o.channelUpdate.shortChannelId,
          canSendMsat = o.commitments.availableBalanceForSendMsat,
          canReceiveMsat = o.commitments.availableBalanceForReceiveMsat,
          isPublic = o.commitments.announceChannel))

    case LocalChannelUpdate(_, channelId, shortChannelId, remoteNodeId, _, channelUpdate, commitments) =>
      log.debug(s"updating local channel info for channelId=$channelId shortChannelId=$shortChannelId remoteNodeId=$remoteNodeId channelUpdate={} commitments={}", channelUpdate, commitments)
      context become main(channelUpdates + (channelUpdate.shortChannelId -> OutgoingChannel(remoteNodeId, channelUpdate, commitments.availableBalanceForSendMsat)), node2channels.addBinding(remoteNodeId, channelUpdate.shortChannelId))
      val channelUpdates1 = channelUpdates + (channelUpdate.shortChannelId -> OutgoingChannel(remoteNodeId, channelUpdate, commitments))
      context become main(channelUpdates1, node2channels.addBinding(remoteNodeId, channelUpdate.shortChannelId))

    case LocalChannelDown(_, channelId, shortChannelId, remoteNodeId) =>
      log.debug(s"removed local channel info for channelId=$channelId shortChannelId=$shortChannelId")
@ -80,7 +93,7 @@ class Relayer(nodeParams: NodeParams, register: ActorRef, paymentHandler: ActorR

    case AvailableBalanceChanged(_, _, shortChannelId, _, commitments) =>
      val channelUpdates1 = channelUpdates.get(shortChannelId) match {
        case Some(c: OutgoingChannel) => channelUpdates + (shortChannelId -> c.copy(availableBalanceMsat = commitments.availableBalanceForSendMsat))
        case Some(c: OutgoingChannel) => channelUpdates + (shortChannelId -> c.copy(commitments = commitments))
        case None => channelUpdates // we only consider the balance if we have the channel_update
      }
      context become main(channelUpdates1, node2channels)
@ -197,7 +210,7 @@ object Relayer {
object Relayer {
  def props(nodeParams: NodeParams, register: ActorRef, paymentHandler: ActorRef) = Props(classOf[Relayer], nodeParams, register, paymentHandler)

  case class OutgoingChannel(nextNodeId: PublicKey, channelUpdate: ChannelUpdate, availableBalanceMsat: Long)
  case class OutgoingChannel(nextNodeId: PublicKey, channelUpdate: ChannelUpdate, commitments: Commitments)

  // @formatter:off
  sealed trait NextPayload
@ -302,10 +315,10 @@ object Relayer {
        val channelInfo_opt = channelUpdates.get(shortChannelId)
        val channelUpdate_opt = channelInfo_opt.map(_.channelUpdate)
        val relayResult = relayOrFail(relayPayload, channelUpdate_opt)
        log.debug(s"candidate channel for htlc #${add.id} paymentHash=${add.paymentHash}: shortChannelId={} balanceMsat={} channelUpdate={} relayResult={}", shortChannelId, channelInfo_opt.map(_.availableBalanceMsat).getOrElse(""), channelUpdate_opt.getOrElse(""), relayResult)
        log.debug(s"candidate channel for htlc #${add.id} paymentHash=${add.paymentHash}: shortChannelId={} balanceMsat={} channelUpdate={} relayResult={}", shortChannelId, channelInfo_opt.map(_.commitments.availableBalanceForSendMsat).getOrElse(""), channelUpdate_opt.getOrElse(""), relayResult)
        (shortChannelId, channelInfo_opt, relayResult)
      }
      .collect { case (shortChannelId, Some(channelInfo), Right(_)) => (shortChannelId, channelInfo.availableBalanceMsat) }
      .collect { case (shortChannelId, Some(channelInfo), Right(_)) => (shortChannelId, channelInfo.commitments.availableBalanceForSendMsat) }
      .filter(_._2 > relayPayload.payload.amtToForward) // we only keep channels that have enough balance to handle this payment
      .toList // needed for ordering
      .sortBy(_._2) // we want to use the channel with the lowest available balance that can process the payment
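To illustrate the selection logic above with made-up balances: channels that cannot carry the payment are dropped, and among the rest the candidate with the lowest available balance is preferred, which keeps larger channels free for larger payments. A standalone sketch (the short channel ids and amounts are assumptions):

```scala
// (shortChannelId, availableBalanceForSendMsat) candidates for an HTLC of 50 000 000 msat.
val amtToForwardMsat = 50000000L
val candidates = List(
  ("565643x1216x0", 40000000L),
  ("565644x100x0", 60000000L),
  ("565645x27x1", 500000000L))

val preferred = candidates
  .filter(_._2 > amtToForwardMsat) // drop channels that cannot carry the payment
  .sortBy(_._2)                    // prefer the smallest balance that still fits
  .headOption                      // -> Some(("565644x100x0", 60000000))

println(preferred)
```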
@ -124,6 +124,7 @@ object Announcements {
    * @return true if channel updates are "equal"
    */
  def areSame(u1: ChannelUpdate, u2: ChannelUpdate): Boolean =
    // NB: On Android, we don't compare chain_hash and signature, because they are stripped
    u1.copy(chainHash = ByteVector32.Zeroes, signature = ByteVector64.Zeroes, timestamp = 0) == u2.copy(chainHash = ByteVector32.Zeroes, signature = ByteVector64.Zeroes, timestamp = 0) // README: on Android we discard chainHash too

  def makeMessageFlags(hasOptionChannelHtlcMax: Boolean): Byte = BitVector.bits(hasOptionChannelHtlcMax :: Nil).padLeft(8).toByte()
@ -20,6 +20,7 @@ import akka.Done
import akka.actor.{ActorRef, Props, Status}
import akka.event.Logging.MDC
import fr.acinq.bitcoin.{ByteVector32, ByteVector64, Satoshi}
import fr.acinq.bitcoin.{ByteVector32, ByteVector64}
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.bitcoin.Script.{pay2wsh, write}
import fr.acinq.eclair._
@ -140,6 +141,8 @@ class Router(nodeParams: NodeParams, watcher: ActorRef, initialized: Option[Prom
    // this will be used to calculate routes
    val graph = DirectedGraph.makeGraph(initChannelUpdates)

    // On Android we don't watch the funding tx outputs of public channels

    log.info(s"initialization completed, ready to process messages")
    Try(initialized.map(_.success(Done)))
    startWith(NORMAL, Data(Map.empty, initChannels, initChannelUpdates, Stash(Map.empty, Map.empty), awaiting = Map.empty, privateChannels = Map.empty, privateUpdates = Map.empty, excludedChannels = Set.empty, graph, sync = Map.empty))
@ -26,6 +26,7 @@ import fr.acinq.eclair.crypto.ShaChain
import fr.acinq.eclair.payment.{Local, Origin, Relayed}
import fr.acinq.eclair.transactions.Transactions._
import fr.acinq.eclair.transactions._
import fr.acinq.eclair.wire.CommonCodecs._
import fr.acinq.eclair.wire.LightningMessageCodecs._
import grizzled.slf4j.Logging
import scodec.bits.BitVector
@ -35,7 +36,6 @@ import scodec.{Attempt, Codec}
import scala.compat.Platform
import scala.concurrent.duration._

/**
  * Created by PM on 02/06/2017.
  */
@ -53,10 +53,10 @@ object ChannelCodecs extends Logging {
  val localParamsCodec: Codec[LocalParams] = (
    ("nodeId" | publicKey) ::
      ("channelPath" | keyPathCodec) ::
      ("dustLimitSatoshis" | uint64) ::
      ("maxHtlcValueInFlightMsat" | uint64ex) ::
      ("channelReserveSatoshis" | uint64) ::
      ("htlcMinimumMsat" | uint64) ::
      ("dustLimitSatoshis" | uint64overflow) ::
      ("maxHtlcValueInFlightMsat" | uint64) ::
      ("channelReserveSatoshis" | uint64overflow) ::
      ("htlcMinimumMsat" | uint64overflow) ::
      ("toSelfDelay" | uint16) ::
      ("maxAcceptedHtlcs" | uint16) ::
      ("isFunder" | bool) ::
@ -66,10 +66,10 @@ object ChannelCodecs extends Logging {

  val remoteParamsCodec: Codec[RemoteParams] = (
    ("nodeId" | publicKey) ::
      ("dustLimitSatoshis" | uint64) ::
      ("maxHtlcValueInFlightMsat" | uint64ex) ::
      ("channelReserveSatoshis" | uint64) ::
      ("htlcMinimumMsat" | uint64) ::
      ("dustLimitSatoshis" | uint64overflow) ::
      ("maxHtlcValueInFlightMsat" | uint64) ::
      ("channelReserveSatoshis" | uint64overflow) ::
      ("htlcMinimumMsat" | uint64overflow) ::
      ("toSelfDelay" | uint16) ::
      ("maxAcceptedHtlcs" | uint16) ::
      ("fundingPubKey" | publicKey) ::
@ -97,14 +97,14 @@ object ChannelCodecs extends Logging {
  val commitmentSpecCodec: Codec[CommitmentSpec] = (
    ("htlcs" | setCodec(htlcCodec)) ::
      ("feeratePerKw" | uint32) ::
      ("toLocalMsat" | uint64) ::
      ("toRemoteMsat" | uint64)).as[CommitmentSpec]
      ("toLocalMsat" | uint64overflow) ::
      ("toRemoteMsat" | uint64overflow)).as[CommitmentSpec]

  def outPointCodec: Codec[OutPoint] = variableSizeBytes(uint16, bytes.xmap(d => OutPoint.read(d.toArray), d => OutPoint.write(d)))
  val outPointCodec: Codec[OutPoint] = variableSizeBytes(uint16, bytes.xmap(d => OutPoint.read(d.toArray), d => OutPoint.write(d)))

  def txOutCodec: Codec[TxOut] = variableSizeBytes(uint16, bytes.xmap(d => TxOut.read(d.toArray), d => TxOut.write(d)))
  val txOutCodec: Codec[TxOut] = variableSizeBytes(uint16, bytes.xmap(d => TxOut.read(d.toArray), d => TxOut.write(d)))

  def txCodec: Codec[Transaction] = variableSizeBytes(uint16, bytes.xmap(d => Transaction.read(d.toArray), d => Transaction.write(d)))
  val txCodec: Codec[Transaction] = variableSizeBytes(uint16, bytes.xmap(d => Transaction.read(d.toArray), d => Transaction.write(d)))

  val inputInfoCodec: Codec[InputInfo] = (
    ("outPoint" | outPointCodec) ::
@ -142,12 +142,12 @@ object ChannelCodecs extends Logging {
      ("htlcTxsAndSigs" | listOfN(uint16, htlcTxAndSigsCodec))).as[PublishableTxs]

  val localCommitCodec: Codec[LocalCommit] = (
    ("index" | uint64) ::
    ("index" | uint64overflow) ::
      ("spec" | commitmentSpecCodec) ::
      ("publishableTxs" | publishableTxsCodec)).as[LocalCommit]

  val remoteCommitCodec: Codec[RemoteCommit] = (
    ("index" | uint64) ::
    ("index" | uint64overflow) ::
      ("spec" | commitmentSpecCodec) ::
      ("txid" | bytes32) ::
      ("remotePerCommitmentPoint" | publicKey)).as[RemoteCommit]
@ -167,7 +167,7 @@ object ChannelCodecs extends Logging {
  val waitingForRevocationCodec: Codec[WaitingForRevocation] = (
    ("nextRemoteCommit" | remoteCommitCodec) ::
      ("sent" | commitSigCodec) ::
      ("sentAfterLocalCommitIndex" | uint64) ::
      ("sentAfterLocalCommitIndex" | uint64overflow) ::
      ("reSignAsap" | bool)).as[WaitingForRevocation]

  val localCodec: Codec[Local] = (
@ -178,8 +178,8 @@ object ChannelCodecs extends Logging {
  val relayedCodec: Codec[Relayed] = (
    ("originChannelId" | bytes32) ::
      ("originHtlcId" | int64) ::
      ("amountMsatIn" | uint64) ::
      ("amountMsatOut" | uint64)).as[Relayed]
      ("amountMsatIn" | uint64overflow) ::
      ("amountMsatOut" | uint64overflow)).as[Relayed]

  // this is for backward compatibility to handle legacy payments that didn't have identifiers
  val UNKNOWN_UUID = UUID.fromString("00000000-0000-0000-0000-000000000000")
@ -211,8 +211,8 @@ object ChannelCodecs extends Logging {
      ("remoteCommit" | remoteCommitCodec) ::
      ("localChanges" | localChangesCodec) ::
      ("remoteChanges" | remoteChangesCodec) ::
      ("localNextHtlcId" | uint64) ::
      ("remoteNextHtlcId" | uint64) ::
      ("localNextHtlcId" | uint64overflow) ::
      ("remoteNextHtlcId" | uint64overflow) ::
      ("originChannels" | originsMapCodec) ::
      ("remoteNextCommitInfo" | either(bool, waitingForRevocationCodec, publicKey)) ::
      ("commitInput" | inputInfoCodec) ::
@ -17,8 +17,8 @@
package fr.acinq.eclair.wire

import fr.acinq.eclair.channel.{CMD_FAIL_HTLC, CMD_FAIL_MALFORMED_HTLC, CMD_FULFILL_HTLC, Command}
import fr.acinq.eclair.wire.CommonCodecs._
import fr.acinq.eclair.wire.FailureMessageCodecs.failureMessageCodec
import fr.acinq.eclair.wire.LightningMessageCodecs._
import scodec.Codec
import scodec.codecs._
@ -0,0 +1,128 @@
/*
 * Copyright 2019 ACINQ SAS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package fr.acinq.eclair.wire

import java.net.{Inet4Address, Inet6Address, InetAddress}

import fr.acinq.bitcoin.{ByteVector32, ByteVector64}
import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey}
import fr.acinq.eclair.{ShortChannelId, UInt64}
import org.apache.commons.codec.binary.Base32
import scodec.{Attempt, Codec, DecodeResult, Err, SizeBound}
import scodec.bits.{BitVector, ByteVector}
import scodec.codecs._

import scala.util.Try

/**
  * Created by t-bast on 20/06/2019.
  */

object CommonCodecs {

  /**
    * Discriminator codec with a default fallback codec (of the same type).
    */
  def discriminatorWithDefault[A](discriminator: Codec[A], fallback: Codec[A]): Codec[A] = new Codec[A] {
    def sizeBound: SizeBound = discriminator.sizeBound | fallback.sizeBound

    def encode(e: A): Attempt[BitVector] = discriminator.encode(e).recoverWith { case _ => fallback.encode(e) }

    def decode(b: BitVector): Attempt[DecodeResult[A]] = discriminator.decode(b).recoverWith {
      case _: KnownDiscriminatorType[_]#UnknownDiscriminator => fallback.decode(b)
    }
  }

  // this codec can be safely used for values < 2^63 and will fail otherwise
  // (for something smarter see https://github.com/yzernik/bitcoin-scodec/blob/master/src/main/scala/io/github/yzernik/bitcoinscodec/structures/UInt64.scala)
  val uint64overflow: Codec[Long] = int64.narrow(l => if (l >= 0) Attempt.Successful(l) else Attempt.failure(Err(s"overflow for value $l")), l => l)

  val uint64: Codec[UInt64] = bytes(8).xmap(b => UInt64(b), a => a.toByteVector.padLeft(8))

  val uint64L: Codec[UInt64] = bytes(8).xmap(b => UInt64(b.reverse), a => a.toByteVector.padLeft(8).reverse)

  /**
    * We impose a minimal encoding on varint values to ensure that signed hashes can be reproduced easily.
    * If a value could be encoded with less bytes, it's considered invalid and results in a failed decoding attempt.
    *
    * @param codec the integer codec (depends on the value).
    * @param min   the minimal value that should be encoded.
    */
  def uint64min(codec: Codec[UInt64], min: UInt64): Codec[UInt64] = codec.exmap({
    case i if i < min => Attempt.failure(Err("varint was not minimally encoded"))
    case i => Attempt.successful(i)
  }, Attempt.successful)

  // Bitcoin-style varint codec (CompactSize).
  // See https://bitcoin.org/en/developer-reference#compactsize-unsigned-integers for reference.
  val varint: Codec[UInt64] = discriminatorWithDefault(
    discriminated[UInt64].by(uint8L)
      .\(0xff) { case i if i >= UInt64(0x100000000L) => i }(uint64min(uint64L, UInt64(0x100000000L)))
      .\(0xfe) { case i if i >= UInt64(0x10000) => i }(uint64min(uint32L.xmap(UInt64(_), _.toBigInt.toLong), UInt64(0x10000)))
      .\(0xfd) { case i if i >= UInt64(0xfd) => i }(uint64min(uint16L.xmap(UInt64(_), _.toBigInt.toInt), UInt64(0xfd))),
    uint8L.xmap(UInt64(_), _.toBigInt.toInt)
  )

  // This codec can be safely used for values < 2^63 and will fail otherwise.
  // It is useful in combination with variableSizeBytesLong to encode/decode TLV lengths because those will always be < 2^63.
  val varintoverflow: Codec[Long] = varint.narrow(l => if (l <= UInt64(Long.MaxValue)) Attempt.successful(l.toBigInt.toLong) else Attempt.failure(Err(s"overflow for value $l")), l => UInt64(l))

  val bytes32: Codec[ByteVector32] = limitedSizeBytes(32, bytesStrict(32).xmap(d => ByteVector32(d), d => d.bytes))

  val bytes64: Codec[ByteVector64] = limitedSizeBytes(64, bytesStrict(64).xmap(d => ByteVector64(d), d => d.bytes))

  val sha256: Codec[ByteVector32] = bytes32

  val varsizebinarydata: Codec[ByteVector] = variableSizeBytes(uint16, bytes)

  val listofsignatures: Codec[List[ByteVector64]] = listOfN(uint16, bytes64)

  val ipv4address: Codec[Inet4Address] = bytes(4).xmap(b => InetAddress.getByAddress(b.toArray).asInstanceOf[Inet4Address], a => ByteVector(a.getAddress))

  val ipv6address: Codec[Inet6Address] = bytes(16).exmap(b => Attempt.fromTry(Try(Inet6Address.getByAddress(null, b.toArray, null))), a => Attempt.fromTry(Try(ByteVector(a.getAddress))))

  def base32(size: Int): Codec[String] = bytes(size).xmap(b => new Base32().encodeAsString(b.toArray).toLowerCase, a => ByteVector(new Base32().decode(a.toUpperCase())))

  val nodeaddress: Codec[NodeAddress] =
    discriminated[NodeAddress].by(uint8)
      .typecase(1, (ipv4address :: uint16).as[IPv4])
      .typecase(2, (ipv6address :: uint16).as[IPv6])
      .typecase(3, (base32(10) :: uint16).as[Tor2])
      .typecase(4, (base32(35) :: uint16).as[Tor3])

  // this one is a bit different from most other codecs: the first 'len' element is *not* the number of items
  // in the list but rather the number of bytes of the encoded list. The rationale is once we've read this
  // number of bytes we can just skip to the next field
  val listofnodeaddresses: Codec[List[NodeAddress]] = variableSizeBytes(uint16, list(nodeaddress))

  val shortchannelid: Codec[ShortChannelId] = int64.xmap(l => ShortChannelId(l), s => s.toLong)

  val privateKey: Codec[PrivateKey] = Codec[PrivateKey](
    (priv: PrivateKey) => bytes(32).encode(priv.value),
    (wire: BitVector) => bytes(32).decode(wire).map(_.map(b => PrivateKey(b)))
  )

  val publicKey: Codec[PublicKey] = Codec[PublicKey](
    (pub: PublicKey) => bytes(33).encode(pub.value),
    (wire: BitVector) => bytes(33).decode(wire).map(_.map(b => PublicKey(b)))
  )

  val rgb: Codec[Color] = bytes(3).xmap(buf => Color(buf(0), buf(1), buf(2)), t => ByteVector(t.r, t.g, t.b))

  def zeropaddedstring(size: Int): Codec[String] = fixedSizeBytes(32, utf8).xmap(s => s.takeWhile(_ != '\u0000'), s => s)

}
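A quick round-trip sketch of the CompactSize `varint` codec defined above; the chosen value exercises the 0xfd-prefixed branch. This is illustrative test code, not part of the change:

```scala
import fr.acinq.eclair.UInt64
import fr.acinq.eclair.wire.CommonCodecs.varint

// 0x00..0xfc fit in a single byte; 0xfd..0xffff get a 0xfd prefix followed by a little-endian uint16.
val encoded = varint.encode(UInt64(0xfd)).require   // bits for 0xfd fd 00
val decoded = varint.decode(encoded).require.value  // UInt64(253)
assert(decoded == UInt64(0xfd))
```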
@ -17,9 +17,10 @@
package fr.acinq.eclair.wire

import fr.acinq.bitcoin.ByteVector32
import fr.acinq.eclair.wire.LightningMessageCodecs.{bytes32, channelUpdateCodec, uint64}
import fr.acinq.eclair.wire.CommonCodecs.{sha256, uint64overflow}
import fr.acinq.eclair.wire.LightningMessageCodecs.channelUpdateCodec
import scodec.codecs._
import scodec.{Attempt, Codec}
import scodec.Attempt

/**
  * see https://github.com/lightningnetwork/lightning-rfc/blob/master/04-onion-routing.md
@ -63,8 +64,6 @@ object FailureMessageCodecs {
  val NODE = 0x2000
  val UPDATE = 0x1000

  val sha256Codec: Codec[ByteVector32] = ("sha256Codec" | bytes32)

  val channelUpdateCodecWithType = LightningMessageCodecs.lightningMessageCodec.narrow[ChannelUpdate](f => Attempt.successful(f.asInstanceOf[ChannelUpdate]), g => g)

  // NB: for historical reasons some implementations were including/omitting the message type (258 for ChannelUpdate)
@ -76,22 +75,22 @@ object FailureMessageCodecs {
    .typecase(NODE | 2, provide(TemporaryNodeFailure))
    .typecase(PERM | 2, provide(PermanentNodeFailure))
    .typecase(PERM | NODE | 3, provide(RequiredNodeFeatureMissing))
    .typecase(BADONION | PERM | 4, sha256Codec.as[InvalidOnionVersion])
    .typecase(BADONION | PERM | 5, sha256Codec.as[InvalidOnionHmac])
    .typecase(BADONION | PERM | 6, sha256Codec.as[InvalidOnionKey])
    .typecase(UPDATE | 7, (("channelUpdate" | channelUpdateWithLengthCodec)).as[TemporaryChannelFailure])
    .typecase(BADONION | PERM | 4, sha256.as[InvalidOnionVersion])
    .typecase(BADONION | PERM | 5, sha256.as[InvalidOnionHmac])
    .typecase(BADONION | PERM | 6, sha256.as[InvalidOnionKey])
    .typecase(UPDATE | 7, ("channelUpdate" | channelUpdateWithLengthCodec).as[TemporaryChannelFailure])
    .typecase(PERM | 8, provide(PermanentChannelFailure))
    .typecase(PERM | 9, provide(RequiredChannelFeatureMissing))
    .typecase(PERM | 10, provide(UnknownNextPeer))
    .typecase(UPDATE | 11, (("amountMsat" | uint64) :: ("channelUpdate" | channelUpdateWithLengthCodec)).as[AmountBelowMinimum])
    .typecase(UPDATE | 12, (("amountMsat" | uint64) :: ("channelUpdate" | channelUpdateWithLengthCodec)).as[FeeInsufficient])
    .typecase(UPDATE | 11, (("amountMsat" | uint64overflow) :: ("channelUpdate" | channelUpdateWithLengthCodec)).as[AmountBelowMinimum])
    .typecase(UPDATE | 12, (("amountMsat" | uint64overflow) :: ("channelUpdate" | channelUpdateWithLengthCodec)).as[FeeInsufficient])
    .typecase(UPDATE | 13, (("expiry" | uint32) :: ("channelUpdate" | channelUpdateWithLengthCodec)).as[IncorrectCltvExpiry])
    .typecase(UPDATE | 14, (("channelUpdate" | channelUpdateWithLengthCodec)).as[ExpiryTooSoon])
    .typecase(UPDATE | 14, ("channelUpdate" | channelUpdateWithLengthCodec).as[ExpiryTooSoon])
    .typecase(UPDATE | 20, (("messageFlags" | byte) :: ("channelFlags" | byte) :: ("channelUpdate" | channelUpdateWithLengthCodec)).as[ChannelDisabled])
    .typecase(PERM | 15, (("amountMsat" | withDefaultValue(optional(bitsRemaining, uint64), 0L))).as[IncorrectOrUnknownPaymentDetails])
    .typecase(PERM | 15, ("amountMsat" | withDefaultValue(optional(bitsRemaining, uint64overflow), 0L)).as[IncorrectOrUnknownPaymentDetails])
    .typecase(PERM | 16, provide(IncorrectPaymentAmount))
    .typecase(17, provide(FinalExpiryTooSoon))
    .typecase(18, (("expiry" | uint32)).as[FinalIncorrectCltvExpiry])
    .typecase(19, (("amountMsat" | uint64)).as[FinalIncorrectHtlcAmount])
    .typecase(18, ("expiry" | uint32).as[FinalIncorrectCltvExpiry])
    .typecase(19, ("amountMsat" | uint64overflow).as[FinalIncorrectHtlcAmount])
    .typecase(21, provide(ExpiryTooFar))
}
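A small round-trip sketch of the failure codec above; the 0x2002 value mentioned in the comment comes from NODE | 2, and this snippet is purely illustrative:

```scala
import fr.acinq.eclair.wire.FailureMessageCodecs.failureMessageCodec
import fr.acinq.eclair.wire.TemporaryNodeFailure

// TemporaryNodeFailure carries no payload, so it round-trips to just its 16-bit failure code (NODE | 2).
val bits = failureMessageCodec.encode(TemporaryNodeFailure).require
assert(failureMessageCodec.decode(bits).require.value == TemporaryNodeFailure)
```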
@ -1,75 +0,0 @@
/*
 * Copyright 2019 ACINQ SAS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package fr.acinq.eclair.wire

import scodec.bits.{BitVector, ByteVector}
import scodec.{Attempt, Codec, DecodeResult, Err, SizeBound, codecs}

/**
  *
  * REMOVE THIS WHEN A NEW VERSION OF SCODEC IS RELEASED THAT INCLUDES CHANGES MADE IN
  * https://github.com/scodec/scodec/pull/99/files
  *
  * Created by PM on 02/06/2017.
  */
final class FixedSizeStrictCodec[A](size: Long, codec: Codec[A]) extends Codec[A] {

  override def sizeBound = SizeBound.exact(size)

  override def encode(a: A) = for {
    encoded <- codec.encode(a)
    result <- {
      if (encoded.size != size)
        Attempt.failure(Err(s"[$a] requires ${encoded.size} bits but field is fixed size of exactly $size bits"))
      else
        Attempt.successful(encoded.padTo(size))
    }
  } yield result

  override def decode(buffer: BitVector) = {
    if (buffer.size == size) {
      codec.decode(buffer.take(size)) map { res =>
        DecodeResult(res.value, buffer.drop(size))
      }
    } else {
      Attempt.failure(Err(s"expected exactly $size bits but got ${buffer.size} bits"))
    }
  }

  override def toString = s"fixedSizeBitsStrict($size, $codec)"
}

object FixedSizeStrictCodec {
  /**
    * Encodes by returning the supplied byte vector if its length is `size` bytes, otherwise returning error;
    * decodes by taking `size * 8` bits from the supplied bit vector and converting to a byte vector.
    *
    * @param size number of bits to encode/decode
    * @group bits
    */
  def bytesStrict(size: Int): Codec[ByteVector] = new Codec[ByteVector] {
    private val codec = new FixedSizeStrictCodec(size * 8L, codecs.bits).xmap[ByteVector](_.toByteVector, _.toBitVector)

    def sizeBound = codec.sizeBound

    def encode(b: ByteVector) = codec.encode(b)

    def decode(b: BitVector) = codec.decode(b)

    override def toString = s"bytesStrict($size)"
  }
}
@ -16,80 +16,18 @@
package fr.acinq.eclair.wire

import java.net.{Inet4Address, Inet6Address, InetAddress}

import com.google.common.cache.{CacheBuilder, CacheLoader}
import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey}
import fr.acinq.bitcoin.{ByteVector32, ByteVector64}
import fr.acinq.eclair.crypto.Sphinx
import fr.acinq.eclair.wire.FixedSizeStrictCodec.bytesStrict
import fr.acinq.eclair.{ShortChannelId, UInt64, wire}
import org.apache.commons.codec.binary.Base32
import scodec.bits.{BitVector, ByteVector}
import fr.acinq.eclair.wire
import fr.acinq.eclair.wire.CommonCodecs._
import scodec.bits.ByteVector
import scodec.codecs._
import scodec.{Attempt, Codec, DecodeResult, Err, SizeBound}

import scala.util.{Failure, Success, Try}

import scodec.Codec

/**
  * Created by PM on 15/11/2016.
  */
object LightningMessageCodecs {

  def attemptFromTry[T](f: => T): Attempt[T] = Try(f) match {
    case Success(t) => Attempt.successful(t)
    case Failure(t) => Attempt.failure(Err(s"deserialization error: ${t.getMessage}"))
  }

  // this codec can be safely used for values < 2^63 and will fail otherwise
  // (for something smarter see https://github.com/yzernik/bitcoin-scodec/blob/master/src/main/scala/io/github/yzernik/bitcoinscodec/structures/UInt64.scala)
  val uint64: Codec[Long] = int64.narrow(l => if (l >= 0) Attempt.Successful(l) else Attempt.failure(Err(s"overflow for value $l")), l => l)

  val uint64ex: Codec[UInt64] = bytes(8).xmap(b => UInt64(b), a => a.toByteVector.padLeft(8))

  def bytes32: Codec[ByteVector32] = limitedSizeBytes(32, bytesStrict(32).xmap(d => ByteVector32(d), d => d.bytes))

  def bytes64: Codec[ByteVector64] = limitedSizeBytes(64, bytesStrict(64).xmap(d => ByteVector64(d), d => d.bytes))

  def varsizebinarydata: Codec[ByteVector] = variableSizeBytes(uint16, bytes)

  def listofsignatures: Codec[List[ByteVector64]] = listOfN(uint16, bytes64)

  def ipv4address: Codec[Inet4Address] = bytes(4).xmap(b => InetAddress.getByAddress(b.toArray).asInstanceOf[Inet4Address], a => ByteVector(a.getAddress))

  def ipv6address: Codec[Inet6Address] = bytes(16).exmap(b => attemptFromTry(Inet6Address.getByAddress(null, b.toArray, null)), a => attemptFromTry(ByteVector(a.getAddress)))

  def base32(size: Int): Codec[String] = bytes(size).xmap(b => new Base32().encodeAsString(b.toArray).toLowerCase, a => ByteVector(new Base32().decode(a.toUpperCase())))

  def nodeaddress: Codec[NodeAddress] =
    discriminated[NodeAddress].by(uint8)
      .typecase(1, (ipv4address :: uint16).as[IPv4])
      .typecase(2, (ipv6address :: uint16).as[IPv6])
      .typecase(3, (base32(10) :: uint16).as[Tor2])
      .typecase(4, (base32(35) :: uint16).as[Tor3])

  // this one is a bit different from most other codecs: the first 'len' element is *not* the number of items
  // in the list but rather the number of bytes of the encoded list. The rationale is once we've read this
  // number of bytes we can just skip to the next field
  def listofnodeaddresses: Codec[List[NodeAddress]] = variableSizeBytes(uint16, list(nodeaddress))

  def shortchannelid: Codec[ShortChannelId] = int64.xmap(l => ShortChannelId(l), s => s.toLong)

  def privateKey: Codec[PrivateKey] = Codec[PrivateKey](
    (priv: PrivateKey) => bytes(32).encode(priv.value),
    (wire: BitVector) => bytes(32).decode(wire).map(_.map(b => PrivateKey(b)))
  )

  def publicKey: Codec[PublicKey] = Codec[PublicKey](
    (pub: PublicKey) => bytes(33).encode(pub.value),
    (wire: BitVector) => bytes(33).decode(wire).map(_.map(b => PublicKey(b)))
  )

  def rgb: Codec[Color] = bytes(3).xmap(buf => Color(buf(0), buf(1), buf(2)), t => ByteVector(t.r, t.g, t.b))

  def zeropaddedstring(size: Int): Codec[String] = fixedSizeBytes(32, utf8).xmap(s => s.takeWhile(_ != '\u0000'), s => s)

  val initCodec: Codec[Init] = (
    ("globalFeatures" | varsizebinarydata) ::
      ("localFeatures" | varsizebinarydata)).as[Init]
@ -107,20 +45,20 @@ object LightningMessageCodecs {

  val channelReestablishCodec: Codec[ChannelReestablish] = (
    ("channelId" | bytes32) ::
      ("nextLocalCommitmentNumber" | uint64) ::
      ("nextRemoteRevocationNumber" | uint64) ::
      ("nextLocalCommitmentNumber" | uint64overflow) ::
      ("nextRemoteRevocationNumber" | uint64overflow) ::
      ("yourLastPerCommitmentSecret" | optional(bitsRemaining, privateKey)) ::
      ("myCurrentPerCommitmentPoint" | optional(bitsRemaining, publicKey))).as[ChannelReestablish]

  val openChannelCodec: Codec[OpenChannel] = (
    ("chainHash" | bytes32) ::
      ("temporaryChannelId" | bytes32) ::
      ("fundingSatoshis" | uint64) ::
      ("pushMsat" | uint64) ::
      ("dustLimitSatoshis" | uint64) ::
      ("maxHtlcValueInFlightMsat" | uint64ex) ::
      ("channelReserveSatoshis" | uint64) ::
      ("htlcMinimumMsat" | uint64) ::
      ("fundingSatoshis" | uint64overflow) ::
      ("pushMsat" | uint64overflow) ::
      ("dustLimitSatoshis" | uint64overflow) ::
      ("maxHtlcValueInFlightMsat" | uint64) ::
      ("channelReserveSatoshis" | uint64overflow) ::
      ("htlcMinimumMsat" | uint64overflow) ::
      ("feeratePerKw" | uint32) ::
      ("toSelfDelay" | uint16) ::
      ("maxAcceptedHtlcs" | uint16) ::
@ -134,10 +72,10 @@ object LightningMessageCodecs {

  val acceptChannelCodec: Codec[AcceptChannel] = (
    ("temporaryChannelId" | bytes32) ::
      ("dustLimitSatoshis" | uint64) ::
      ("maxHtlcValueInFlightMsat" | uint64ex) ::
      ("channelReserveSatoshis" | uint64) ::
      ("htlcMinimumMsat" | uint64) ::
      ("dustLimitSatoshis" | uint64overflow) ::
      ("maxHtlcValueInFlightMsat" | uint64) ::
      ("channelReserveSatoshis" | uint64overflow) ::
      ("htlcMinimumMsat" | uint64overflow) ::
      ("minimumDepth" | uint32) ::
      ("toSelfDelay" | uint16) ::
      ("maxAcceptedHtlcs" | uint16) ::
@@ -168,30 +106,30 @@ object LightningMessageCodecs {
|
|||
|
||||
val closingSignedCodec: Codec[ClosingSigned] = (
|
||||
("channelId" | bytes32) ::
|
||||
("feeSatoshis" | uint64) ::
|
||||
("feeSatoshis" | uint64overflow) ::
|
||||
("signature" | bytes64)).as[ClosingSigned]
|
||||
|
||||
val updateAddHtlcCodec: Codec[UpdateAddHtlc] = (
|
||||
("channelId" | bytes32) ::
|
||||
("id" | uint64) ::
|
||||
("amountMsat" | uint64) ::
|
||||
("id" | uint64overflow) ::
|
||||
("amountMsat" | uint64overflow) ::
|
||||
("paymentHash" | bytes32) ::
|
||||
("expiry" | uint32) ::
|
||||
("onionRoutingPacket" | bytes(Sphinx.PacketLength))).as[UpdateAddHtlc]
|
||||
|
||||
val updateFulfillHtlcCodec: Codec[UpdateFulfillHtlc] = (
|
||||
("channelId" | bytes32) ::
|
||||
("id" | uint64) ::
|
||||
("id" | uint64overflow) ::
|
||||
("paymentPreimage" | bytes32)).as[UpdateFulfillHtlc]
|
||||
|
||||
val updateFailHtlcCodec: Codec[UpdateFailHtlc] = (
|
||||
("channelId" | bytes32) ::
|
||||
("id" | uint64) ::
|
||||
("id" | uint64overflow) ::
|
||||
("reason" | varsizebinarydata)).as[UpdateFailHtlc]
|
||||
|
||||
val updateFailMalformedHtlcCodec: Codec[UpdateFailMalformedHtlc] = (
|
||||
("channelId" | bytes32) ::
|
||||
("id" | uint64) ::
|
||||
("id" | uint64overflow) ::
|
||||
("onionHash" | bytes32) ::
|
||||
("failureCode" | uint16)).as[UpdateFailMalformedHtlc]
|
||||
|
||||
|
@@ -216,14 +154,13 @@ object LightningMessageCodecs {
|
|||
("nodeSignature" | bytes64) ::
|
||||
("bitcoinSignature" | bytes64)).as[AnnouncementSignatures]
|
||||
|
||||
val channelAnnouncementWitnessCodec = (
|
||||
("features" | varsizebinarydata) ::
|
||||
val channelAnnouncementWitnessCodec = ("features" | varsizebinarydata) ::
|
||||
("chainHash" | bytes32) ::
|
||||
("shortChannelId" | shortchannelid) ::
|
||||
("nodeId1" | publicKey) ::
|
||||
("nodeId2" | publicKey) ::
|
||||
("bitcoinKey1" | publicKey) ::
|
||||
("bitcoinKey2" | publicKey))
|
||||
("bitcoinKey2" | publicKey)
|
||||
|
||||
val channelAnnouncementCodec: Codec[ChannelAnnouncement] = (
|
||||
("nodeSignature1" | bytes64) ::
|
||||
|
@@ -232,13 +169,12 @@ object LightningMessageCodecs {
|
|||
("bitcoinSignature2" | bytes64) ::
|
||||
channelAnnouncementWitnessCodec).as[ChannelAnnouncement]
|
||||
|
||||
val nodeAnnouncementWitnessCodec = (
|
||||
("features" | varsizebinarydata) ::
|
||||
val nodeAnnouncementWitnessCodec = ("features" | varsizebinarydata) ::
|
||||
("timestamp" | uint32) ::
|
||||
("nodeId" | publicKey) ::
|
||||
("rgbColor" | rgb) ::
|
||||
("alias" | zeropaddedstring(32)) ::
|
||||
("addresses" | listofnodeaddresses))
|
||||
("addresses" | listofnodeaddresses)
|
||||
|
||||
val nodeAnnouncementCodec: Codec[NodeAnnouncement] = (
|
||||
("signature" | bytes64) ::
|
||||
|
@@ -251,10 +187,10 @@ object LightningMessageCodecs {
|
|||
(("messageFlags" | byte) >>:~ { messageFlags =>
|
||||
("channelFlags" | byte) ::
|
||||
("cltvExpiryDelta" | uint16) ::
|
||||
("htlcMinimumMsat" | uint64) ::
|
||||
("htlcMinimumMsat" | uint64overflow) ::
|
||||
("feeBaseMsat" | uint32) ::
|
||||
("feeProportionalMillionths" | uint32) ::
|
||||
("htlcMaximumMsat" | conditional((messageFlags & 1) != 0, uint64))
|
||||
("htlcMaximumMsat" | conditional((messageFlags & 1) != 0, uint64overflow))
|
||||
})
|
||||
|
||||
val channelUpdateCodec: Codec[ChannelUpdate] = (
|
||||
|
@@ -285,31 +221,6 @@ object LightningMessageCodecs {
|
|||
("data" | varsizebinarydata)
|
||||
).as[ReplyChannelRange]
|
||||
|
||||
val queryShortChannelIdsExCodec: Codec[QueryShortChannelIdsEx] = (
|
||||
("chainHash" | bytes32) ::
|
||||
("flag" | byte) ::
|
||||
("data" | varsizebinarydata)
|
||||
).as[QueryShortChannelIdsEx]
|
||||
|
||||
val replyShortChanelIdsEndExCodec: Codec[ReplyShortChannelIdsEndEx] = (
|
||||
("chainHash" | bytes32) ::
|
||||
("complete" | byte)
|
||||
).as[ReplyShortChannelIdsEndEx]
|
||||
|
||||
val queryChannelRangeExCodec: Codec[QueryChannelRangeEx] = (
|
||||
("chainHash" | bytes32) ::
|
||||
("firstBlockNum" | uint32) ::
|
||||
("numberOfBlocks" | uint32)
|
||||
).as[QueryChannelRangeEx]
|
||||
|
||||
val replyChannelRangeExCodec: Codec[ReplyChannelRangeEx] = (
|
||||
("chainHash" | bytes32) ::
|
||||
("firstBlockNum" | uint32) ::
|
||||
("numberOfBlocks" | uint32) ::
|
||||
("complete" | byte) ::
|
||||
("data" | varsizebinarydata)
|
||||
).as[ReplyChannelRangeEx]
|
||||
|
||||
val gossipTimestampFilterCodec: Codec[GossipTimestampFilter] = (
|
||||
("chainHash" | bytes32) ::
|
||||
("firstTimestamp" | uint32) ::
|
||||
|
@@ -345,40 +256,11 @@ object LightningMessageCodecs {
|
|||
.typecase(263, queryChannelRangeCodec)
|
||||
.typecase(264, replyChannelRangeCodec)
|
||||
.typecase(265, gossipTimestampFilterCodec)
|
||||
.typecase(1001, queryShortChannelIdsExCodec)
|
||||
.typecase(1002, replyShortChanelIdsEndExCodec)
|
||||
.typecase(1003, queryChannelRangeExCodec)
|
||||
.typecase(1004, replyChannelRangeExCodec)
|
||||
|
||||
|
||||
/**
|
||||
* A codec that caches serialized routing messages
|
||||
*/
|
||||
val cachedLightningMessageCodec = new Codec[LightningMessage] {
|
||||
|
||||
override def sizeBound: SizeBound = lightningMessageCodec.sizeBound
|
||||
|
||||
val cache = CacheBuilder
|
||||
.newBuilder
|
||||
.weakKeys() // will cleanup values when keys are garbage collected
|
||||
.build(new CacheLoader[LightningMessage, Attempt[BitVector]] {
|
||||
override def load(key: LightningMessage): Attempt[BitVector] = lightningMessageCodec.encode(key)
|
||||
})
|
||||
|
||||
override def encode(value: LightningMessage): Attempt[BitVector] = value match {
|
||||
case _: ChannelAnnouncement => cache.get(value) // we only cache serialized routing messages
|
||||
case _: NodeAnnouncement => cache.get(value) // we only cache serialized routing messages
|
||||
case _: ChannelUpdate => cache.get(value) // we only cache serialized routing messages
|
||||
case _ => lightningMessageCodec.encode(value)
|
||||
}
|
||||
|
||||
override def decode(bits: BitVector): Attempt[DecodeResult[LightningMessage]] = lightningMessageCodec.decode(bits)
|
||||
}
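As a standalone illustration of the Guava pattern used here (the `expensiveEncode` function below is a hypothetical stand-in, not eclair's API): `weakKeys()` switches the cache to identity-based keys and lets entries disappear once the message object is no longer referenced, so re-broadcasting the same routing message instance only serializes it once.

```scala
import com.google.common.cache.{CacheBuilder, CacheLoader}

object WeakKeyMemoSketch {
  // Hypothetical stand-in for an expensive serialization step.
  def expensiveEncode(msg: String): Array[Byte] = msg.getBytes("UTF-8")

  private val cache = CacheBuilder.newBuilder()
    .weakKeys() // identity-based keys; entries are dropped once the key object is garbage collected
    .build(new CacheLoader[String, Array[Byte]] {
      override def load(key: String): Array[Byte] = expensiveEncode(key)
    })

  // Repeated calls with the *same instance* hit the cache instead of re-encoding.
  def encode(msg: String): Array[Byte] = cache.get(msg)
}
```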
|
||||
|
||||
val perHopPayloadCodec: Codec[PerHopPayload] = (
|
||||
("realm" | constant(ByteVector.fromByte(0))) ::
|
||||
("short_channel_id" | shortchannelid) ::
|
||||
("amt_to_forward" | uint64) ::
|
||||
("amt_to_forward" | uint64overflow) ::
|
||||
("outgoing_cltv_value" | uint32) ::
|
||||
("unused_with_v0_version_on_header" | ignore(8 * 12))).as[PerHopPayload]
|
||||
|
||||
|
|
|
@@ -181,10 +181,6 @@ object NodeAddress {
|
|||
*
|
||||
* We don't attempt to resolve onion addresses (it will be done by the tor proxy), so we just recognize them based on
|
||||
* the .onion TLD and rely on their length to separate v2/v3.
|
||||
*
|
||||
* @param host
|
||||
* @param port
|
||||
* @return
|
||||
*/
|
||||
def fromParts(host: String, port: Int): Try[NodeAddress] = Try {
|
||||
host match {
|
||||
|
|
|
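The `.onion` recognition rule described in the `NodeAddress.fromParts` comment above can be sketched as follows (a hypothetical helper for illustration, not eclair's actual match arms): a Tor v2 host has 16 base32 characters before the `.onion` suffix (10 bytes), a v3 host has 56 (35 bytes), and anything else is treated as a regular IP address or DNS hostname.

```scala
// Illustrative only: classify a host string the way the comment above describes.
def onionVersion(host: String): Option[Int] = host.stripSuffix(".onion") match {
  case h if host.endsWith(".onion") && h.length == 16 => Some(2) // Tor v2: base32(10) => 16 chars
  case h if host.endsWith(".onion") && h.length == 56 => Some(3) // Tor v3: base32(35) => 56 chars
  case _ => None // regular IPv4/IPv6/DNS host, resolved normally
}
```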
@@ -0,0 +1,52 @@
|
|||
/*
|
||||
* Copyright 2019 ACINQ SAS
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package fr.acinq.eclair.wire
|
||||
|
||||
import fr.acinq.eclair.wire.CommonCodecs._
|
||||
import scodec.{Attempt, Codec}
|
||||
import scodec.codecs._
|
||||
|
||||
import scala.util.Try
|
||||
|
||||
/**
|
||||
* Created by t-bast on 20/06/2019.
|
||||
*/
|
||||
|
||||
object TlvCodecs {
|
||||
|
||||
val genericTlv: Codec[GenericTlv] = (("type" | varint) :: variableSizeBytesLong(varintoverflow, bytes)).as[GenericTlv]
|
||||
|
||||
def tlvFallback(codec: Codec[Tlv]): Codec[Tlv] = discriminatorFallback(genericTlv, codec).xmap({
|
||||
case Left(l) => l
|
||||
case Right(r) => r
|
||||
}, {
|
||||
case g: GenericTlv => Left(g)
|
||||
case o => Right(o)
|
||||
})
|
||||
|
||||
/**
|
||||
* A tlv stream codec relies on an underlying tlv codec.
|
||||
* This allows tlv streams to have different namespaces, increasing the total number of tlv types available.
|
||||
*
|
||||
* @param codec codec used for the tlv records contained in the stream.
|
||||
*/
|
||||
def tlvStream(codec: Codec[Tlv]): Codec[TlvStream] = list(codec).exmap(
|
||||
records => Attempt.fromTry(Try(TlvStream(records))),
|
||||
stream => Attempt.successful(stream.records.toList)
|
||||
)
|
||||
|
||||
}
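A hedged usage sketch of the codecs above (the `DemoTlv` namespace and its record type are hypothetical; the pattern mirrors the one exercised in `TlvCodecsSpec` later in this commit): each namespace defines a discriminated codec keyed by the varint type, wraps it in `tlvFallback` so unknown types decode as `GenericTlv`, and feeds the result to `tlvStream`.

```scala
import fr.acinq.eclair.UInt64
import fr.acinq.eclair.UInt64.Conversions._
import fr.acinq.eclair.wire.{Tlv, TlvStream}
import fr.acinq.eclair.wire.CommonCodecs.{uint64, varint}
import fr.acinq.eclair.wire.TlvCodecs.{tlvFallback, tlvStream}
import scodec.Codec
import scodec.bits.HexStringSyntax
import scodec.codecs._

object DemoTlvNamespace {
  // Hypothetical namespace with a single known record type (type = 1, 8-byte value).
  sealed trait DemoTlv extends Tlv
  case class DemoAmount(amount: UInt64) extends DemoTlv { override val `type` = UInt64(1) }

  val demoAmountCodec: Codec[DemoAmount] = (("length" | constant(hex"08")) :: ("amount" | uint64)).as[DemoAmount]

  // Known type 1 decodes to DemoAmount; unknown odd types fall back to GenericTlv instead of failing.
  val demoTlvCodec: Codec[Tlv] = tlvFallback(discriminated[Tlv].by(varint).typecase(1, demoAmountCodec))
  val demoStreamCodec: Codec[TlvStream] = tlvStream(demoTlvCodec)
}
```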
|
|
@@ -0,0 +1,63 @@
|
|||
/*
|
||||
* Copyright 2019 ACINQ SAS
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package fr.acinq.eclair.wire
|
||||
|
||||
import fr.acinq.eclair.UInt64
|
||||
import scodec.bits.ByteVector
|
||||
|
||||
import scala.annotation.tailrec
|
||||
|
||||
/**
|
||||
* Created by t-bast on 20/06/2019.
|
||||
*/
|
||||
|
||||
// @formatter:off
|
||||
trait Tlv {
|
||||
val `type`: UInt64
|
||||
}
|
||||
sealed trait OnionTlv extends Tlv
|
||||
// @formatter:on
|
||||
|
||||
/**
|
||||
* Generic tlv type we fallback to if we don't understand the incoming type.
|
||||
*
|
||||
* @param `type` tlv type.
|
||||
* @param value tlv value (length is implicit, and encoded as a varint).
|
||||
*/
|
||||
case class GenericTlv(`type`: UInt64, value: ByteVector) extends Tlv
|
||||
|
||||
/**
|
||||
* A tlv stream is a collection of tlv records.
|
||||
* A tlv stream is part of a given namespace that dictates how to parse the tlv records.
|
||||
* That namespace is indicated by a trait extending the top-level tlv trait.
|
||||
*
|
||||
* @param records tlv records.
|
||||
*/
|
||||
case class TlvStream(records: Seq[Tlv]) {
|
||||
|
||||
records.foldLeft(Option.empty[Tlv]) {
|
||||
case (None, record) =>
|
||||
require(!record.isInstanceOf[GenericTlv] || record.`type`.toBigInt % 2 != 0, "tlv streams must not contain unknown even tlv types")
|
||||
Some(record)
|
||||
case (Some(previousRecord), record) =>
|
||||
require(record.`type` != previousRecord.`type`, "tlv streams must not contain duplicate records")
|
||||
require(record.`type` > previousRecord.`type`, "tlv records must be ordered by monotonically-increasing types")
|
||||
require(!record.isInstanceOf[GenericTlv] || record.`type`.toBigInt % 2 != 0, "tlv streams must not contain unknown even tlv types")
|
||||
Some(record)
|
||||
}
|
||||
|
||||
}
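A quick illustration of the invariants the fold above enforces at construction time (relying on `UInt64.Conversions` for the numeric literals, as the accompanying spec does; `TlvStreamInvariantsDemo` is a hypothetical snippet, not part of the commit):

```scala
import fr.acinq.eclair.UInt64.Conversions._
import fr.acinq.eclair.wire.{GenericTlv, TlvStream}
import scodec.bits.HexStringSyntax

object TlvStreamInvariantsDemo extends App {
  // Unknown *odd* types are accepted, as long as types are strictly increasing and unique.
  TlvStream(Seq(GenericTlv(11, hex"00"), GenericTlv(13, hex"2a")))

  // Each of the following fails a require with IllegalArgumentException:
  // TlvStream(Seq(GenericTlv(42, hex"2a")))                          // unknown even type
  // TlvStream(Seq(GenericTlv(13, hex"2a"), GenericTlv(11, hex"00"))) // not monotonically increasing
  // TlvStream(Seq(GenericTlv(11, hex"00"), GenericTlv(11, hex"2a"))) // duplicate type
}
```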
|
1
eclair-core/src/test/resources/api/usablebalances
Normal file
|
@@ -0,0 +1 @@
|
|||
[{"remoteNodeId":"03af0ed6052cf28d670665549bc86f4b721c9fdb309d40c58f5811f63966e005d0","shortChannelId":"0x0x1","canSendMsat":100000000,"canReceiveMsat":20000000,"isPublic":true},{"remoteNodeId":"03af0ed6052cf28d670665549bc86f4b721c9fdb309d40c58f5811f63966e005d0","shortChannelId":"0x0x2","canSendMsat":400000000,"canReceiveMsat":30000000,"isPublic":false}]
|
|
@@ -1,7 +1,6 @@
|
|||
regtest=1
|
||||
noprinttoconsole=1
|
||||
server=1
|
||||
port=28333
|
||||
rpcuser=foo
|
||||
rpcpassword=bar
|
||||
txindex=1
|
||||
|
@@ -10,4 +9,6 @@ zmqpubrawtx=tcp://127.0.0.1:28335
|
|||
rpcworkqueue=64
|
||||
addresstype=bech32
|
||||
[regtest]
|
||||
bind=127.0.0.1
|
||||
port=28333
|
||||
rpcport=28332
|
||||
|
|
|
@@ -27,5 +27,4 @@ object TestUtils {
|
|||
.props
|
||||
.get("buildDirectory") // this is defined if we run from maven
|
||||
.getOrElse(new File(sys.props("user.dir"), "target").getAbsolutePath) // otherwise we probably are in intellij, so we build it manually assuming that user.dir == path to the module
|
||||
|
||||
}
|
||||
|
|
|
@@ -28,7 +28,6 @@ import scodec.bits._
|
|||
import scala.concurrent.ExecutionContext.Implicits.global
|
||||
import scala.concurrent.duration._
|
||||
|
||||
|
||||
class ElectrumClientSpec extends TestKit(ActorSystem("test")) with FunSuiteLike with Logging with BeforeAndAfterAll {
|
||||
|
||||
import ElectrumClient._
|
||||
|
@@ -38,6 +37,22 @@ class ElectrumClientSpec extends TestKit(ActorSystem("test")) with FunSuiteLike
|
|||
// this is tx #2690 of block #500000
|
||||
val referenceTx = Transaction.read("0200000001983c5b32ced1de5ae97d3ce9b7436f8bb0487d15bf81e5cae97b1e238dc395c6000000006a47304402205957c75766e391350eba2c7b752f0056cb34b353648ecd0992a8a81fc9bcfe980220629c286592842d152cdde71177cd83086619744a533f262473298cacf60193500121021b8b51f74dbf0ac1e766d162c8707b5e8d89fc59da0796f3b4505e7c0fb4cf31feffffff0276bd0101000000001976a914219de672ba773aa0bc2e15cdd9d2e69b734138fa88ac3e692001000000001976a914301706dede031e9fb4b60836e073a4761855f6b188ac09a10700")
|
||||
val scriptHash = Crypto.sha256(referenceTx.txOut(0).publicKeyScript).reverse
|
||||
val height = 500000
|
||||
val position = 2690
|
||||
val merkleProof = List(
|
||||
hex"b500cd85cd6c7e0e570b82728dd516646536a477b61cc82056505d84a5820dc3",
|
||||
hex"c98798c2e576566a92b23d2405f59d95c506966a6e26fecfb356d6447a199546",
|
||||
hex"930d95c428546812fd11f8242904a9a1ba05d2140cd3a83be0e2ed794821c9ec",
|
||||
hex"90c97965b12f4262fe9bf95bc37ff7d6362902745eaa822ecf0cf85801fa8b48",
|
||||
hex"23792d51fddd6e439ed4c92ad9f19a9b73fc9d5c52bdd69039be70ad6619a1aa",
|
||||
hex"4b73075f29a0abdcec2c83c2cfafc5f304d2c19dcacb50a88a023df725468760",
|
||||
hex"f80225a32a5ce4ef0703822c6aa29692431a816dec77d9b1baa5b09c3ba29bfb",
|
||||
hex"4858ac33f2022383d3b4dd674666a0880557d02a155073be93231a02ecbb81f4",
|
||||
hex"eb5b142030ed4e0b55a8ba5a7b5b783a0a24e0c2fd67c1cfa2f7b308db00c38a",
|
||||
hex"86858812c3837d209110f7ea79de485abdfd22039467a8aa15a8d85856ee7d30",
|
||||
hex"de20eb85f2e9ad525a6fb5c618682b6bdce2fa83df836a698f31575c4e5b3d38",
|
||||
hex"98bd1048e04ff1b0af5856d9890cd708d8d67ad6f3a01f777130fbc16810eeb3")
|
||||
.map(ByteVector32(_))
|
||||
|
||||
override protected def beforeAll(): Unit = {
|
||||
client = system.actorOf(Props(new ElectrumClient(new InetSocketAddress("electrum.acinq.co", 50002), SSL.STRICT)), "electrum-client")
|
||||
|
@@ -52,6 +67,16 @@ class ElectrumClientSpec extends TestKit(ActorSystem("test")) with FunSuiteLike
|
|||
probe.expectMsgType[ElectrumReady](15 seconds)
|
||||
}
|
||||
|
||||
test("get transaction id from position") {
|
||||
probe.send(client, GetTransactionIdFromPosition(height, position))
|
||||
probe.expectMsg(GetTransactionIdFromPositionResponse(referenceTx.txid, height, position, Nil))
|
||||
}
|
||||
|
||||
test("get transaction id from position with merkle proof") {
|
||||
probe.send(client, GetTransactionIdFromPosition(height, position, merkle = true))
|
||||
probe.expectMsg(GetTransactionIdFromPositionResponse(referenceTx.txid, height, position, merkleProof))
|
||||
}
|
||||
|
||||
test("get transaction") {
|
||||
probe.send(client, GetTransaction(referenceTx.txid))
|
||||
val GetTransactionResponse(tx) = probe.expectMsgType[GetTransactionResponse]
|
||||
|
@@ -98,7 +123,7 @@ class ElectrumClientSpec extends TestKit(ActorSystem("test")) with FunSuiteLike
|
|||
test("get scripthash history") {
|
||||
probe.send(client, GetScriptHashHistory(scriptHash))
|
||||
val GetScriptHashHistoryResponse(scriptHash1, history) = probe.expectMsgType[GetScriptHashHistoryResponse]
|
||||
assert(history.contains((TransactionHistoryItem(500000, referenceTx.txid))))
|
||||
assert(history.contains(TransactionHistoryItem(500000, referenceTx.txid)))
|
||||
}
|
||||
|
||||
test("list script unspents") {
|
||||
|
@@ -106,4 +131,5 @@ class ElectrumClientSpec extends TestKit(ActorSystem("test")) with FunSuiteLike
|
|||
val ScriptHashListUnspentResponse(scriptHash1, unspents) = probe.expectMsgType[ScriptHashListUnspentResponse]
|
||||
assert(unspents.isEmpty)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -103,7 +103,7 @@ trait StateTestsHelperMethods extends TestKitBase {
|
|||
bob2blockchain.expectMsgType[WatchConfirmed] // deeply buried
|
||||
awaitCond(alice.stateName == NORMAL)
|
||||
awaitCond(bob.stateName == NORMAL)
|
||||
assert(bob.stateData.asInstanceOf[DATA_NORMAL].commitments.availableBalanceForSendMsat == pushMsat - TestConstants.Alice.channelParams.channelReserveSatoshis * 1000)
|
||||
assert(bob.stateData.asInstanceOf[DATA_NORMAL].commitments.availableBalanceForSendMsat == math.max(pushMsat - TestConstants.Alice.channelParams.channelReserveSatoshis * 1000, 0))
|
||||
// x2 because alice and bob share the same relayer
|
||||
channelUpdateListener.expectMsgType[LocalChannelUpdate]
|
||||
channelUpdateListener.expectMsgType[LocalChannelUpdate]
|
||||
|
|
|
@@ -23,7 +23,7 @@ import akka.io.Tcp
|
|||
import akka.testkit.{TestActorRef, TestFSMRef, TestKit, TestProbe}
|
||||
import fr.acinq.eclair.crypto.Noise.{Chacha20Poly1305CipherFunctions, CipherState}
|
||||
import fr.acinq.eclair.crypto.TransportHandler.{Encryptor, ExtendedCipherState, Listener}
|
||||
import fr.acinq.eclair.wire.LightningMessageCodecs
|
||||
import fr.acinq.eclair.wire.CommonCodecs
|
||||
import org.scalatest.{BeforeAndAfterAll, FunSuiteLike}
|
||||
import scodec.Codec
|
||||
import scodec.bits._
|
||||
|
@@ -49,8 +49,8 @@ class TransportHandlerSpec extends TestKit(ActorSystem("test")) with FunSuiteLik
|
|||
val pipe = system.actorOf(Props[MyPipe])
|
||||
val probe1 = TestProbe()
|
||||
val probe2 = TestProbe()
|
||||
val initiator = TestFSMRef(new TransportHandler(Initiator.s, Some(Responder.s.pub), pipe, LightningMessageCodecs.varsizebinarydata))
|
||||
val responder = TestFSMRef(new TransportHandler(Responder.s, None, pipe, LightningMessageCodecs.varsizebinarydata))
|
||||
val initiator = TestFSMRef(new TransportHandler(Initiator.s, Some(Responder.s.pub), pipe, CommonCodecs.varsizebinarydata))
|
||||
val responder = TestFSMRef(new TransportHandler(Responder.s, None, pipe, CommonCodecs.varsizebinarydata))
|
||||
pipe ! (initiator, responder)
|
||||
|
||||
awaitCond(initiator.stateName == TransportHandler.WaitingForListener)
|
||||
|
@@ -111,8 +111,8 @@ class TransportHandlerSpec extends TestKit(ActorSystem("test")) with FunSuiteLik
|
|||
val pipe = system.actorOf(Props[MyPipeSplitter])
|
||||
val probe1 = TestProbe()
|
||||
val probe2 = TestProbe()
|
||||
val initiator = TestFSMRef(new TransportHandler(Initiator.s, Some(Responder.s.pub), pipe, LightningMessageCodecs.varsizebinarydata))
|
||||
val responder = TestFSMRef(new TransportHandler(Responder.s, None, pipe, LightningMessageCodecs.varsizebinarydata))
|
||||
val initiator = TestFSMRef(new TransportHandler(Initiator.s, Some(Responder.s.pub), pipe, CommonCodecs.varsizebinarydata))
|
||||
val responder = TestFSMRef(new TransportHandler(Responder.s, None, pipe, CommonCodecs.varsizebinarydata))
|
||||
pipe ! (initiator, responder)
|
||||
|
||||
awaitCond(initiator.stateName == TransportHandler.WaitingForListener)
|
||||
|
|
|
@@ -109,7 +109,7 @@ class PeerSpec extends TestkitBaseClass {
|
|||
probe.expectMsg(s"no address found")
|
||||
}
|
||||
|
||||
// on Andoird we don't store node announcements
|
||||
// On Android we don't store node announcements
|
||||
ignore("if no address was specified during connection use the one from node_announcement", Tag("with_node_announcements")) { f =>
|
||||
import f._
|
||||
|
||||
|
|
|
@@ -16,7 +16,7 @@
|
|||
|
||||
package fr.acinq.eclair.payment
|
||||
|
||||
import fr.acinq.bitcoin.Block
|
||||
import fr.acinq.bitcoin.{Block, ByteVector32}
|
||||
import fr.acinq.bitcoin.Crypto.PublicKey
|
||||
import fr.acinq.eclair.channel.{AddHtlcFailed, CMD_ADD_HTLC, CMD_FAIL_HTLC}
|
||||
import fr.acinq.eclair.crypto.Sphinx
|
||||
|
@@ -24,6 +24,7 @@ import fr.acinq.eclair.payment.Relayer.{OutgoingChannel, RelayPayload}
|
|||
import fr.acinq.eclair.router.Announcements
|
||||
import fr.acinq.eclair.wire._
|
||||
import fr.acinq.eclair.{ShortChannelId, randomBytes32, randomKey}
|
||||
import fr.acinq.eclair.payment.HtlcGenerationSpec.makeCommitments
|
||||
import org.scalatest.FunSuite
|
||||
import scodec.bits.ByteVector
|
||||
|
||||
|
@@ -81,11 +82,11 @@ class ChannelSelectionSpec extends FunSuite {
|
|||
val channelUpdate = dummyUpdate(ShortChannelId(12345), 10, 100, 1000, 100, 10000000, true)
|
||||
|
||||
val channelUpdates = Map(
|
||||
ShortChannelId(11111) -> OutgoingChannel(a, channelUpdate, 100000000),
|
||||
ShortChannelId(12345) -> OutgoingChannel(a, channelUpdate, 20000000),
|
||||
ShortChannelId(22222) -> OutgoingChannel(a, channelUpdate, 10000000),
|
||||
ShortChannelId(33333) -> OutgoingChannel(a, channelUpdate, 100000),
|
||||
ShortChannelId(44444) -> OutgoingChannel(b, channelUpdate, 1000000)
|
||||
ShortChannelId(11111) -> OutgoingChannel(a, channelUpdate, makeCommitments(ByteVector32.Zeroes, 100000000)),
|
||||
ShortChannelId(12345) -> OutgoingChannel(a, channelUpdate, makeCommitments(ByteVector32.Zeroes, 20000000)),
|
||||
ShortChannelId(22222) -> OutgoingChannel(a, channelUpdate, makeCommitments(ByteVector32.Zeroes, 10000000)),
|
||||
ShortChannelId(33333) -> OutgoingChannel(a, channelUpdate, makeCommitments(ByteVector32.Zeroes, 100000)),
|
||||
ShortChannelId(44444) -> OutgoingChannel(b, channelUpdate, makeCommitments(ByteVector32.Zeroes, 1000000))
|
||||
)
|
||||
|
||||
val node2channels = new mutable.HashMap[PublicKey, mutable.Set[ShortChannelId]] with mutable.MultiMap[PublicKey, ShortChannelId]
|
||||
|
|
|
@@ -19,8 +19,8 @@ package fr.acinq.eclair.payment
|
|||
import java.util.UUID
|
||||
|
||||
import fr.acinq.bitcoin.DeterministicWallet.ExtendedPrivateKey
|
||||
import fr.acinq.bitcoin.{Block, Crypto, DeterministicWallet}
|
||||
import fr.acinq.eclair.channel.Channel
|
||||
import fr.acinq.bitcoin.{Block, ByteVector32, Crypto, DeterministicWallet}
|
||||
import fr.acinq.eclair.channel.{Channel, Commitments}
|
||||
import fr.acinq.eclair.crypto.Sphinx
|
||||
import fr.acinq.eclair.crypto.Sphinx.{PacketAndSecrets, ParsedPacket}
|
||||
import fr.acinq.eclair.payment.PaymentLifecycle._
|
||||
|
@@ -151,6 +151,12 @@ class HtlcGenerationSpec extends FunSuite {
|
|||
|
||||
object HtlcGenerationSpec {
|
||||
|
||||
def makeCommitments(channelId: ByteVector32, availableBalanceForSend: Long = 50000000L, availableBalanceForReceive: Long = 50000000L) =
|
||||
new Commitments(null, null, 0.toByte, null, null, null, null, 0, 0, Map.empty, null, null, null, channelId) {
|
||||
override lazy val availableBalanceForSendMsat: Long = availableBalanceForSend.max(0)
|
||||
override lazy val availableBalanceForReceiveMsat: Long = availableBalanceForReceive.max(0)
|
||||
}
|
||||
|
||||
def randomExtendedPrivateKey: ExtendedPrivateKey = DeterministicWallet.generate(randomBytes32)
|
||||
|
||||
val (priv_a, priv_b, priv_c, priv_d, priv_e) = (TestConstants.Alice.keyManager.nodeKey, TestConstants.Bob.keyManager.nodeKey, randomExtendedPrivateKey, randomExtendedPrivateKey, randomExtendedPrivateKey)
|
||||
|
|
|
@@ -262,6 +262,12 @@ class PaymentRequestSpec extends FunSuite {
|
|||
assert(PaymentRequest.write(PaymentRequest.read(input.toUpperCase())) == input)
|
||||
}
|
||||
|
||||
test("Pay 1 BTC without multiplier") {
|
||||
val ref = "lnbc11pdkmqhupp5n2ees808r98m0rh4472yyth0c5fptzcxmexcjznrzmq8xald0cgqdqsf4ujqarfwqsxymmccqp2xvtsv5tc743wgctlza8k3zlpxucl7f3kvjnjptv7xz0nkaww307sdyrvgke2w8kmq7dgz4lkasfn0zvplc9aa4gp8fnhrwfjny0j59sq42x9gp"
|
||||
val pr = PaymentRequest.read(ref)
|
||||
assert(pr.amount.contains(MilliSatoshi(100000000000L)))
|
||||
}
|
||||
|
||||
test("nonreg") {
|
||||
val requests = List(
|
||||
"lnbc40n1pw9qjvwpp5qq3w2ln6krepcslqszkrsfzwy49y0407hvks30ec6pu9s07jur3sdpstfshq5n9v9jzucm0d5s8vmm5v5s8qmmnwssyj3p6yqenwdencqzysxqrrss7ju0s4dwx6w8a95a9p2xc5vudl09gjl0w2n02sjrvffde632nxwh2l4w35nqepj4j5njhh4z65wyfc724yj6dn9wajvajfn5j7em6wsq2elakl",
|
||||
|
|
|
@@ -25,9 +25,8 @@ import fr.acinq.eclair.channel._
|
|||
import fr.acinq.eclair.crypto.Sphinx
|
||||
import fr.acinq.eclair.payment.PaymentLifecycle.buildCommand
|
||||
import fr.acinq.eclair.router.Announcements
|
||||
import fr.acinq.eclair.transactions.CommitmentSpec
|
||||
import fr.acinq.eclair.wire._
|
||||
import fr.acinq.eclair.{ShortChannelId, TestConstants, TestkitBaseClass, UInt64, randomBytes32, randomKey}
|
||||
import fr.acinq.eclair._
|
||||
import org.scalatest.Outcome
|
||||
import scodec.bits.ByteVector
|
||||
|
||||
|
@@ -58,11 +57,6 @@ class RelayerSpec extends TestkitBaseClass {
|
|||
val channelId_ab = randomBytes32
|
||||
val channelId_bc = randomBytes32
|
||||
|
||||
def makeCommitments(channelId: ByteVector32, availableBalanceMsat: Long = 50000000L) = new Commitments(null, null, 0.toByte, null, null,
|
||||
null, null, 0, 0, Map.empty, null, null, null, channelId) {
|
||||
override def availableBalanceForSendMsat: Long = availableBalanceMsat
|
||||
}
|
||||
|
||||
test("relay an htlc-add") { f =>
|
||||
import f._
|
||||
val sender = TestProbe()
|
||||
|
@@ -97,7 +91,7 @@ class RelayerSpec extends TestkitBaseClass {
|
|||
|
||||
// this is another channel B-C, with less balance (it will be preferred)
|
||||
val (channelId_bc_1, channelUpdate_bc_1) = (randomBytes32, channelUpdate_bc.copy(shortChannelId = ShortChannelId("500000x1x1")))
|
||||
relayer ! LocalChannelUpdate(null, channelId_bc_1, channelUpdate_bc_1.shortChannelId, c, None, channelUpdate_bc_1, makeCommitments(channelId_bc_1, availableBalanceMsat = 49000000L))
|
||||
relayer ! LocalChannelUpdate(null, channelId_bc_1, channelUpdate_bc_1.shortChannelId, c, None, channelUpdate_bc_1, makeCommitments(channelId_bc_1, 49000000L))
|
||||
|
||||
sender.send(relayer, ForwardAdd(add_ab))
|
||||
|
||||
|
@@ -416,4 +410,37 @@ class RelayerSpec extends TestkitBaseClass {
|
|||
assert(fwd.channelId === origin.originChannelId)
|
||||
assert(fwd.message.id === origin.originHtlcId)
|
||||
}
|
||||
|
||||
test("get usable balances") { f =>
|
||||
import f._
|
||||
val sender = TestProbe()
|
||||
relayer ! LocalChannelUpdate(null, channelId_ab, channelUpdate_ab.shortChannelId, a, None, channelUpdate_ab, makeCommitments(channelId_ab, -2000, 300000))
|
||||
relayer ! LocalChannelUpdate(null, channelId_bc, channelUpdate_bc.shortChannelId, c, None, channelUpdate_bc, makeCommitments(channelId_bc, 400000, -5000))
|
||||
sender.send(relayer, GetUsableBalances)
|
||||
val usableBalances1 = sender.expectMsgType[Iterable[UsableBalances]]
|
||||
assert(usableBalances1.size === 2)
|
||||
assert(usableBalances1.head.canSendMsat === 0 && usableBalances1.head.canReceiveMsat === 300000 && usableBalances1.head.shortChannelId == channelUpdate_ab.shortChannelId)
|
||||
assert(usableBalances1.last.canReceiveMsat === 0 && usableBalances1.last.canSendMsat === 400000 && usableBalances1.last.shortChannelId == channelUpdate_bc.shortChannelId)
|
||||
|
||||
relayer ! AvailableBalanceChanged(null, channelId_bc, channelUpdate_bc.shortChannelId, 0, makeCommitments(channelId_bc, 200000, 500000))
|
||||
sender.send(relayer, GetUsableBalances)
|
||||
val usableBalances2 = sender.expectMsgType[Iterable[UsableBalances]]
|
||||
assert(usableBalances2.last.canReceiveMsat === 500000 && usableBalances2.last.canSendMsat === 200000)
|
||||
|
||||
relayer ! AvailableBalanceChanged(null, channelId_ab, channelUpdate_ab.shortChannelId, 0, makeCommitments(channelId_ab, 100000, 200000))
|
||||
relayer ! LocalChannelDown(null, channelId_bc, channelUpdate_bc.shortChannelId, c)
|
||||
sender.send(relayer, GetUsableBalances)
|
||||
val usableBalances3 = sender.expectMsgType[Iterable[UsableBalances]]
|
||||
assert(usableBalances3.size === 1 && usableBalances3.head.canSendMsat === 100000)
|
||||
|
||||
relayer ! LocalChannelUpdate(null, channelId_ab, channelUpdate_ab.shortChannelId, a, None, channelUpdate_ab.copy(channelFlags = 2), makeCommitments(channelId_ab, 100000, 200000))
|
||||
sender.send(relayer, GetUsableBalances)
|
||||
val usableBalances4 = sender.expectMsgType[Iterable[UsableBalances]]
|
||||
assert(usableBalances4.isEmpty)
|
||||
|
||||
relayer ! LocalChannelUpdate(null, channelId_ab, channelUpdate_ab.shortChannelId, a, None, channelUpdate_ab, makeCommitments(channelId_ab, 100000, 200000))
|
||||
sender.send(relayer, GetUsableBalances)
|
||||
val usableBalances5 = sender.expectMsgType[Iterable[UsableBalances]]
|
||||
assert(usableBalances5.size === 1)
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,253 @@
|
|||
/*
|
||||
* Copyright 2019 ACINQ SAS
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package fr.acinq.eclair.wire
|
||||
|
||||
import java.net.{Inet4Address, Inet6Address, InetAddress}
|
||||
|
||||
import com.google.common.net.InetAddresses
|
||||
import fr.acinq.bitcoin.Crypto.PrivateKey
|
||||
import fr.acinq.eclair.{UInt64, randomBytes32}
|
||||
import fr.acinq.eclair.wire.CommonCodecs._
|
||||
import org.scalatest.FunSuite
|
||||
import scodec.bits.{BitVector, HexStringSyntax}
|
||||
|
||||
/**
|
||||
* Created by t-bast on 20/06/2019.
|
||||
*/
|
||||
|
||||
class CommonCodecsSpec extends FunSuite {
|
||||
|
||||
test("encode/decode with uint64 codec") {
|
||||
val expected = Map(
|
||||
UInt64(0) -> hex"00 00 00 00 00 00 00 00",
|
||||
UInt64(42) -> hex"00 00 00 00 00 00 00 2a",
|
||||
UInt64(6211610197754262546L) -> hex"56 34 12 90 78 56 34 12",
|
||||
UInt64(hex"ff ff ff ff ff ff ff ff") -> hex"ff ff ff ff ff ff ff ff"
|
||||
).mapValues(_.toBitVector)
|
||||
|
||||
for ((uint, ref) <- expected) {
|
||||
val encoded = uint64.encode(uint).require
|
||||
assert(ref === encoded)
|
||||
val decoded = uint64.decode(encoded).require.value
|
||||
assert(uint === decoded)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode with uint64L codec") {
|
||||
val expected = Map(
|
||||
UInt64(0) -> hex"00 00 00 00 00 00 00 00",
|
||||
UInt64(42) -> hex"2a 00 00 00 00 00 00 00",
|
||||
UInt64(6211610197754262546L) -> hex"12 34 56 78 90 12 34 56",
|
||||
UInt64(hex"ff ff ff ff ff ff ff ff") -> hex"ff ff ff ff ff ff ff ff"
|
||||
).mapValues(_.toBitVector)
|
||||
|
||||
for ((uint, ref) <- expected) {
|
||||
val encoded = uint64L.encode(uint).require
|
||||
assert(ref === encoded)
|
||||
val decoded = uint64L.decode(encoded).require.value
|
||||
assert(uint === decoded)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode with varint codec") {
|
||||
val expected = Map(
|
||||
UInt64(0L) -> hex"00",
|
||||
UInt64(42L) -> hex"2a",
|
||||
UInt64(253L) -> hex"fd fd 00",
|
||||
UInt64(254L) -> hex"fd fe 00",
|
||||
UInt64(255L) -> hex"fd ff 00",
|
||||
UInt64(550L) -> hex"fd 26 02",
|
||||
UInt64(998000L) -> hex"fe 70 3a 0f 00",
|
||||
UInt64(6211610197754262546L) -> hex"ff 12 34 56 78 90 12 34 56",
|
||||
UInt64.MaxValue -> hex"ff ff ff ff ff ff ff ff ff"
|
||||
).mapValues(_.toBitVector)
|
||||
|
||||
for ((uint, ref) <- expected) {
|
||||
val encoded = varint.encode(uint).require
|
||||
assert(ref === encoded, ref)
|
||||
val decoded = varint.decode(encoded).require.value
|
||||
assert(uint === decoded, uint)
|
||||
}
|
||||
}
|
||||
|
||||
test("decode invalid varint") {
|
||||
val testCases = Seq(
|
||||
hex"fd", // truncated
|
||||
hex"fe 01", // truncated
|
||||
hex"fe", // truncated
|
||||
hex"fe 12 34", // truncated
|
||||
hex"ff", // truncated
|
||||
hex"ff 12 34 56 78", // truncated
|
||||
hex"fd 00 00", // not minimally-encoded
|
||||
hex"fd fc 00", // not minimally-encoded
|
||||
hex"fe 00 00 00 00", // not minimally-encoded
|
||||
hex"fe ff ff 00 00", // not minimally-encoded
|
||||
hex"ff 00 00 00 00 00 00 00 00", // not minimally-encoded
|
||||
hex"ff ff ff ff 01 00 00 00 00", // not minimally-encoded
|
||||
hex"ff ff ff ff ff 00 00 00 00" // not minimally-encoded
|
||||
).map(_.toBitVector)
|
||||
|
||||
for (testCase <- testCases) {
|
||||
assert(varint.decode(testCase).isFailure, testCase.toByteVector)
|
||||
}
|
||||
}
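For readers unfamiliar with the format exercised above: these vectors follow Bitcoin-style CompactSize encoding with little-endian length words. A hedged, standalone encoder sketch (an illustrative helper, not eclair's `varint` codec) reproduces the valid vectors and the minimal-encoding rule:

```scala
import scodec.bits.{ByteOrdering, ByteVector}
import scodec.bits.HexStringSyntax

object CompactSizeSketch extends App {
  // Covers non-negative Long values; multi-byte length words are little-endian, as in the vectors above.
  def compactSize(n: Long): ByteVector = {
    require(n >= 0)
    if (n < 0xfd) ByteVector.fromByte(n.toByte)
    else if (n <= 0xffff) ByteVector.fromByte(0xfd.toByte) ++ ByteVector.fromLong(n, 2, ByteOrdering.LittleEndian)
    else if (n <= 0xffffffffL) ByteVector.fromByte(0xfe.toByte) ++ ByteVector.fromLong(n, 4, ByteOrdering.LittleEndian)
    else ByteVector.fromByte(0xff.toByte) ++ ByteVector.fromLong(n, 8, ByteOrdering.LittleEndian)
  }

  assert(compactSize(550) == hex"fd 26 02")
  assert(compactSize(998000) == hex"fe 70 3a 0f 00")
  // Minimal encoding: 252 must be a single byte, never "fd fc 00".
  assert(compactSize(252) == hex"fc")
}
```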
|
||||
|
||||
test("encode/decode with varlong codec") {
|
||||
val expected = Map(
|
||||
0L -> hex"00",
|
||||
42L -> hex"2a",
|
||||
253L -> hex"fd fd 00",
|
||||
254L -> hex"fd fe 00",
|
||||
255L -> hex"fd ff 00",
|
||||
550L -> hex"fd 26 02",
|
||||
998000L -> hex"fe 70 3a 0f 00",
|
||||
6211610197754262546L -> hex"ff 12 34 56 78 90 12 34 56",
|
||||
Long.MaxValue -> hex"ff ff ff ff ff ff ff ff 7f"
|
||||
).mapValues(_.toBitVector)
|
||||
|
||||
for ((long, ref) <- expected) {
|
||||
val encoded = varintoverflow.encode(long).require
|
||||
assert(ref === encoded, ref)
|
||||
val decoded = varintoverflow.decode(encoded).require.value
|
||||
assert(long === decoded, long)
|
||||
}
|
||||
}
|
||||
|
||||
test("decode invalid varlong") {
|
||||
val testCases = Seq(
|
||||
hex"ff 00 00 00 00 00 00 00 80",
|
||||
hex"ff ff ff ff ff ff ff ff ff"
|
||||
).map(_.toBitVector)
|
||||
|
||||
for (testCase <- testCases) {
|
||||
assert(varintoverflow.decode(testCase).isFailure, testCase.toByteVector)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode with rgb codec") {
|
||||
val color = Color(47.toByte, 255.toByte, 142.toByte)
|
||||
val bin = rgb.encode(color).require
|
||||
assert(bin === hex"2f ff 8e".toBitVector)
|
||||
val color2 = rgb.decode(bin).require.value
|
||||
assert(color === color2)
|
||||
}
|
||||
|
||||
test("encode/decode all kind of IPv6 addresses with ipv6address codec") {
|
||||
{
|
||||
// IPv4 mapped
|
||||
val bin = hex"00000000000000000000ffffae8a0b08".toBitVector
|
||||
val ipv6 = Inet6Address.getByAddress(null, bin.toByteArray, null)
|
||||
val bin2 = ipv6address.encode(ipv6).require
|
||||
assert(bin === bin2)
|
||||
}
|
||||
|
||||
{
|
||||
// regular IPv6 address
|
||||
val ipv6 = InetAddresses.forString("1080:0:0:0:8:800:200C:417A").asInstanceOf[Inet6Address]
|
||||
val bin = ipv6address.encode(ipv6).require
|
||||
val ipv62 = ipv6address.decode(bin).require.value
|
||||
assert(ipv6 === ipv62)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode with nodeaddress codec") {
|
||||
{
|
||||
val ipv4addr = InetAddress.getByAddress(Array[Byte](192.toByte, 168.toByte, 1.toByte, 42.toByte)).asInstanceOf[Inet4Address]
|
||||
val nodeaddr = IPv4(ipv4addr, 4231)
|
||||
val bin = nodeaddress.encode(nodeaddr).require
|
||||
assert(bin === hex"01 C0 A8 01 2A 10 87".toBitVector)
|
||||
val nodeaddr2 = nodeaddress.decode(bin).require.value
|
||||
assert(nodeaddr === nodeaddr2)
|
||||
}
|
||||
{
|
||||
val ipv6addr = InetAddress.getByAddress(hex"2001 0db8 0000 85a3 0000 0000 ac1f 8001".toArray).asInstanceOf[Inet6Address]
|
||||
val nodeaddr = IPv6(ipv6addr, 4231)
|
||||
val bin = nodeaddress.encode(nodeaddr).require
|
||||
assert(bin === hex"02 2001 0db8 0000 85a3 0000 0000 ac1f 8001 1087".toBitVector)
|
||||
val nodeaddr2 = nodeaddress.decode(bin).require.value
|
||||
assert(nodeaddr === nodeaddr2)
|
||||
}
|
||||
{
|
||||
val nodeaddr = Tor2("z4zif3fy7fe7bpg3", 4231)
|
||||
val bin = nodeaddress.encode(nodeaddr).require
|
||||
assert(bin === hex"03 cf3282ecb8f949f0bcdb 1087".toBitVector)
|
||||
val nodeaddr2 = nodeaddress.decode(bin).require.value
|
||||
assert(nodeaddr === nodeaddr2)
|
||||
}
|
||||
{
|
||||
val nodeaddr = Tor3("mrl2d3ilhctt2vw4qzvmz3etzjvpnc6dczliq5chrxetthgbuczuggyd", 4231)
|
||||
val bin = nodeaddress.encode(nodeaddr).require
|
||||
assert(bin === hex"04 6457a1ed0b38a73d56dc866accec93ca6af68bc316568874478dc9399cc1a0b3431b03 1087".toBitVector)
|
||||
val nodeaddr2 = nodeaddress.decode(bin).require.value
|
||||
assert(nodeaddr === nodeaddr2)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode with private key codec") {
|
||||
val value = PrivateKey(randomBytes32)
|
||||
val wire = privateKey.encode(value).require
|
||||
assert(wire.length == 256)
|
||||
val value1 = privateKey.decode(wire).require.value
|
||||
assert(value1 == value)
|
||||
}
|
||||
|
||||
test("encode/decode with public key codec") {
|
||||
val value = PrivateKey(randomBytes32).publicKey
|
||||
val wire = CommonCodecs.publicKey.encode(value).require
|
||||
assert(wire.length == 33 * 8)
|
||||
val value1 = CommonCodecs.publicKey.decode(wire).require.value
|
||||
assert(value1 == value)
|
||||
}
|
||||
|
||||
test("encode/decode with zeropaddedstring codec") {
|
||||
val c = zeropaddedstring(32)
|
||||
|
||||
{
|
||||
val alias = "IRATEMONK"
|
||||
val bin = c.encode(alias).require
|
||||
assert(bin === BitVector(alias.getBytes("UTF-8") ++ Array.fill[Byte](32 - alias.length)(0)))
|
||||
val alias2 = c.decode(bin).require.value
|
||||
assert(alias === alias2)
|
||||
}
|
||||
|
||||
{
|
||||
val alias = "this-alias-is-exactly-32-B-long."
|
||||
val bin = c.encode(alias).require
|
||||
assert(bin === BitVector(alias.getBytes("UTF-8") ++ Array.fill[Byte](32 - alias.length)(0)))
|
||||
val alias2 = c.decode(bin).require.value
|
||||
assert(alias === alias2)
|
||||
}
|
||||
|
||||
{
|
||||
val alias = "this-alias-is-far-too-long-because-we-are-limited-to-32-bytes"
|
||||
assert(c.encode(alias).isFailure)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode UInt64") {
|
||||
val codec = uint64
|
||||
Seq(
|
||||
UInt64(hex"ffffffffffffffff"),
|
||||
UInt64(hex"fffffffffffffffe"),
|
||||
UInt64(hex"efffffffffffffff"),
|
||||
UInt64(hex"effffffffffffffe")
|
||||
).map(value => {
|
||||
assert(codec.decode(codec.encode(value).require).require.value === value)
|
||||
})
|
||||
}
|
||||
|
||||
}
|
|
@@ -16,9 +16,8 @@
|
|||
|
||||
package fr.acinq.eclair.wire
|
||||
|
||||
import java.net.{Inet4Address, Inet6Address, InetAddress}
|
||||
import java.net.{Inet4Address, InetAddress}
|
||||
|
||||
import com.google.common.net.InetAddresses
|
||||
import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey}
|
||||
import fr.acinq.bitcoin.{Block, ByteVector32, ByteVector64}
|
||||
import fr.acinq.eclair._
|
||||
|
@@ -44,132 +43,6 @@ class LightningMessageCodecsSpec extends FunSuite {
|
|||
|
||||
def publicKey(fill: Byte) = PrivateKey(ByteVector.fill(32)(fill)).publicKey
|
||||
|
||||
test("encode/decode with uint64 codec") {
|
||||
val expected = Map(
|
||||
UInt64(0) -> hex"00 00 00 00 00 00 00 00",
|
||||
UInt64(42) -> hex"00 00 00 00 00 00 00 2a",
|
||||
UInt64(hex"ffffffffffffffff") -> hex"ff ff ff ff ff ff ff ff"
|
||||
).mapValues(_.toBitVector)
|
||||
for ((uint, ref) <- expected) {
|
||||
val encoded = uint64ex.encode(uint).require
|
||||
assert(ref === encoded)
|
||||
val decoded = uint64ex.decode(encoded).require.value
|
||||
assert(uint === decoded)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode with rgb codec") {
|
||||
val color = Color(47.toByte, 255.toByte, 142.toByte)
|
||||
val bin = rgb.encode(color).require
|
||||
assert(bin === hex"2f ff 8e".toBitVector)
|
||||
val color2 = rgb.decode(bin).require.value
|
||||
assert(color === color2)
|
||||
}
|
||||
|
||||
test("encode/decode all kind of IPv6 addresses with ipv6address codec") {
|
||||
{
|
||||
// IPv4 mapped
|
||||
val bin = hex"00000000000000000000ffffae8a0b08".toBitVector
|
||||
val ipv6 = Inet6Address.getByAddress(null, bin.toByteArray, null)
|
||||
val bin2 = ipv6address.encode(ipv6).require
|
||||
assert(bin === bin2)
|
||||
}
|
||||
|
||||
{
|
||||
// regular IPv6 address
|
||||
val ipv6 = InetAddresses.forString("1080:0:0:0:8:800:200C:417A").asInstanceOf[Inet6Address]
|
||||
val bin = ipv6address.encode(ipv6).require
|
||||
val ipv62 = ipv6address.decode(bin).require.value
|
||||
assert(ipv6 === ipv62)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode with nodeaddress codec") {
|
||||
{
|
||||
val ipv4addr = InetAddress.getByAddress(Array[Byte](192.toByte, 168.toByte, 1.toByte, 42.toByte)).asInstanceOf[Inet4Address]
|
||||
val nodeaddr = IPv4(ipv4addr, 4231)
|
||||
val bin = nodeaddress.encode(nodeaddr).require
|
||||
assert(bin === hex"01 C0 A8 01 2A 10 87".toBitVector)
|
||||
val nodeaddr2 = nodeaddress.decode(bin).require.value
|
||||
assert(nodeaddr === nodeaddr2)
|
||||
}
|
||||
{
|
||||
val ipv6addr = InetAddress.getByAddress(hex"2001 0db8 0000 85a3 0000 0000 ac1f 8001".toArray).asInstanceOf[Inet6Address]
|
||||
val nodeaddr = IPv6(ipv6addr, 4231)
|
||||
val bin = nodeaddress.encode(nodeaddr).require
|
||||
assert(bin === hex"02 2001 0db8 0000 85a3 0000 0000 ac1f 8001 1087".toBitVector)
|
||||
val nodeaddr2 = nodeaddress.decode(bin).require.value
|
||||
assert(nodeaddr === nodeaddr2)
|
||||
}
|
||||
{
|
||||
val nodeaddr = Tor2("z4zif3fy7fe7bpg3", 4231)
|
||||
val bin = nodeaddress.encode(nodeaddr).require
|
||||
assert(bin === hex"03 cf3282ecb8f949f0bcdb 1087".toBitVector)
|
||||
val nodeaddr2 = nodeaddress.decode(bin).require.value
|
||||
assert(nodeaddr === nodeaddr2)
|
||||
}
|
||||
{
|
||||
val nodeaddr = Tor3("mrl2d3ilhctt2vw4qzvmz3etzjvpnc6dczliq5chrxetthgbuczuggyd", 4231)
|
||||
val bin = nodeaddress.encode(nodeaddr).require
|
||||
assert(bin === hex"04 6457a1ed0b38a73d56dc866accec93ca6af68bc316568874478dc9399cc1a0b3431b03 1087".toBitVector)
|
||||
val nodeaddr2 = nodeaddress.decode(bin).require.value
|
||||
assert(nodeaddr === nodeaddr2)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode with private key codec") {
|
||||
val value = PrivateKey(randomBytes32)
|
||||
val wire = LightningMessageCodecs.privateKey.encode(value).require
|
||||
assert(wire.length == 256)
|
||||
val value1 = LightningMessageCodecs.privateKey.decode(wire).require.value
|
||||
assert(value1 == value)
|
||||
}
|
||||
|
||||
test("encode/decode with public key codec") {
|
||||
val value = PrivateKey(randomBytes32).publicKey
|
||||
val wire = LightningMessageCodecs.publicKey.encode(value).require
|
||||
assert(wire.length == 33 * 8)
|
||||
val value1 = LightningMessageCodecs.publicKey.decode(wire).require.value
|
||||
assert(value1 == value)
|
||||
}
|
||||
|
||||
test("encode/decode with zeropaddedstring codec") {
|
||||
val c = zeropaddedstring(32)
|
||||
|
||||
{
|
||||
val alias = "IRATEMONK"
|
||||
val bin = c.encode(alias).require
|
||||
assert(bin === BitVector(alias.getBytes("UTF-8") ++ Array.fill[Byte](32 - alias.size)(0)))
|
||||
val alias2 = c.decode(bin).require.value
|
||||
assert(alias === alias2)
|
||||
}
|
||||
|
||||
{
|
||||
val alias = "this-alias-is-exactly-32-B-long."
|
||||
val bin = c.encode(alias).require
|
||||
assert(bin === BitVector(alias.getBytes("UTF-8") ++ Array.fill[Byte](32 - alias.size)(0)))
|
||||
val alias2 = c.decode(bin).require.value
|
||||
assert(alias === alias2)
|
||||
}
|
||||
|
||||
{
|
||||
val alias = "this-alias-is-far-too-long-because-we-are-limited-to-32-bytes"
|
||||
assert(c.encode(alias).isFailure)
|
||||
}
|
||||
}
|
||||
|
||||
test("encode/decode UInt64") {
|
||||
val codec = uint64ex
|
||||
Seq(
|
||||
UInt64(hex"ffffffffffffffff"),
|
||||
UInt64(hex"fffffffffffffffe"),
|
||||
UInt64(hex"efffffffffffffff"),
|
||||
UInt64(hex"effffffffffffffe")
|
||||
).map(value => {
|
||||
assert(codec.decode(codec.encode(value).require).require.value === value)
|
||||
})
|
||||
}
|
||||
|
||||
test("encode/decode live node_announcements") {
|
||||
val anns = List(
|
||||
hex"a58338c9660d135fd7d087eb62afd24a33562c54507a9334e79f0dc4f17d407e6d7c61f0e2f3d0d38599502f61704cf1ae93608df027014ade7ff592f27ce26900005acdf50702d2eabbbacc7c25bbd73b39e65d28237705f7bde76f557e94fb41cb18a9ec00841122116c6e302e646563656e7465722e776f726c64000000000000000000000000000000130200000000000000000000ffffae8a0b082607"
|
||||
|
@@ -189,7 +62,6 @@ class LightningMessageCodecsSpec extends FunSuite {
|
|||
}
|
||||
|
||||
test("encode/decode all channel messages") {
|
||||
|
||||
val open = OpenChannel(randomBytes32, randomBytes32, 3, 4, 5, UInt64(6), 7, 8, 9, 10, 11, publicKey(1), point(2), point(3), point(4), point(5), point(6), 0.toByte)
|
||||
val accept = AcceptChannel(randomBytes32, 3, UInt64(4), 5, 6, 7, 8, 9, publicKey(1), point(2), point(3), point(4), point(5), point(6))
|
||||
val funding_created = FundingCreated(randomBytes32, bin32(0), 3, randomBytes64)
|
||||
|
@@ -222,7 +94,7 @@ class LightningMessageCodecsSpec extends FunSuite {
|
|||
channel_announcement :: node_announcement :: channel_update :: gossip_timestamp_filter :: query_short_channel_id :: query_channel_range :: reply_channel_range :: announcement_signatures :: ping :: pong :: channel_reestablish :: Nil
|
||||
|
||||
msgs.foreach {
|
||||
case msg => {
|
||||
msg => {
|
||||
val encoded = lightningMessageCodec.encode(msg).require
|
||||
val decoded = lightningMessageCodec.decode(encoded).require
|
||||
assert(msg === decoded.value)
|
||||
|
@@ -245,38 +117,6 @@ class LightningMessageCodecsSpec extends FunSuite {
|
|||
}
|
||||
}
|
||||
|
||||
test("encode/decode using cached codec") {
|
||||
val codec = cachedLightningMessageCodec
|
||||
|
||||
val commit_sig = CommitSig(randomBytes32, randomBytes64, randomBytes64 :: randomBytes64 :: randomBytes64 :: Nil)
|
||||
val revoke_and_ack = RevokeAndAck(randomBytes32, scalar(0), point(1))
|
||||
val channel_announcement = ChannelAnnouncement(randomBytes64, randomBytes64, randomBytes64, randomBytes64, bin(7, 9), Block.RegtestGenesisBlock.hash, ShortChannelId(1), randomKey.publicKey, randomKey.publicKey, randomKey.publicKey, randomKey.publicKey)
|
||||
val node_announcement = NodeAnnouncement(randomBytes64, bin(0, 0), 1, randomKey.publicKey, Color(100.toByte, 200.toByte, 300.toByte), "node-alias", IPv4(InetAddress.getByAddress(Array[Byte](192.toByte, 168.toByte, 1.toByte, 42.toByte)).asInstanceOf[Inet4Address], 42000) :: Nil)
|
||||
val channel_update1 = ChannelUpdate(randomBytes64, Block.RegtestGenesisBlock.hash, ShortChannelId(1), 2, 1, 0, 3, 4, 5, 6, Some(50000000L))
|
||||
val channel_update2 = ChannelUpdate(randomBytes64, Block.RegtestGenesisBlock.hash, ShortChannelId(1), 2, 0, 0, 3, 4, 5, 6, None)
|
||||
val announcement_signatures = AnnouncementSignatures(randomBytes32, ShortChannelId(42), randomBytes64, randomBytes64)
|
||||
val ping = Ping(100, bin(10, 1))
|
||||
val pong = Pong(bin(10, 1))
|
||||
|
||||
val cached = channel_announcement :: node_announcement :: channel_update1 :: channel_update2 :: Nil
|
||||
val nonCached = commit_sig :: revoke_and_ack :: announcement_signatures :: ping :: pong :: Nil
|
||||
val msgs: List[LightningMessage] = cached ::: nonCached
|
||||
|
||||
msgs.foreach {
|
||||
case msg => {
|
||||
val encoded = codec.encode(msg).require
|
||||
val decoded = codec.decode(encoded).require
|
||||
assert(msg === decoded.value)
|
||||
}
|
||||
}
|
||||
|
||||
import scala.language.reflectiveCalls
|
||||
val cachedKeys = codec.cache.asMap().keySet()
|
||||
assert(cached.forall(msg => cachedKeys.contains(msg)))
|
||||
assert(nonCached.forall(msg => !cachedKeys.contains(msg)))
|
||||
|
||||
}
|
||||
|
||||
test("decode channel_update with htlc_maximum_msat") {
|
||||
// this was generated by c-lightning
|
||||
val bin = hex"010258fff7d0e987e2cdd560e3bb5a046b4efe7b26c969c2f51da1dceec7bcb8ae1b634790503d5290c1a6c51d681cf8f4211d27ed33a257dcc1102862571bf1792306226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f0005a100000200005bc75919010100060000000000000001000000010000000a000000003a699d00"
|
||||
|
|
|
@@ -0,0 +1,157 @@
|
|||
/*
|
||||
* Copyright 2019 ACINQ SAS
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package fr.acinq.eclair.wire
|
||||
|
||||
import fr.acinq.bitcoin.Crypto.PublicKey
|
||||
import fr.acinq.eclair.{ShortChannelId, UInt64}
|
||||
import fr.acinq.eclair.UInt64.Conversions._
|
||||
import fr.acinq.eclair.wire.CommonCodecs.{publicKey, shortchannelid, uint64, varint}
|
||||
import fr.acinq.eclair.wire.TlvCodecs._
|
||||
import org.scalatest.FunSuite
|
||||
import scodec.bits.HexStringSyntax
|
||||
import scodec.codecs._
|
||||
import scodec.Codec
|
||||
|
||||
/**
|
||||
* Created by t-bast on 20/06/2019.
|
||||
*/
|
||||
|
||||
class TlvCodecsSpec extends FunSuite {
|
||||
|
||||
import TlvCodecsSpec._
|
||||
|
||||
test("encode/decode tlv") {
|
||||
val testCases = Seq(
|
||||
(hex"01 08 000000000000002a", TestType1(42)),
|
||||
(hex"02 08 0000000000000226", TestType2(ShortChannelId(550))),
|
||||
(hex"03 31 02eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619 0000000000000231 0000000000000451", TestType3(PublicKey(hex"02eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619"), 561, 1105)),
|
||||
(hex"ff1234567890123456 fd0001 10101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010010101010101", GenericTlv(6211610197754262546L, hex"10101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010010101010101"))
|
||||
)
|
||||
|
||||
for ((bin, expected) <- testCases) {
|
||||
val decoded = testTlvCodec.decode(bin.toBitVector).require.value.asInstanceOf[Tlv]
|
||||
assert(decoded === expected)
|
||||
val encoded = testTlvCodec.encode(expected).require.toByteVector
|
||||
assert(encoded === bin)
|
||||
}
|
||||
}
|
||||
|
||||
test("decode invalid tlv") {
|
||||
val testCases = Seq(
|
||||
hex"fd02", // type truncated
|
||||
hex"fd022a", // truncated after type
|
||||
hex"fd0100", // not minimally encoded type
|
||||
hex"2a fd02", // length truncated
|
||||
hex"2a fd0226", // truncated after length
|
||||
hex"2a fe01010000", // not minimally encoded length
|
||||
hex"2a fd2602 0231", // value truncated
|
||||
hex"02 01 2a", // short channel id too short
|
||||
hex"02 09 010101010101010101", // short channel id length too big
|
||||
hex"2a ff0000000000000080" // invalid length (too big to fit inside a long)
|
||||
)
|
||||
|
||||
for (testCase <- testCases) {
|
||||
assert(testTlvCodec.decode(testCase.toBitVector).isFailure)
|
||||
}
|
||||
}
|
||||
|
||||
test("decode invalid tlv stream") {
|
||||
val testCases = Seq(
|
||||
hex"0108000000000000002a 02", // valid tlv record followed by invalid tlv record (only type, length and value are missing)
|
||||
hex"02080000000000000226 0108000000000000002a", // valid tlv records but invalid ordering
|
||||
hex"02080000000000000231 02080000000000000451", // duplicate tlv type
|
||||
hex"0108000000000000002a 2a0101", // unknown even type
|
||||
hex"0a080000000000000231 0b0400000451" // valid tlv records but from different namespace
|
||||
)
|
||||
|
||||
for (testCase <- testCases) {
|
||||
assert(tlvStream(testTlvCodec).decode(testCase.toBitVector).isFailure, testCase)
|
||||
}
|
||||
}
|
||||
|
||||
test("create invalid tlv stream") {
|
||||
assertThrows[IllegalArgumentException](TlvStream(Seq(GenericTlv(42, hex"2a")))) // unknown even type
|
||||
assertThrows[IllegalArgumentException](TlvStream(Seq(TestType1(561), TestType2(ShortChannelId(1105)), GenericTlv(42, hex"2a")))) // unknown even type
|
||||
assertThrows[IllegalArgumentException](TlvStream(Seq(TestType1(561), TestType1(1105)))) // duplicate type
|
||||
assertThrows[IllegalArgumentException](TlvStream(Seq(TestType2(ShortChannelId(1105)), TestType1(561)))) // invalid ordering
|
||||
}
|
||||
|
||||
test("encode/decode tlv stream") {
|
||||
val bin = hex"01080000000000000231 02080000000000000451 033102eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f28368661900000000000002310000000000000451"
|
||||
val expected = Seq(
|
||||
TestType1(561),
|
||||
TestType2(ShortChannelId(1105)),
|
||||
TestType3(PublicKey(hex"02eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619"), 561, 1105)
|
||||
)
|
||||
|
||||
val decoded = tlvStream(testTlvCodec).decode(bin.toBitVector).require.value
|
||||
assert(decoded === TlvStream(expected))
|
||||
|
||||
val encoded = tlvStream(testTlvCodec).encode(TlvStream(expected)).require.toByteVector
|
||||
assert(encoded === bin)
|
||||
}
|
||||
|
||||
test("encode/decode tlv stream with unknown odd type") {
|
||||
val bin = hex"01080000000000000231 0b0400000451 0d02002a"
|
||||
val expected = Seq(
|
||||
TestType1(561),
|
||||
GenericTlv(11, hex"00000451"),
|
||||
TestType13(42)
|
||||
)
|
||||
|
||||
val decoded = tlvStream(testTlvCodec).decode(bin.toBitVector).require.value
|
||||
assert(decoded === TlvStream(expected))
|
||||
|
||||
val encoded = tlvStream(testTlvCodec).encode(TlvStream(expected)).require.toByteVector
|
||||
assert(encoded === bin)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
object TlvCodecsSpec {
|
||||
|
||||
// @formatter:off
|
||||
sealed trait TestTlv extends Tlv
|
||||
case class TestType1(uintValue: UInt64) extends TestTlv { override val `type` = UInt64(1) }
|
||||
case class TestType2(shortChannelId: ShortChannelId) extends TestTlv { override val `type` = UInt64(2) }
|
||||
case class TestType3(nodeId: PublicKey, value1: UInt64, value2: UInt64) extends TestTlv { override val `type` = UInt64(3) }
|
||||
case class TestType13(intValue: Int) extends TestTlv { override val `type` = UInt64(13) }
|
||||
|
||||
val testCodec1: Codec[TestType1] = (("length" | constant(hex"08")) :: ("value" | uint64)).as[TestType1]
|
||||
val testCodec2: Codec[TestType2] = (("length" | constant(hex"08")) :: ("short_channel_id" | shortchannelid)).as[TestType2]
|
||||
val testCodec3: Codec[TestType3] = (("length" | constant(hex"31")) :: ("node_id" | publicKey) :: ("value_1" | uint64) :: ("value_2" | uint64)).as[TestType3]
|
||||
val testCodec13: Codec[TestType13] = (("length" | constant(hex"02")) :: ("value" | uint16)).as[TestType13]
|
||||
val testTlvCodec = tlvFallback(discriminated[Tlv].by(varint)
|
||||
.typecase(1, testCodec1)
|
||||
.typecase(2, testCodec2)
|
||||
.typecase(3, testCodec3)
|
||||
.typecase(13, testCodec13)
|
||||
)
|
||||
|
||||
sealed trait OtherTlv extends Tlv
|
||||
case class OtherType1(uintValue: UInt64) extends OtherTlv { override val `type` = UInt64(10) }
|
||||
case class OtherType2(smallValue: Long) extends OtherTlv { override val `type` = UInt64(11) }
|
||||
|
||||
val otherCodec1: Codec[OtherType1] = (("length" | constant(hex"08")) :: ("value" | uint64)).as[OtherType1]
|
||||
val otherCodec2: Codec[OtherType2] = (("length" | constant(hex"04")) :: ("value" | uint32)).as[OtherType2]
|
||||
val otherTlvCodec = tlvFallback(discriminated[Tlv].by(varint)
|
||||
.typecase(10, otherCodec1)
|
||||
.typecase(11, otherCodec2)
|
||||
)
|
||||
// @formatter:on
|
||||
|
||||
}
|
5
pom.xml
|
@@ -128,6 +128,11 @@
|
|||
<!-- needed to compile Scala code on JDK9+ -->
|
||||
<arg>-nobootcp</arg>
|
||||
</args>
|
||||
<jvmArgs>
|
||||
<jvmArg>-Xmx1024m</jvmArg>
|
||||
<jvmArg>-Xms1024m</jvmArg>
|
||||
<jvmArg>-Xss32m</jvmArg>
|
||||
</jvmArgs>
|
||||
<scalaCompatVersion>${scala.version.short}</scalaCompatVersion>
|
||||
</configuration>
|
||||
<executions>
|
||||
|
|