Mirror of https://github.com/ACINQ/eclair.git (synced 2024-11-20 02:27:32 +01:00)

commit 6ebe5b4ff2
parent b8fac57ac0

    made commitment functions no-op when replaying known messages, improved fuzzy tests
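The common thread in the diff below: the Commitments.receive* functions (receiveFulfill, receiveFail, receiveCommit, receiveRevocation) now return Either[Commitments, T], where Left(commitments) means "this message was already applied, keep the state unchanged" and Right(...) carries the updated state, while receiveAdd keeps returning Commitments but likewise returns its input unchanged for a replayed add. A condensed sketch of that pattern (not itself part of the diff, reusing the helper names that appear below):

    // replay-tolerant receive: Left = already-known message (no-op), Right = state actually advanced
    def receiveFulfill(commitments: Commitments, fulfill: UpdateFulfillHtlc): Either[Commitments, Commitments] =
      if (isOldFulfill(commitments, fulfill)) Left(commitments) // replayed message: state untouched
      else getHtlcCrossSigned(commitments, OUT, fulfill.id) match {
        case Some(htlc) if htlc.paymentHash == sha256(fulfill.paymentPreimage) =>
          Right(addRemoteProposal(commitments, fulfill)) // genuinely new: record the remote update
        case Some(_) => throw new RuntimeException(s"invalid htlc preimage for htlc id=${fulfill.id}")
        case None => throw new RuntimeException(s"unknown htlc id=${fulfill.id}")
      }

The channel FSM then matches on Success(Right(...)) to relay and update its state, on Success(Left(_)) to simply stay, and on Failure(cause) to treat the message as a protocol error.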
@@ -8,7 +8,7 @@ import fr.acinq.eclair.blockchain._
 import fr.acinq.eclair.blockchain.peer.CurrentBlockCount
 import fr.acinq.eclair.channel.Helpers.{Closing, Funding}
 import fr.acinq.eclair.crypto.{Generators, ShaChain}
-import fr.acinq.eclair.payment.Binding
+import fr.acinq.eclair.payment._
 import fr.acinq.eclair.router.Announcements
 import fr.acinq.eclair.transactions._
 import fr.acinq.eclair.wire._
@@ -391,25 +391,20 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay
     case Event(c: CMD_ADD_HTLC, d: DATA_NORMAL) if d.unackedShutdown.isDefined =>
       handleCommandError(sender, new RuntimeException("cannot send new htlcs, closing in progress"))

-    case Event(c@CMD_ADD_HTLC(amountMsat, rHash, expiry, route, origin, do_commit), d@DATA_NORMAL(params, commitments, _)) =>
+    case Event(c@CMD_ADD_HTLC(amountMsat, rHash, expiry, route, downstream_opt, do_commit), d@DATA_NORMAL(params, commitments, _)) =>
       Try(Commitments.sendAdd(commitments, c)) match {
         case Success((commitments1, add)) =>
-          relayer ! Binding(add, origin)
+          val origin = downstream_opt.map(Relayed(_)).getOrElse(Local(sender))
+          relayer ! Bind(add, origin)
           if (do_commit) self ! CMD_SIGN
           handleCommandSuccess(sender, add, d.copy(commitments = commitments1))
         case Failure(cause) => handleCommandError(sender, cause)
       }

     case Event(add: UpdateAddHtlc, d@DATA_NORMAL(params, commitments, _)) =>
-      if (Commitments.isOldAdd(d.commitments, add)) {
-        log.warning(s"ignoring old add")
-        stay
-      } else {
-        Try(Commitments.receiveAdd(commitments, add)) match {
-          case Success(commitments1) =>
-            stay using d.copy(commitments = commitments1)
-          case Failure(cause) => handleLocalError(cause, d)
-        }
+      Try(Commitments.receiveAdd(commitments, add)) match {
+        case Success(commitments1) => stay using d.copy(commitments = commitments1)
+        case Failure(cause) => handleLocalError(cause, d)
       }

     case Event(c@CMD_FULFILL_HTLC(id, r, do_commit), d: DATA_NORMAL) =>
@@ -421,19 +416,12 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay
       }

     case Event(fulfill@UpdateFulfillHtlc(_, id, r), d@DATA_NORMAL(params, commitments, _)) =>
-      if (Commitments.isOldFulfill(d.commitments, fulfill)) {
-        log.warning(s"ignoring old fulfill")
-        stay
-      } /*else if (!Commitments.isFulfillCrossSigned(d.commitments, fulfill)) {
-        log.warning(s"ignoring fulfill not cross-signed (old?)")
-        stay
-      }*/ else {
-        Try(Commitments.receiveFulfill(d.commitments, fulfill)) match {
-          case Success((commitments1, htlc)) =>
-            relayer ! (htlc, fulfill)
-            stay using d.copy(commitments = commitments1)
-          case Failure(cause) => handleLocalError(cause, d)
-        }
+      Try(Commitments.receiveFulfill(d.commitments, fulfill)) match {
+        case Success(Right(commitments1)) =>
+          relayer ! ForwardFulfill(fulfill)
+          stay using d.copy(commitments = commitments1)
+        case Success(Left(_)) => stay
+        case Failure(cause) => handleLocalError(cause, d)
       }

     case Event(c@CMD_FAIL_HTLC(id, reason, do_commit), d: DATA_NORMAL) =>
@@ -446,9 +434,10 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay

     case Event(fail@UpdateFailHtlc(_, id, reason), d@DATA_NORMAL(params, _, _)) =>
       Try(Commitments.receiveFail(d.commitments, fail)) match {
-        case Success((commitments1, htlc)) =>
-          relayer ! (htlc, fail)
+        case Success(Right(commitments1)) =>
+          relayer ! ForwardFail(fail)
           stay using d.copy(commitments = commitments1)
+        case Success(Left(_)) => stay
         case Failure(cause) => handleLocalError(cause, d)
       }

@@ -470,47 +459,43 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay
       }

     case Event(commit@CommitSig(_, theirSig, theirHtlcSigs), d: DATA_NORMAL) =>
-      if (Commitments.isOldCommit(d.commitments, commit)) {
-        log.warning(s"ignoring old commit")
-        stay
-      } else {
-        Try(Commitments.receiveCommit(d.commitments, commit)) match {
-          case Success((commitments1, revocation)) =>
-            remote ! revocation
-            log.debug(s"received a new sig, spec:\n${Commitments.specsToString(commitments1)}")
-            if (Commitments.localHasChanges(commitments1)) {
-              // if we have newly acknowledged changes let's sign them
-              self ! CMD_SIGN
-            }
-            context.system.eventStream.publish(ChannelSignatureReceived(self, commitments1))
-            stay using d.copy(commitments = commitments1)
-          case Failure(cause) => handleLocalError(cause, d)
-        }
+      Try(Commitments.receiveCommit(d.commitments, commit)) match {
+        case Success(Right((commitments1, revocation))) =>
+          remote ! revocation
+          log.debug(s"received a new sig, spec:\n${Commitments.specsToString(commitments1)}")
+          if (Commitments.localHasChanges(commitments1)) {
+            // if we have newly acknowledged changes let's sign them
+            self ! CMD_SIGN
+          }
+          context.system.eventStream.publish(ChannelSignatureReceived(self, commitments1))
+          stay using d.copy(commitments = commitments1)
+        case Success(Left(_)) =>
+          // this was an old commit, nothing to do
+          stay
+        case Failure(cause) => handleLocalError(cause, d)
       }

     case Event(revocation: RevokeAndAck, d: DATA_NORMAL) =>
-      if (Commitments.isOldRevocation(d.commitments, revocation)) {
-        log.warning(s"ignoring old revocation")
-        stay
-      } else {
-        // we received a revocation because we sent a signature
-        // => all our changes have been acked
-        Try(Commitments.receiveRevocation(d.commitments, revocation)) match {
-          case Success(commitments1) =>
-            // we forward HTLCs only when they have been committed by both sides
-            // it always happen when we receive a revocation, because, we always sign our changes before they sign them
-            d.commitments.remoteChanges.signed.collect {
-              case htlc: UpdateAddHtlc =>
-                log.debug(s"relaying $htlc")
-                relayer ! htlc
-            }
-            log.debug(s"received a new rev, spec:\n${Commitments.specsToString(commitments1)}")
-            if (Commitments.localHasChanges(commitments1) && d.commitments.remoteNextCommitInfo.left.map(_.reSignAsap) == Left(true)) {
-              self ! CMD_SIGN
-            }
-            stay using d.copy(commitments = commitments1)
-          case Failure(cause) => handleLocalError(cause, d)
-        }
+      // we received a revocation because we sent a signature
+      // => all our changes have been acked
+      Try(Commitments.receiveRevocation(d.commitments, revocation)) match {
+        case Success(Right(commitments1)) =>
+          // we forward HTLCs only when they have been committed by both sides
+          // it always happen when we receive a revocation, because, we always sign our changes before they sign them
+          d.commitments.remoteChanges.signed.collect {
+            case htlc: UpdateAddHtlc =>
+              log.debug(s"relaying $htlc")
+              relayer ! ForwardAdd(htlc)
+          }
+          log.debug(s"received a new rev, spec:\n${Commitments.specsToString(commitments1)}")
+          if (Commitments.localHasChanges(commitments1) && d.commitments.remoteNextCommitInfo.left.map(_.reSignAsap) == Left(true)) {
+            self ! CMD_SIGN
+          }
+          stay using d.copy(commitments = commitments1)
+        case Success(Left(_)) =>
+          // this was an old revocation, nothing to do
+          stay
+        case Failure(cause) => handleLocalError(cause, d)
       }

     case Event(CMD_CLOSE(ourScriptPubKey_opt), d: DATA_NORMAL) if d.unackedShutdown.isDefined =>
@@ -587,9 +572,10 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay

     case Event(fulfill@UpdateFulfillHtlc(_, id, r), d: DATA_SHUTDOWN) =>
       Try(Commitments.receiveFulfill(d.commitments, fulfill)) match {
-        case Success((commitments1, htlc)) =>
-          relayer ! (htlc, fulfill)
+        case Success(Right(commitments1)) =>
+          relayer ! ForwardFulfill(fulfill)
           stay using d.copy(commitments = commitments1)
+        case Success(Left(_)) => stay
         case Failure(cause) => handleLocalError(cause, d)
       }

@@ -601,9 +587,10 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay

     case Event(fail@UpdateFailHtlc(_, id, reason), d: DATA_SHUTDOWN) =>
       Try(Commitments.receiveFail(d.commitments, fail)) match {
-        case Success((commitments1, htlc)) =>
-          relayer ! (htlc, fail)
+        case Success(Right(commitments1)) =>
+          relayer ! ForwardFail(fail)
           stay using d.copy(commitments = commitments1)
+        case Success(Left(_)) => stay
         case Failure(cause) => handleLocalError(cause, d)
       }

@@ -627,13 +614,13 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay
     case Event(msg@CommitSig(_, theirSig, theirHtlcSigs), d@DATA_SHUTDOWN(params, commitments, localShutdown, remoteShutdown)) =>
       // TODO: we might have to propagate htlcs upstream depending on the outcome of https://github.com/ElementsProject/lightning/issues/29
       Try(Commitments.receiveCommit(d.commitments, msg)) match {
-        case Success((commitments1, revocation)) if commitments1.hasNoPendingHtlcs =>
+        case Success(Right((commitments1, revocation))) if commitments1.hasNoPendingHtlcs =>
           remote ! revocation
           val closingSigned = Closing.makeFirstClosingTx(params, commitments1, localShutdown.scriptPubKey, remoteShutdown.scriptPubKey)
           remote ! closingSigned
           log.debug(s"received a new sig, switching to NEGOTIATING spec:\n${Commitments.specsToString(commitments1)}")
           goto(NEGOTIATING) using DATA_NEGOTIATING(params, commitments1, localShutdown, remoteShutdown, closingSigned)
-        case Success((commitments1, revocation)) =>
+        case Success(Right((commitments1, revocation))) =>
           remote ! revocation
           if (Commitments.localHasChanges(commitments1)) {
             // if we have newly acknowledged changes let's sign them
@@ -642,6 +629,9 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay
           log.debug(s"received a new sig, spec:\n${Commitments.specsToString(commitments1)}")
           context.system.eventStream.publish(ChannelSignatureReceived(self, commitments1))
           stay using d.copy(commitments = commitments1)
+        case Success(Left(_)) =>
+          // this was an old commit, nothing to do
+          stay
         case Failure(cause) => handleLocalError(cause, d)
       }

@@ -649,18 +639,21 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay
       // we received a revocation because we sent a signature
       // => all our changes have been acked
       Try(Commitments.receiveRevocation(d.commitments, msg)) match {
-        case Success(commitments1) if commitments1.hasNoPendingHtlcs =>
+        case Success(Right(commitments1)) if commitments1.hasNoPendingHtlcs =>
           val closingSigned = Closing.makeFirstClosingTx(params, commitments1, localShutdown.scriptPubKey, remoteShutdown.scriptPubKey)

           remote ! closingSigned
           log.debug(s"received a new rev, switching to NEGOTIATING spec:\n${Commitments.specsToString(commitments1)}")
           goto(NEGOTIATING) using DATA_NEGOTIATING(params, commitments1, localShutdown, remoteShutdown, closingSigned)
-        case Success(commitments1) =>
+        case Success(Right(commitments1)) =>
           if (Commitments.localHasChanges(commitments1) && d.commitments.remoteNextCommitInfo.left.map(_.reSignAsap) == Left(true)) {
             self ! CMD_SIGN
           }
           log.debug(s"received a new rev, spec:\n${Commitments.specsToString(commitments1)}")
           stay using d.copy(commitments = commitments1)
+        case Success(Left(_)) =>
+          // this was an old revocation, nothing to do
+          stay
         case Failure(cause) => handleLocalError(cause, d)
       }

@@ -814,9 +807,9 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay

     case Event(INPUT_RECONNECTED(r), d@DATA_NORMAL(_, commitments, _)) =>
       remote = r
-      log.info(s"resuming with ${Commitments.toString(commitments)}")
+      log.info(s"resuming with ${Commitments.changesToString(commitments)}")
       //val resend = commitments.unackedMessages.filterNot(_.isInstanceOf[RevokeAndAck])
-      val resend = commitments.unackedMessages//.filterNot(_.isInstanceOf[RevokeAndAck])
+      val resend = commitments.unackedMessages //.filterNot(_.isInstanceOf[RevokeAndAck])
       log.info(s"re-sending: ${resend.map(Commitments.msg2String(_)).mkString(" ")}")
       resend.foreach(remote ! _)
       if (Commitments.localHasChanges(commitments)) {
@@ -825,11 +818,12 @@ class Channel(val r: ActorRef, val blockchain: ActorRef, router: ActorRef, relay
       }
       goto(NORMAL)

-    case Event(c@CMD_ADD_HTLC(amountMsat, rHash, expiry, route, origin, do_commit), d@DATA_NORMAL(params, commitments, _)) =>
+    case Event(c@CMD_ADD_HTLC(amountMsat, rHash, expiry, route, downstream_opt, do_commit), d@DATA_NORMAL(params, commitments, _)) =>
       log.info(s"we are disconnected so we just include the add in our commitments")
       Try(Commitments.sendAdd(commitments, c)) match {
         case Success((commitments1, add)) =>
-          relayer ! Binding(add, origin)
+          val origin = downstream_opt.map(Relayed(_)).getOrElse(Local(sender))
+          relayer ! Bind(add, origin)
           sender ! "ok"
           stay using d.copy(commitments = commitments1)
         case Failure(cause) => handleCommandError(sender, cause)
@@ -3,10 +3,10 @@ package fr.acinq.eclair.channel
 import akka.actor.ActorRef
 import fr.acinq.bitcoin.Crypto.{Point, PrivateKey, PublicKey, Scalar}
 import fr.acinq.bitcoin.{BinaryData, ScriptElt, Transaction}
-import fr.acinq.eclair.payment.{Local, Origin}
+import fr.acinq.eclair.payment.{Local, Origin, Relayed}
 import fr.acinq.eclair.transactions.CommitmentSpec
 import fr.acinq.eclair.transactions.Transactions.CommitTx
-import fr.acinq.eclair.wire.{AcceptChannel, AnnouncementSignatures, ClosingSigned, FundingCreated, FundingLocked, FundingSigned, Init, OpenChannel, Shutdown}
+import fr.acinq.eclair.wire.{AcceptChannel, AnnouncementSignatures, ClosingSigned, FundingCreated, FundingLocked, FundingSigned, Init, OpenChannel, Shutdown, UpdateAddHtlc}


 /**
@@ -88,7 +88,7 @@ case class BITCOIN_FUNDING_OTHER_CHANNEL_SPENT(channelId: Long) extends BitcoinE
  */

 sealed trait Command
-final case class CMD_ADD_HTLC(amountMsat: Long, paymentHash: BinaryData, expiry: Long, onion: BinaryData = BinaryData("00" * 1254), origin: Origin = Local, commit: Boolean = false) extends Command
+final case class CMD_ADD_HTLC(amountMsat: Long, paymentHash: BinaryData, expiry: Long, onion: BinaryData = BinaryData("00" * 1254), upstream_opt: Option[UpdateAddHtlc] = None, commit: Boolean = false) extends Command
 final case class CMD_FULFILL_HTLC(id: Long, r: BinaryData, commit: Boolean = false) extends Command
 final case class CMD_FAIL_HTLC(id: Long, reason: String, commit: Boolean = false) extends Command
 case object CMD_SIGN extends Command
@@ -60,9 +60,9 @@ object Commitments extends Logging {
    * @return an updated commitment instance
    */
   private def addLocalProposal(commitments: Commitments, proposal: UpdateMessage): Commitments =
     commitments.copy(
       localChanges = commitments.localChanges.copy(proposed = commitments.localChanges.proposed :+ proposal),
       unackedMessages = commitments.unackedMessages :+ proposal)

   private def addRemoteProposal(commitments: Commitments, proposal: UpdateMessage): Commitments =
     commitments.copy(remoteChanges = commitments.remoteChanges.copy(proposed = commitments.remoteChanges.proposed :+ proposal))
@@ -107,116 +107,109 @@ object Commitments extends Logging {
   }

   def receiveAdd(commitments: Commitments, add: UpdateAddHtlc): Commitments = {
-    if (add.id != commitments.remoteNextHtlcId) {
-      throw new RuntimeException(s"unexpected htlc id: actual=${add.id} expected=${commitments.remoteNextHtlcId}")
+    isOldAdd(commitments, add) match {
+      case true => commitments
+      case false =>
+
+        if (add.id != commitments.remoteNextHtlcId) {
+          throw new RuntimeException(s"unexpected htlc id: actual=${add.id} expected=${commitments.remoteNextHtlcId}")
+        }
+
+        val blockCount = Globals.blockCount.get()
+        // if we are the final payee, we need a reasonable amount of time to pull the funds before the sender can get refunded
+        val minExpiry = blockCount + 3
+        if (add.expiry < minExpiry) {
+          throw new RuntimeException(s"expiry too small: required=$minExpiry actual=${add.expiry} (blockCount=$blockCount)")
+        }
+
+        if (add.amountMsat < commitments.localParams.htlcMinimumMsat) {
+          throw new RuntimeException(s"htlc value too small: min=${commitments.localParams.htlcMinimumMsat}")
+        }
+
+        // let's compute the current commitment *as seen by us* including this change
+        val commitments1 = addRemoteProposal(commitments, add).copy(remoteNextHtlcId = commitments.remoteNextHtlcId + 1)
+        val reduced = CommitmentSpec.reduce(commitments1.localCommit.spec, commitments1.localChanges.acked, commitments1.remoteChanges.proposed)
+
+        val htlcValueInFlight = reduced.htlcs.map(_.add.amountMsat).sum
+        if (htlcValueInFlight > commitments1.localParams.maxHtlcValueInFlightMsat) {
+          throw new RuntimeException(s"in-flight htlcs hold too much value: value=$htlcValueInFlight max=${commitments1.localParams.maxHtlcValueInFlightMsat}")
+        }
+
+        val acceptedHtlcs = reduced.htlcs.count(_.direction == IN)
+        if (acceptedHtlcs > commitments1.localParams.maxAcceptedHtlcs) {
+          throw new RuntimeException(s"too many accepted htlcs: value=$acceptedHtlcs max=${commitments1.localParams.maxAcceptedHtlcs}")
+        }
+
+        // a node cannot spend pending incoming htlcs, and need to keep funds above the reserve required by the counterparty, after paying the fee
+        val fees = if (commitments1.localParams.isFunder) 0 else Transactions.commitTxFee(Satoshi(commitments1.remoteParams.dustLimitSatoshis), reduced).amount
+        val missing = reduced.toRemoteMsat / 1000 - commitments1.localParams.channelReserveSatoshis - fees
+        if (missing < 0) {
+          throw new RuntimeException(s"insufficient funds: missing=${-1 * missing} reserve=${commitments1.localParams.channelReserveSatoshis} fees=$fees")
+        }
+
+        commitments1
     }
-
-    val blockCount = Globals.blockCount.get()
-    // if we are the final payee, we need a reasonable amount of time to pull the funds before the sender can get refunded
-    val minExpiry = blockCount + 3
-    if (add.expiry < minExpiry) {
-      throw new RuntimeException(s"expiry too small: required=$minExpiry actual=${add.expiry} (blockCount=$blockCount)")
-    }
-
-    if (add.amountMsat < commitments.localParams.htlcMinimumMsat) {
-      throw new RuntimeException(s"htlc value too small: min=${commitments.localParams.htlcMinimumMsat}")
-    }
-
-    // let's compute the current commitment *as seen by us* including this change
-    val commitments1 = addRemoteProposal(commitments, add).copy(remoteNextHtlcId = commitments.remoteNextHtlcId + 1)
-    val reduced = CommitmentSpec.reduce(commitments1.localCommit.spec, commitments1.localChanges.acked, commitments1.remoteChanges.proposed)
-
-    val htlcValueInFlight = reduced.htlcs.map(_.add.amountMsat).sum
-    if (htlcValueInFlight > commitments1.localParams.maxHtlcValueInFlightMsat) {
-      throw new RuntimeException(s"in-flight htlcs hold too much value: value=$htlcValueInFlight max=${commitments1.localParams.maxHtlcValueInFlightMsat}")
-    }
-
-    val acceptedHtlcs = reduced.htlcs.count(_.direction == IN)
-    if (acceptedHtlcs > commitments1.localParams.maxAcceptedHtlcs) {
-      throw new RuntimeException(s"too many accepted htlcs: value=$acceptedHtlcs max=${commitments1.localParams.maxAcceptedHtlcs}")
-    }
-
-    // a node cannot spend pending incoming htlcs, and need to keep funds above the reserve required by the counterparty, after paying the fee
-    val fees = if (commitments1.localParams.isFunder) 0 else Transactions.commitTxFee(Satoshi(commitments1.remoteParams.dustLimitSatoshis), reduced).amount
-    val missing = reduced.toRemoteMsat / 1000 - commitments1.localParams.channelReserveSatoshis - fees
-    if (missing < 0) {
-      throw new RuntimeException(s"insufficient funds: missing=${-1 * missing} reserve=${commitments1.localParams.channelReserveSatoshis} fees=$fees")
-    }
-
-    commitments1
   }

-  def isHtlcCrossSigned(commitments: Commitments, cmd: CMD_FULFILL_HTLC): Boolean = {
-    val remoteSigned = commitments.localCommit.spec.htlcs.exists(htlc => htlc.direction == IN && htlc.add.id == cmd.id)
+  def getHtlcCrossSigned(commitments: Commitments, directionRelativeToLocal: Direction, htlcId: Long): Option[UpdateAddHtlc] = {
+    val remoteSigned = commitments.localCommit.spec.htlcs.find(htlc => htlc.direction == directionRelativeToLocal && htlc.add.id == htlcId)
     val localSigned = commitments.remoteNextCommitInfo match {
-      case Left(waitingForRevocation) => waitingForRevocation.nextRemoteCommit.spec.htlcs.exists(htlc => htlc.direction == OUT && htlc.add.id == cmd.id)
-      case Right(_) => commitments.remoteCommit.spec.htlcs.exists(htlc => htlc.direction == OUT && htlc.add.id == cmd.id)
+      case Left(waitingForRevocation) => waitingForRevocation.nextRemoteCommit.spec.htlcs.find(htlc => htlc.direction == directionRelativeToLocal.opposite && htlc.add.id == htlcId)
+      case Right(_) => commitments.remoteCommit.spec.htlcs.find(htlc => htlc.direction == directionRelativeToLocal.opposite && htlc.add.id == htlcId)
     }
-    if (!remoteSigned || !localSigned) {
-      logger.warn(
-        s"""htlc ${cmd.id} remoteSigned=$remoteSigned localSigned=$localSigned
-           |${specsToString(commitments)}""".stripMargin)
-    }
-    remoteSigned && localSigned
+    for {
+      htlc_out <- remoteSigned
+      htlc_in <- localSigned
+    } yield htlc_in.add
   }

-  def sendFulfill(commitments: Commitments, cmd: CMD_FULFILL_HTLC): (Commitments, UpdateFulfillHtlc) = {
-    require(isHtlcCrossSigned(commitments, cmd), s"unknown htlc id=${cmd.id}")
-    commitments.localCommit.spec.htlcs.collectFirst { case u: Htlc if u.direction == IN && u.add.id == cmd.id => u.add } match {
+  def sendFulfill(commitments: Commitments, cmd: CMD_FULFILL_HTLC): (Commitments, UpdateFulfillHtlc) =
+    getHtlcCrossSigned(commitments, IN, cmd.id) match {
       case Some(htlc) if htlc.paymentHash == sha256(cmd.r) =>
         val fulfill = UpdateFulfillHtlc(commitments.channelId, cmd.id, cmd.r)
         val commitments1 = addLocalProposal(commitments, fulfill)
         (commitments1, fulfill)
       case Some(htlc) => throw new RuntimeException(s"invalid htlc preimage for htlc id=${cmd.id}")
-      case None => ??? // never happens
+      case None => throw new RuntimeException(s"unknown htlc id=${cmd.id}")
     }
-  }

-  def isOldFulfill(commitments: Commitments, fulfill: UpdateFulfillHtlc): Boolean = {
+  def isOldFulfill(commitments: Commitments, fulfill: UpdateFulfillHtlc): Boolean =
     commitments.remoteChanges.proposed.contains(fulfill) ||
       commitments.remoteChanges.signed.contains(fulfill) ||
       commitments.remoteChanges.acked.contains(fulfill)
-  }

-  def isHtlcCrossSigned(commitments: Commitments, fulfill: UpdateFulfillHtlc): Boolean = {
-    val remoteSigned = commitments.localCommit.spec.htlcs.exists(htlc => htlc.direction == OUT && htlc.add.id == fulfill.id)
-    val localSigned = commitments.remoteNextCommitInfo match {
-      case Left(waitingForRevocation) => waitingForRevocation.nextRemoteCommit.spec.htlcs.exists(htlc => htlc.direction == IN && htlc.add.id == fulfill.id)
-      case Right(_) => commitments.remoteCommit.spec.htlcs.exists(htlc => htlc.direction == IN && htlc.add.id == fulfill.id)
+  def receiveFulfill(commitments: Commitments, fulfill: UpdateFulfillHtlc): Either[Commitments, Commitments] =
+    isOldFulfill(commitments, fulfill) match {
+      case true => Left(commitments)
+      case false => getHtlcCrossSigned(commitments, OUT, fulfill.id) match {
+        case Some(htlc) if htlc.paymentHash == sha256(fulfill.paymentPreimage) => Right(addRemoteProposal(commitments, fulfill))
+        case Some(htlc) => throw new RuntimeException(s"invalid htlc preimage for htlc id=${fulfill.id}")
+        case None => throw new RuntimeException(s"unknown htlc id=${fulfill.id}")
+      }
     }
-    if (!remoteSigned || !localSigned) {
-      logger.warn(
-        s"""htlc ${fulfill.id} remoteSigned=$remoteSigned localSigned=$localSigned
-           |${specsToString(commitments)}""".stripMargin)
-    }
-    remoteSigned && localSigned
-  }

-  def receiveFulfill(commitments: Commitments, fulfill: UpdateFulfillHtlc): (Commitments, UpdateAddHtlc) = {
-    require(isHtlcCrossSigned(commitments, fulfill), s"unknown htlc id=${fulfill.id}")
-    commitments.remoteCommit.spec.htlcs.collectFirst { case u: Htlc if u.direction == IN && u.add.id == fulfill.id => u.add } match {
-      case Some(htlc) if htlc.paymentHash == sha256(fulfill.paymentPreimage) => (addRemoteProposal(commitments, fulfill), htlc)
-      case Some(htlc) => throw new RuntimeException(s"invalid htlc preimage for htlc id=${fulfill.id}")
-      case None => ??? // never happens
-    }
-  }
+  def sendFail(commitments: Commitments, cmd: CMD_FAIL_HTLC): (Commitments, UpdateFailHtlc) =
+    getHtlcCrossSigned(commitments, IN, cmd.id) match {

-  def sendFail(commitments: Commitments, cmd: CMD_FAIL_HTLC): (Commitments, UpdateFailHtlc) = {
-    commitments.localCommit.spec.htlcs.collectFirst { case u: Htlc if u.add.id == cmd.id => u.add } match {
       case Some(htlc) =>
         val fail = UpdateFailHtlc(commitments.channelId, cmd.id, BinaryData(cmd.reason.getBytes("UTF-8")))
         val commitments1 = addLocalProposal(commitments, fail)
         (commitments1, fail)
       case None => throw new RuntimeException(s"unknown htlc id=${cmd.id}")
     }
-  }

-  def receiveFail(commitments: Commitments, fail: UpdateFailHtlc): (Commitments, UpdateAddHtlc) = {
-    commitments.remoteCommit.spec.htlcs.collectFirst { case u: Htlc if u.add.id == fail.id => u.add } match {
-      case Some(htlc) => (addRemoteProposal(commitments, fail), htlc)
-      case None => throw new RuntimeException(s"unknown htlc id=${fail.id}") // TODO: we should fail the channel
+  def isOldFail(commitments: Commitments, fail: UpdateFailHtlc): Boolean =
+    commitments.remoteChanges.proposed.contains(fail) ||
+      commitments.remoteChanges.signed.contains(fail) ||
+      commitments.remoteChanges.acked.contains(fail)
+
+  def receiveFail(commitments: Commitments, fail: UpdateFailHtlc): Either[Commitments, Commitments] =
+    isOldFail(commitments, fail) match {
+      case true => Left(commitments)
+      case false => getHtlcCrossSigned(commitments, OUT, fail.id) match {
+        case Some(htlc) => Right(addRemoteProposal(commitments, fail))
+        case None => throw new RuntimeException(s"unknown htlc id=${fail.id}")
+      }
     }
-  }

   def localHasChanges(commitments: Commitments): Boolean = commitments.remoteChanges.acked.size > 0 || commitments.localChanges.proposed.size > 0

@@ -262,140 +255,149 @@ object Commitments extends Logging {
     }
   }

-  def isOldCommit(commitments: Commitments, commit: CommitSig): Boolean = {
-    commitments.localCommit.commit == commit
-  }
+  def isOldCommit(commitments: Commitments, commit: CommitSig): Boolean = commitments.localCommit.commit == commit

-  def receiveCommit(commitments: Commitments, commit: CommitSig): (Commitments, RevokeAndAck) = {
-    import commitments._
-    // they sent us a signature for *their* view of *our* next commit tx
-    // so in terms of rev.hashes and indexes we have:
-    // ourCommit.index -> our current revocation hash, which is about to become our old revocation hash
-    // ourCommit.index + 1 -> our next revocation hash, used by * them * to build the sig we've just received, and which
-    // is about to become our current revocation hash
-    // ourCommit.index + 2 -> which is about to become our next revocation hash
-    // we will reply to this sig with our old revocation hash preimage (at index) and our next revocation hash (at index + 1)
-    // and will increment our index
+  def receiveCommit(commitments: Commitments, commit: CommitSig): Either[Commitments, (Commitments, RevokeAndAck)] =
+    isOldCommit(commitments, commit) match {
+      case true => Left(commitments)
+      case false =>
+        import commitments._
+        // they sent us a signature for *their* view of *our* next commit tx
+        // so in terms of rev.hashes and indexes we have:
+        // ourCommit.index -> our current revocation hash, which is about to become our old revocation hash
+        // ourCommit.index + 1 -> our next revocation hash, used by * them * to build the sig we've just received, and which
+        // is about to become our current revocation hash
+        // ourCommit.index + 2 -> which is about to become our next revocation hash
+        // we will reply to this sig with our old revocation hash preimage (at index) and our next revocation hash (at index + 1)
+        // and will increment our index
+
         if (!remoteHasChanges(commitments))
           throw new RuntimeException("cannot sign when there are no changes")

         // check that their signature is valid
         // signatures are now optional in the commit message, and will be sent only if the other party is actually
         // receiving money i.e its commit tx has one output for them

         val spec = CommitmentSpec.reduce(localCommit.spec, localChanges.acked, remoteChanges.proposed)
         val localPerCommitmentPoint = Generators.perCommitPoint(localParams.shaSeed, commitments.localCommit.index + 1)
         val (localCommitTx, htlcTimeoutTxs, htlcSuccessTxs) = makeLocalTxs(localCommit.index + 1, localParams, remoteParams, commitInput, localPerCommitmentPoint, spec)
         val sig = Transactions.sign(localCommitTx, localParams.fundingPrivKey)

         // TODO: should we have optional sig? (original comment: this tx will NOT be signed if our output is empty)

         // no need to compute htlc sigs if commit sig doesn't check out
         val signedCommitTx = Transactions.addSigs(localCommitTx, localParams.fundingPrivKey.publicKey, remoteParams.fundingPubKey, sig, commit.signature)
         if (Transactions.checkSpendable(signedCommitTx).isFailure) {
           throw new RuntimeException("invalid sig")
+        }
+
+        val sortedHtlcTxs: Seq[TransactionWithInputInfo] = (htlcTimeoutTxs ++ htlcSuccessTxs).sortBy(_.input.outPoint.index)
+        require(commit.htlcSignatures.size == sortedHtlcTxs.size, s"htlc sig count mismatch (received=${commit.htlcSignatures.size}, expected=${sortedHtlcTxs.size})")
+        val localPaymentKey = Generators.derivePrivKey(localParams.paymentKey, localPerCommitmentPoint)
+        val htlcSigs = sortedHtlcTxs.map(Transactions.sign(_, localPaymentKey))
+        val remotePaymentPubkey = Generators.derivePubKey(remoteParams.paymentBasepoint, localPerCommitmentPoint)
+        // combine the sigs to make signed txes
+        val htlcTxsAndSigs = sortedHtlcTxs
+          .zip(htlcSigs)
+          .zip(commit.htlcSignatures) // this is a list of ((tx, localSig), remoteSig)
+          .map(e => (e._1._1, e._1._2, e._2)) // this is a list of (tx, localSig, remoteSig)
+          .collect {
+            case (htlcTx: HtlcTimeoutTx, localSig, remoteSig) =>
+              require(Transactions.checkSpendable(Transactions.addSigs(htlcTx, localSig, remoteSig)).isSuccess, "bad sig")
+              HtlcTxAndSigs(htlcTx, localSig, remoteSig)
+            case (htlcTx: HtlcSuccessTx, localSig, remoteSig) =>
+              // we can't check that htlc-success tx are spendable because we need the payment preimage; thus we only check the remote sig
+              require(Transactions.checkSig(htlcTx, remoteSig, remotePaymentPubkey), "bad sig")
+              HtlcTxAndSigs(htlcTx, localSig, remoteSig)
+          }
+
+        val timeoutHtlcSigs = htlcTxsAndSigs.collect {
+          case HtlcTxAndSigs(_: HtlcTimeoutTx, localSig, _) => localSig
+        }
+
+        // we will send our revocation preimage + our next revocation hash
+        val localPerCommitmentSecret = Generators.perCommitSecret(localParams.shaSeed, commitments.localCommit.index)
+        val localNextPerCommitmentPoint = Generators.perCommitPoint(localParams.shaSeed, commitments.localCommit.index + 2)
+        val revocation = RevokeAndAck(
+          channelId = commitments.channelId,
+          perCommitmentSecret = localPerCommitmentSecret,
+          nextPerCommitmentPoint = localNextPerCommitmentPoint,
+          htlcTimeoutSignatures = timeoutHtlcSigs.toList
+        )
+
+        // update our commitment data
+        val ourCommit1 = LocalCommit(
+          index = localCommit.index + 1,
+          spec,
+          publishableTxs = PublishableTxs(signedCommitTx, htlcTxsAndSigs),
+          commit = commit)
+        val ourChanges1 = localChanges.copy(acked = Nil)
+        val theirChanges1 = remoteChanges.copy(proposed = Nil, acked = remoteChanges.acked ++ remoteChanges.proposed)
+        // they have received our previous revocation (otherwise they wouldn't have sent a commit)
+        // so we can acknowledge the revocation
+        val unackedMessages1 = commitments.unackedMessages.filterNot(_.isInstanceOf[RevokeAndAck]) :+ revocation
+        val commitments1 = commitments.copy(localCommit = ourCommit1, localChanges = ourChanges1, remoteChanges = theirChanges1, unackedMessages = unackedMessages1)
+
+        logger.debug(s"current commit: index=${ourCommit1.index} htlc_in=${ourCommit1.spec.htlcs.filter(_.direction == IN).size} htlc_out=${ourCommit1.spec.htlcs.filter(_.direction == OUT).size} txid=${ourCommit1.publishableTxs.commitTx.tx.txid} tx=${Transaction.write(ourCommit1.publishableTxs.commitTx.tx)}")
+
+        Right((commitments1, revocation))
     }

-    val sortedHtlcTxs: Seq[TransactionWithInputInfo] = (htlcTimeoutTxs ++ htlcSuccessTxs).sortBy(_.input.outPoint.index)
-    require(commit.htlcSignatures.size == sortedHtlcTxs.size, s"htlc sig count mismatch (received=${commit.htlcSignatures.size}, expected=${sortedHtlcTxs.size})")
-    val localPaymentKey = Generators.derivePrivKey(localParams.paymentKey, localPerCommitmentPoint)
-    val htlcSigs = sortedHtlcTxs.map(Transactions.sign(_, localPaymentKey))
-    val remotePaymentPubkey = Generators.derivePubKey(remoteParams.paymentBasepoint, localPerCommitmentPoint)
-    // combine the sigs to make signed txes
-    val htlcTxsAndSigs = sortedHtlcTxs
-      .zip(htlcSigs)
-      .zip(commit.htlcSignatures) // this is a list of ((tx, localSig), remoteSig)
-      .map(e => (e._1._1, e._1._2, e._2)) // this is a list of (tx, localSig, remoteSig)
-      .collect {
-        case (htlcTx: HtlcTimeoutTx, localSig, remoteSig) =>
-          require(Transactions.checkSpendable(Transactions.addSigs(htlcTx, localSig, remoteSig)).isSuccess, "bad sig")
-          HtlcTxAndSigs(htlcTx, localSig, remoteSig)
-        case (htlcTx: HtlcSuccessTx, localSig, remoteSig) =>
-          // we can't check that htlc-success tx are spendable because we need the payment preimage; thus we only check the remote sig
-          require(Transactions.checkSig(htlcTx, remoteSig, remotePaymentPubkey), "bad sig")
-          HtlcTxAndSigs(htlcTx, localSig, remoteSig)
-      }
-
-    val timeoutHtlcSigs = htlcTxsAndSigs.collect {
-      case HtlcTxAndSigs(_: HtlcTimeoutTx, localSig, _) => localSig
-    }
-
-    // we will send our revocation preimage + our next revocation hash
-    val localPerCommitmentSecret = Generators.perCommitSecret(localParams.shaSeed, commitments.localCommit.index)
-    val localNextPerCommitmentPoint = Generators.perCommitPoint(localParams.shaSeed, commitments.localCommit.index + 2)
-    val revocation = RevokeAndAck(
-      channelId = commitments.channelId,
-      perCommitmentSecret = localPerCommitmentSecret,
-      nextPerCommitmentPoint = localNextPerCommitmentPoint,
-      htlcTimeoutSignatures = timeoutHtlcSigs.toList
-    )
-
-    // update our commitment data
-    val ourCommit1 = LocalCommit(
-      index = localCommit.index + 1,
-      spec,
-      publishableTxs = PublishableTxs(signedCommitTx, htlcTxsAndSigs),
-      commit = commit)
-    val ourChanges1 = localChanges.copy(acked = Nil)
-    val theirChanges1 = remoteChanges.copy(proposed = Nil, acked = remoteChanges.acked ++ remoteChanges.proposed)
-    // they have received our previous revocation (otherwise they wouldn't have sent a commit)
-    // so we can acknowledge the revocation
-    val unackedMessages1 = commitments.unackedMessages.filterNot(_.isInstanceOf[RevokeAndAck]) :+ revocation
-    val commitments1 = commitments.copy(localCommit = ourCommit1, localChanges = ourChanges1, remoteChanges = theirChanges1, unackedMessages = unackedMessages1)
-
-    logger.debug(s"current commit: index=${ourCommit1.index} htlc_in=${ourCommit1.spec.htlcs.filter(_.direction == IN).size} htlc_out=${ourCommit1.spec.htlcs.filter(_.direction == OUT).size} txid=${ourCommit1.publishableTxs.commitTx.tx.txid} tx=${Transaction.write(ourCommit1.publishableTxs.commitTx.tx)}")
-
-    (commitments1, revocation)
-  }
-
-  def isOldRevocation(commitments: Commitments, revocation: RevokeAndAck): Boolean = {
+  def isOldRevocation(commitments: Commitments, revocation: RevokeAndAck): Boolean =
     commitments.remoteNextCommitInfo match {
       case Right(point) if point == revocation.nextPerCommitmentPoint => true
       case Left(waitForRevocation) if waitForRevocation.nextRemoteCommit.remotePerCommitmentPoint == revocation.nextPerCommitmentPoint => true
       case _ => false
     }
-  }

-  def receiveRevocation(commitments: Commitments, revocation: RevokeAndAck): Commitments = {
-    import commitments._
-    // we receive a revocation because we just sent them a sig for their next commit tx
-    remoteNextCommitInfo match {
-      case Left(_) if revocation.perCommitmentSecret.toPoint != remoteCommit.remotePerCommitmentPoint =>
-        throw new RuntimeException("invalid preimage")
-      case Left(WaitingForRevocation(theirNextCommit, _, _)) =>
-        // we rebuild the transactions a 2nd time but we are just interested in HTLC-timeout txs because we need to check their sig
-        val (_, htlcTimeoutTxs, _) = makeRemoteTxs(theirNextCommit.index, localParams, remoteParams, commitInput, theirNextCommit.remotePerCommitmentPoint, theirNextCommit.spec)
-        // then we sort and sign them
-        val sortedHtlcTimeoutTxs = htlcTimeoutTxs.sortBy(_.input.outPoint.index)
-        require(revocation.htlcTimeoutSignatures.size == sortedHtlcTimeoutTxs.size, s"htlc-timeout sig count mismatch (received=${revocation.htlcTimeoutSignatures.size}, expected=${sortedHtlcTimeoutTxs.size})")
-        val paymentKey = Generators.derivePrivKey(localParams.paymentKey, theirNextCommit.remotePerCommitmentPoint)
-        val htlcSigs = sortedHtlcTimeoutTxs.map(Transactions.sign(_, paymentKey))
-        // combine the sigs to make signed txes
-        val signedHtlcTxs = sortedHtlcTimeoutTxs
-          .zip(htlcSigs)
-          .zip(revocation.htlcTimeoutSignatures) // this is a list of ((tx, localSig), remoteSig)
-          .map(e => (e._1._1, e._1._2, e._2)) // this is a list of (tx, localSig, remoteSig)
-          .map(x => Transactions.addSigs(x._1, x._3, x._2))
+  def receiveRevocation(commitments: Commitments, revocation: RevokeAndAck): Either[Commitments, Commitments] =
+    isOldRevocation(commitments, revocation) match {
+      case true => Left(commitments)
+      case false =>
+        import commitments._
+        // we receive a revocation because we just sent them a sig for their next commit tx
+        remoteNextCommitInfo match {
+          case Left(_) if revocation.perCommitmentSecret.toPoint != remoteCommit.remotePerCommitmentPoint =>
+            throw new RuntimeException("invalid preimage")
+          case Left(WaitingForRevocation(theirNextCommit, _, _)) =>
+            // we rebuild the transactions a 2nd time but we are just interested in HTLC-timeout txs because we need to check their sig
+            val (_, htlcTimeoutTxs, _) = makeRemoteTxs(theirNextCommit.index, localParams, remoteParams, commitInput, theirNextCommit.remotePerCommitmentPoint, theirNextCommit.spec)
+            // then we sort and sign them
+            val sortedHtlcTimeoutTxs = htlcTimeoutTxs.sortBy(_.input.outPoint.index)
+            require(revocation.htlcTimeoutSignatures.size == sortedHtlcTimeoutTxs.size, s"htlc-timeout sig count mismatch (received=${
+              revocation.htlcTimeoutSignatures.size
+            }, expected=${
+              sortedHtlcTimeoutTxs.size
+            })")
+            val paymentKey = Generators.derivePrivKey(localParams.paymentKey, theirNextCommit.remotePerCommitmentPoint)
+            val htlcSigs = sortedHtlcTimeoutTxs.map(Transactions.sign(_, paymentKey))
+            // combine the sigs to make signed txes
+            val signedHtlcTxs = sortedHtlcTimeoutTxs
+              .zip(htlcSigs)
+              .zip(revocation.htlcTimeoutSignatures) // this is a list of ((tx, localSig), remoteSig)
+              .map(e => (e._1._1, e._1._2, e._2)) // this is a list of (tx, localSig, remoteSig)
+              .map(x => Transactions.addSigs(x._1, x._3, x._2))
+
             // and finally whe check the sigs
             require(signedHtlcTxs.forall(Transactions.checkSpendable(_).isSuccess), "bad sig")

             // they have received our last commitsig (otherwise they wouldn't have replied with a revocation)
             // so we can acknowledge all our previous updates and the commitsig
             val unackedMessages1 = commitments.unackedMessages.drop(commitments.unackedMessages.indexWhere(_.isInstanceOf[CommitSig]) + 1)

-        commitments.copy(
+            val commitments1 = commitments.copy(
               localChanges = localChanges.copy(signed = Nil, acked = localChanges.acked ++ localChanges.signed),
               remoteChanges = remoteChanges.copy(signed = Nil),
               remoteCommit = theirNextCommit,
               remoteNextCommitInfo = Right(revocation.nextPerCommitmentPoint),
               unackedMessages = unackedMessages1,
               remotePerCommitmentSecrets = commitments.remotePerCommitmentSecrets.addHash(revocation.perCommitmentSecret, 0xFFFFFFFFFFFFL - commitments.remoteCommit.index))
-      case Right(_) =>
-        throw new RuntimeException("received unexpected RevokeAndAck message")
+
+            Right(commitments1)
+          case Right(_) =>
+            throw new RuntimeException("received unexpected RevokeAndAck message")
+        }
     }
-  }

   def makeLocalTxs(commitTxNumber: Long, localParams: LocalParams, remoteParams: RemoteParams, commitmentInput: InputInfo, localPerCommitmentPoint: Point, spec: CommitmentSpec): (CommitTx, Seq[HtlcTimeoutTx], Seq[HtlcSuccessTx]) = {
     val localPubkey = Generators.derivePubKey(localParams.paymentKey.toPoint, localPerCommitmentPoint)
@@ -428,41 +430,41 @@ object Commitments extends Logging {
     case _ => "???"
   }

-  def toString(commitments: Commitments): String = {
+  def changesToString(commitments: Commitments): String = {
     import commitments._
     s"""commitments:
        | localChanges:
        | proposed: ${localChanges.proposed.map(msg2String(_)).mkString(" ")}
        | signed: ${localChanges.signed.map(msg2String(_)).mkString(" ")}
        | acked: ${localChanges.acked.map(msg2String(_)).mkString(" ")}
        | remoteChanges:
        | proposed: ${remoteChanges.proposed.map(msg2String(_)).mkString(" ")}
        | acked: ${remoteChanges.acked.map(msg2String(_)).mkString(" ")}
        | signed: ${remoteChanges.signed.map(msg2String(_)).mkString(" ")}
        | nextHtlcId:
        | local: $localNextHtlcId
        | remote: $remoteNextHtlcId
        | unackedMessages:
        | ${unackedMessages.map(msg2String(_)).mkString(" ")}""".stripMargin
   }

   def specsToString(commitments: Commitments): String = {
     s"""specs:
        |localcommit:
        | toLocal: ${commitments.localCommit.spec.toLocalMsat}
        | toRemote: ${commitments.localCommit.spec.toRemoteMsat}
        | htlcs:
        |${commitments.localCommit.spec.htlcs.map(h => s" ${h.direction} ${h.add.id} ${h.add.expiry}").mkString("\n")}
        |remotecommit:
        | toLocal: ${commitments.remoteCommit.spec.toLocalMsat}
        | toRemote: ${commitments.remoteCommit.spec.toRemoteMsat}
        | htlcs:
        |${commitments.remoteCommit.spec.htlcs.map(h => s" ${h.direction} ${h.add.id} ${h.add.expiry}").mkString("\n")}
        |next remotecommit:
        | toLocal: ${commitments.remoteNextCommitInfo.left.toOption.map(_.nextRemoteCommit.spec.toLocalMsat).getOrElse("N/A")}
        | toRemote: ${commitments.remoteNextCommitInfo.left.toOption.map(_.nextRemoteCommit.spec.toRemoteMsat).getOrElse("N/A")}
        | htlcs:
        |${commitments.remoteNextCommitInfo.left.toOption.map(_.nextRemoteCommit.spec.htlcs.map(h => s" ${h.direction} ${h.add.id} ${h.add.expiry}").mkString("\n")).getOrElse("N/A")}""".stripMargin
   }
 }

@ -8,8 +8,8 @@ import fr.acinq.bitcoin.BinaryData
*/
class PaymentEvent

case class PaymentSent(channel: ActorRef, h: BinaryData) extends PaymentEvent
//case class PaymentSent(channel: ActorRef, h: BinaryData) extends PaymentEvent

case class PaymentFailed(channel: ActorRef, h: BinaryData, reason: String) extends PaymentEvent
//case class PaymentFailed(channel: ActorRef, h: BinaryData, reason: String) extends PaymentEvent

case class PaymentReceived(channel: ActorRef, h: BinaryData) extends PaymentEvent
@ -8,7 +8,7 @@ import fr.acinq.eclair._
import fr.acinq.eclair.channel.{CMD_ADD_HTLC, Register}
import fr.acinq.eclair.crypto.Sphinx
import fr.acinq.eclair.router._
import fr.acinq.eclair.wire.{Codecs, PerHopPayload}
import fr.acinq.eclair.wire.{Codecs, PerHopPayload, UpdateFailHtlc, UpdateFulfillHtlc}
import scodec.Attempt

// @formatter:off
@ -46,7 +46,6 @@ class PaymentLifecycle(sourceNodeId: BinaryData, router: ActorRef) extends Loggi
case Event(RouteResponse(hops), WaitingForRoute(s, c)) =>
val firstHop = hops.head
val cmd = buildCommand(c.amountMsat, c.paymentHash, hops, Globals.blockCount.get().toInt)
context.system.eventStream.subscribe(self, classOf[PaymentEvent])
context.actorSelection(Register.actorPathToChannelId(firstHop.lastUpdate.channelId)) ! cmd
goto(WAITING_FOR_PAYMENT_COMPLETE) using WaitingForComplete(s, cmd)

@ -58,15 +57,17 @@ class PaymentLifecycle(sourceNodeId: BinaryData, router: ActorRef) extends Loggi
when(WAITING_FOR_PAYMENT_COMPLETE) {
case Event("ok", _) => stay()

case Event(e@PaymentSent(_, h), WaitingForComplete(s, cmd)) if h == cmd.paymentHash =>
s ! "sent"
stop(FSM.Normal)

case Event(reason: String, WaitingForComplete(s, _)) =>
s ! Status.Failure(new RuntimeException(reason))
stop(FSM.Failure(reason))

case Event(e@PaymentFailed(_, h, reason), WaitingForComplete(s, cmd)) if h == cmd.paymentHash =>
case Event(fulfill: UpdateFulfillHtlc, WaitingForComplete(s, cmd)) =>
s ! "sent"
stop(FSM.Normal)

case Event(fail: UpdateFailHtlc, WaitingForComplete(s, cmd)) =>
// TODO: fix new String(fail.reason)
val reason = new String(fail.reason)
s ! Status.Failure(new RuntimeException(reason))
stop(FSM.Failure(reason))
}
@ -128,7 +129,7 @@ object PaymentLifecycle {
val nodes = hops.map(_.nextNodeId)
// BOLT 2 requires that associatedData == paymentHash
val onion = buildOnion(nodes, payloads, paymentHash)
CMD_ADD_HTLC(firstAmountMsat, paymentHash, firstExpiry, onion, commit = true)
CMD_ADD_HTLC(firstAmountMsat, paymentHash, firstExpiry, onion, upstream_opt = None, commit = true)
}

}
@ -17,10 +17,13 @@ import scala.util.{Failure, Success, Try}
case class OutgoingChannel(channelId: Long, channel: ActorRef, nodeAddress: BinaryData)

sealed trait Origin
case object Local extends Origin
case class Local(sender: ActorRef) extends Origin
case class Relayed(downstream: UpdateAddHtlc) extends Origin
case class Relayed(upstream: UpdateAddHtlc) extends Origin

case class Binding(add: UpdateAddHtlc, origin: Origin)
case class Bind(add: UpdateAddHtlc, origin: Origin)
case class ForwardAdd(add: UpdateAddHtlc)
case class ForwardFulfill(fulfill: UpdateFulfillHtlc)
case class ForwardFail(fail: UpdateFailHtlc)

// @formatter:on

@ -34,26 +37,28 @@ class Relayer(nodeSecret: PrivateKey, paymentHandler: ActorRef) extends Actor wi

override def receive: Receive = main(Set(), Map())

def main(upstreams: Set[OutgoingChannel], bindings: Map[UpdateAddHtlc, Origin]): Receive = {
case class DownstreamHtlcId(channelId: Long, htlcId: Long)

def main(channels: Set[OutgoingChannel], bindings: Map[DownstreamHtlcId, Origin]): Receive = {

case ChannelChangedState(channel, _, remoteNodeId, _, NORMAL, d: DATA_NORMAL) =>
import d.commitments.channelId
log.info(s"adding channel $channelId to available upstreams")
log.info(s"adding channel $channelId to available channels")
context become main(upstreams + OutgoingChannel(channelId, channel, remoteNodeId.hash160), bindings)
context become main(channels + OutgoingChannel(channelId, channel, remoteNodeId.hash160), bindings)

case ChannelChangedState(channel, _, remoteNodeId, _, NEGOTIATING, d: DATA_NEGOTIATING) =>
import d.commitments.channelId
log.info(s"removing channel $channelId from upstreams/downstreams (mutual close)")
log.info(s"removing channel $channelId from available channels")
// TODO: cleanup bindings
context become main(upstreams - OutgoingChannel(channelId, channel, remoteNodeId.hash160), bindings)
context become main(channels - OutgoingChannel(channelId, channel, remoteNodeId.hash160), bindings)

case ChannelChangedState(channel, _, remoteNodeId, _, CLOSING, d: DATA_CLOSING) =>
import d.commitments.channelId
log.info(s"removing channel $channelId from upstreams/downstreams (unilateral close)")
log.info(s"removing channel $channelId from available channels")
// TODO: cleanup bindings
context become main(upstreams - OutgoingChannel(channelId, channel, remoteNodeId.hash160), bindings)
context become main(channels - OutgoingChannel(channelId, channel, remoteNodeId.hash160), bindings)

case add: UpdateAddHtlc =>
case ForwardAdd(add) =>
Try(Sphinx.parsePacket(nodeSecret, add.paymentHash, add.onionRoutingPacket))
.map {
case (payload, nextNodeAddress, nextPacket) => (Codecs.perHopPayloadCodec.decode(BitVector(payload.data)), nextNodeAddress, nextPacket)
@ -62,13 +67,13 @@ class Relayer(nodeSecret: PrivateKey, paymentHandler: ActorRef) extends Actor wi
log.info(s"we are the final recipient of htlc #${add.id}")
context.system.eventStream.publish(PaymentReceived(self, add.paymentHash))
paymentHandler forward add
case Success((Attempt.Successful(DecodeResult(payload, _)), nextNodeAddress, nextPacket)) if upstreams.exists(_.nodeAddress == nextNodeAddress) =>
case Success((Attempt.Successful(DecodeResult(payload, _)), nextNodeAddress, nextPacket)) if channels.exists(_.nodeAddress == nextNodeAddress) =>
val upstream = upstreams.find(_.nodeAddress == nextNodeAddress).get.channel
val downstream = channels.find(_.nodeAddress == nextNodeAddress).get.channel
log.info(s"forwarding htlc #${add.id} to upstream=$upstream")
log.info(s"forwarding htlc #${add.id} to downstream=$downstream")
upstream ! CMD_ADD_HTLC(payload.amt_to_forward, add.paymentHash, payload.outgoing_cltv_value, nextPacket, origin = Relayed(add), commit = true)
downstream ! CMD_ADD_HTLC(payload.amt_to_forward, add.paymentHash, payload.outgoing_cltv_value, nextPacket, upstream_opt = Some(add), commit = true)
context become main(upstreams, bindings)
context become main(channels, bindings)
case Success((Attempt.Successful(DecodeResult(_, _)), nextNodeAddress, _)) =>
log.warning(s"couldn't resolve upstream node address $nextNodeAddress, failing htlc #${add.id}")
log.warning(s"couldn't resolve downstream node address $nextNodeAddress, failing htlc #${add.id}")
sender ! CMD_FAIL_HTLC(add.id, "route error", commit = true)
case Success((Attempt.Failure(cause), _, _)) =>
log.error(s"couldn't parse payload: $cause")
@ -78,45 +83,41 @@ class Relayer(nodeSecret: PrivateKey, paymentHandler: ActorRef) extends Actor wi
sender ! CMD_FAIL_HTLC(add.id, "onion parsing error", commit = true)
}

case Binding(upstream, origin) =>
case Bind(downstream, origin) =>
origin match {
case Local => log.info(s"we are the origin of htlc ${upstream.channelId}/${upstream.id}")
case Local(_) => log.info(s"we are the origin of htlc ${downstream.channelId}/${downstream.id}")
case Relayed(downstream) => log.info(s"relayed htlc ${downstream.channelId}/${downstream.id} to ${upstream.channelId}/${upstream.id}")
case Relayed(upstream) => log.info(s"relayed htlc ${upstream.channelId}/${upstream.id} to ${downstream}/${downstream.id}")
}
context become main(upstreams, bindings + (upstream -> origin))
context become main(channels, bindings + (DownstreamHtlcId(downstream.channelId, downstream.id) -> origin))

case (add: UpdateAddHtlc, fulfill: UpdateFulfillHtlc) =>
case ForwardFulfill(fulfill) =>
bindings.get(add) match {
bindings.get(DownstreamHtlcId(fulfill.channelId, fulfill.id)) match {
case Some(Relayed(origin)) if upstreams.exists(_.channelId == origin.channelId) =>
case Some(Relayed(origin)) if channels.exists(_.channelId == origin.channelId) =>
val downstream = upstreams.find(_.channelId == origin.channelId).get.channel
val upstream = channels.find(_.channelId == origin.channelId).get.channel
downstream ! CMD_SIGN
upstream ! CMD_FULFILL_HTLC(origin.id, fulfill.paymentPreimage, commit = true)
downstream ! CMD_FULFILL_HTLC(origin.id, fulfill.paymentPreimage)
downstream ! CMD_SIGN
case Some(Relayed(origin)) =>
log.warning(s"origin channel ${origin.channelId} has disappeared in the meantime")
case Some(Local) =>
case Some(Local(sender)) =>
log.info(s"we were the origin payer for htlc #${fulfill.id}")
context.system.eventStream.publish(PaymentSent(self, add.paymentHash))
sender ! fulfill
case None =>
log.warning(s"no origin found for htlc $add")
log.warning(s"no origin found for htlc ${fulfill.channelId}/${fulfill.id}")
}

case (add: UpdateAddHtlc, fail: UpdateFailHtlc) =>
case ForwardFail(fail) =>
bindings.get(add) match {
bindings.get(DownstreamHtlcId(fail.channelId, fail.id)) match {
case Some(Relayed(origin)) if upstreams.exists(_.channelId == origin.channelId) =>
case Some(Relayed(origin)) if channels.exists(_.channelId == origin.channelId) =>
val downstream = upstreams.find(_.channelId == origin.channelId).get.channel
val upstream = channels.find(_.channelId == origin.channelId).get.channel
downstream ! CMD_SIGN
// TODO: fix new String(fail.reason)
downstream ! CMD_FAIL_HTLC(origin.id, new String(fail.reason))
upstream ! CMD_FAIL_HTLC(origin.id, new String(fail.reason), commit = true)
downstream ! CMD_SIGN
upstream ! CMD_SIGN
case Some(Relayed(origin)) =>
log.warning(s"origin channel ${origin.channelId} has disappeared in the meantime")
case Some(Local) =>
case Some(Local(sender)) =>
log.info(s"we were the origin payer for htlc #${fail.id}")
// TODO: fix new String(fail.reason)
sender ! fail
context.system.eventStream.publish(PaymentFailed(self, add.paymentHash, new String(fail.reason)))
case None =>
log.warning(s"no origin found for htlc $add")
log.warning(s"no origin found for htlc ${fail.channelId}/${fail.id}")
}

case w@WatchEventSpent(BITCOIN_HTLC_SPENT, tx) =>
@ -139,13 +140,13 @@ class Relayer(nodeSecret: PrivateKey, paymentHandler: ActorRef) extends Actor wi
.flatten
.map { preimage =>
bindings.collect {
case b@(upstream, Relayed(downstream)) if downstream.paymentHash == sha256(preimage) =>
case b@(downstreamHtlcId, Relayed(upstream)) if upstream.paymentHash == sha256(preimage) =>
log.info(s"found a match between preimage=$preimage and origin htlc for $b")
self ! (upstream, UpdateFulfillHtlc(upstream.channelId, upstream.id, preimage))
self ! ForwardFulfill(UpdateFulfillHtlc(downstreamHtlcId.channelId, downstreamHtlcId.htlcId, preimage))
}
}

case 'upstreams => sender ! upstreams
case 'channels => sender ! channels
}
}

@ -9,9 +9,9 @@ import fr.acinq.eclair.wire.{UpdateAddHtlc, UpdateFailHtlc, UpdateFulfillHtlc, U
*/

// @formatter:off
sealed trait Direction
sealed trait Direction { def opposite: Direction }
case object IN extends Direction
case object IN extends Direction { def opposite = OUT }
case object OUT extends Direction
case object OUT extends Direction { def opposite = IN }
// @formatter:on

case class Htlc(direction: Direction, add: UpdateAddHtlc, val previousChannelId: Option[BinaryData])
@ -8,7 +8,7 @@
</encoder>
</appender>

<appender name="CONSOLEWARN" class="ch.qos.logback.core.ConsoleAppender">
<!--appender name="CONSOLEWARN" class="ch.qos.logback.core.ConsoleAppender">
<target>System.out</target>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>WARN</level>
@ -16,7 +16,7 @@
<encoder>
<pattern>%-5level %X{akkaSource} - %msg%ex{12}%n</pattern>
</encoder>
</appender>
</appender-->

<appender name="FILE" class="ch.qos.logback.core.FileAppender">
<file>eclair.log</file>
@ -26,17 +26,13 @@
</encoder>
</appender>

<logger name="fr.acinq.eclair.Pipe" level="INFO">
<logger name="fr.acinq.eclair.Pipe" level="DEBUG">
<appender-ref ref="CONSOLE"/>
</logger>

<logger name="fr.acinq.eclair.channel.states.NormalOfflineFuzzySpec" level="INFO">
<appender-ref ref="CONSOLE"/>
</logger>

<root level="INFO">
<!--appender-ref ref="FILE"/-->
<!--appender-ref ref="FILE"/>
<!--appender-ref ref="CONSOLEWARN"/-->
<appender-ref ref="CONSOLEWARN"/-->
<appender-ref ref="CONSOLE"/>
</root>

@ -0,0 +1,128 @@
package fr.acinq.eclair.channel.states

import akka.actor.{ActorRef, Cancellable, Props}
import akka.testkit.{TestFSMRef, TestProbe}
import fr.acinq.bitcoin.BinaryData
import fr.acinq.eclair.TestConstants.{Alice, Bob}
import fr.acinq.eclair._
import fr.acinq.eclair.blockchain._
import fr.acinq.eclair.channel.{Data, State, _}
import fr.acinq.eclair.payment._
import fr.acinq.eclair.router.Hop
import fr.acinq.eclair.wire._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

import scala.collection.immutable.Nil
import scala.concurrent.duration._
import scala.util.Random

/**
* Created by PM on 05/07/2016.
*/
@RunWith(classOf[JUnitRunner])
class FuzzySpec extends TestkitBaseClass with StateTestsHelperMethods {

type FixtureParam = Tuple7[TestFSMRef[State, Data, Channel], TestFSMRef[State, Data, Channel], ActorRef, ActorRef, ActorRef, ActorRef, ActorRef]

override def withFixture(test: OneArgTest) = {
val pipe = system.actorOf(Props(new Pipe()))
val alice2blockchain = TestProbe()
val blockchainA = system.actorOf(Props(new PeerWatcher(new TestBitcoinClient())))
val bob2blockchain = TestProbe()
val paymentHandlerA = system.actorOf(Props(new LocalPaymentHandler()), name = "payment-handler-a")
val paymentHandlerB = system.actorOf(Props(new LocalPaymentHandler()), name = "payment-handler-b")
val relayerA = system.actorOf(Relayer.props(Globals.Node.privateKey, paymentHandlerA), "relayer-a")
val relayerB = system.actorOf(Relayer.props(Globals.Node.privateKey, paymentHandlerB), "relayer-b")
val router = TestProbe()
val alice: TestFSMRef[State, Data, Channel] = TestFSMRef(new Channel(pipe, alice2blockchain.ref, router.ref, relayerA))
val bob: TestFSMRef[State, Data, Channel] = TestFSMRef(new Channel(pipe, bob2blockchain.ref, router.ref, relayerB))
within(30 seconds) {
val aliceInit = Init(Alice.channelParams.globalFeatures, Alice.channelParams.localFeatures)
val bobInit = Init(Bob.channelParams.globalFeatures, Bob.channelParams.localFeatures)
relayerA ! alice
relayerB ! bob
alice ! INPUT_INIT_FUNDER(Bob.id, 0, TestConstants.fundingSatoshis, TestConstants.pushMsat, Alice.channelParams, bobInit)
bob ! INPUT_INIT_FUNDEE(Alice.id, 0, Bob.channelParams, aliceInit)
pipe ! (alice, bob)
alice2blockchain.expectMsgType[MakeFundingTx]
alice2blockchain.forward(blockchainA)
alice2blockchain.expectMsgType[WatchSpent]
alice2blockchain.expectMsgType[WatchConfirmed]
alice2blockchain.forward(blockchainA)
alice2blockchain.expectMsgType[PublishAsap]
alice2blockchain.forward(blockchainA)
bob2blockchain.expectMsgType[WatchSpent]
bob2blockchain.expectMsgType[WatchConfirmed]
bob ! WatchEventConfirmed(BITCOIN_FUNDING_DEPTHOK, 400000, 42)
alice2blockchain.expectMsgType[WatchLost]
bob2blockchain.expectMsgType[WatchLost]
awaitCond(alice.stateName == NORMAL)
awaitCond(bob.stateName == NORMAL)
}
test((alice, bob, pipe, relayerA, relayerB, paymentHandlerA, paymentHandlerB))
}

def buildCmdAdd(paymentHash: BinaryData) = {
val channelUpdate_ab = ChannelUpdate("00" * 64, 0, 0, "0000", cltvExpiryDelta = 4, feeBaseMsat = 642000, feeProportionalMillionths = 7, htlcMinimumMsat = 0)
val hops = Hop(Globals.Node.publicKey, Globals.Node.publicKey, channelUpdate_ab) :: Nil
// we don't want to be below htlcMinimumMsat
val amount = Random.nextInt(1000000) + 1000
PaymentLifecycle.buildCommand(amount, paymentHash, hops, 444000)
}

def gatling(parallel: Int, total: Int, channel: TestFSMRef[State, Data, Channel], paymentHandler: ActorRef): Unit = {
for (i <- 0 until total / parallel) {
// we don't want to be above maxHtlcValueInFlightMsat or maxAcceptedHtlcs
awaitCond(channel.stateData.asInstanceOf[DATA_NORMAL].commitments.localCommit.spec.htlcs.size < 10 && channel.stateData.asInstanceOf[DATA_NORMAL].commitments.remoteCommit.spec.htlcs.size < 10)
val senders = for (i <- 0 until parallel) yield TestProbe()
senders.foreach(_.send(paymentHandler, 'genh))
val paymentHashes = senders.map(_.expectMsgType[BinaryData])
val cmds = paymentHashes.map(buildCmdAdd(_))
senders.zip(cmds).foreach(x => x._1.send(channel, x._2))
val oks = senders.map(_.expectMsgType[String])
val fulfills = senders.map(_.expectMsgType[UpdateFulfillHtlc])
}
}

def randomDisconnect(initialPipe: ActorRef): Cancellable = {
import scala.concurrent.ExecutionContext.Implicits.global
var currentPipe = initialPipe
system.scheduler.schedule(3 seconds, 3 seconds) {
currentPipe ! INPUT_DISCONNECTED
val newPipe = system.actorOf(Props(new Pipe()))
system.scheduler.scheduleOnce(500 millis) {
currentPipe ! INPUT_RECONNECTED(newPipe)
currentPipe = newPipe
}
}
}

test("fuzzy testing with only one party sending HTLCs") {
case (alice, bob, pipe, relayerA, relayerB, paymentHandlerA, paymentHandlerB) =>
val gatling1 = new Thread(new Runnable {
override def run(): Unit = gatling(5, 100, alice, paymentHandlerB)
})
gatling1.start()
val chaosMonkey = randomDisconnect(pipe)
gatling1.join()
chaosMonkey.cancel()
}

test("fuzzy testing with only both parties sending HTLCs") {
case (alice, bob, pipe, relayerA, relayerB, paymentHandlerA, paymentHandlerB) =>
val gatling1 = new Thread(new Runnable {
override def run(): Unit = gatling(4, 100, alice, paymentHandlerB)
})
gatling1.start()
val gatling2 = new Thread(new Runnable {
override def run(): Unit = gatling(4, 100, bob, paymentHandlerA)
})
gatling2.start()
val chaosMonkey = randomDisconnect(pipe)
gatling1.join()
gatling2.join()
chaosMonkey.cancel()
}

}
@ -1,152 +0,0 @@
package fr.acinq.eclair.channel.states

import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import akka.testkit.{TestFSMRef, TestProbe}
import fr.acinq.bitcoin.{BinaryData, Crypto}
import fr.acinq.eclair.TestConstants.{Alice, Bob}
import fr.acinq.eclair.blockchain._
import fr.acinq.eclair.channel.{Data, State, _}
import fr.acinq.eclair.wire._
import fr.acinq.eclair.{Pipe, TestBitcoinClient, TestConstants, TestkitBaseClass}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

import scala.concurrent.duration._
import scala.util.Random

/**
* Created by PM on 05/07/2016.
*/
@RunWith(classOf[JUnitRunner])
class NormalOfflineFuzzySpec extends TestkitBaseClass with StateTestsHelperMethods {

type FixtureParam = Tuple5[TestFSMRef[State, Data, Channel], TestFSMRef[State, Data, Channel], ActorRef, ActorRef, ActorRef]

override def withFixture(test: OneArgTest) = {
val pipe = system.actorOf(Props(new Pipe()))
val alice2blockchain = TestProbe()
val blockchainA = system.actorOf(Props(new PeerWatcher(new TestBitcoinClient())))
val bob2blockchain = TestProbe()
val relayerA = system.actorOf(Props(new FuzzyRelayer(410000)))
val relayerB = system.actorOf(Props(new FuzzyRelayer(420000)))
val router = TestProbe()
val alice: TestFSMRef[State, Data, Channel] = TestFSMRef(new Channel(pipe, alice2blockchain.ref, router.ref, relayerA))
val bob: TestFSMRef[State, Data, Channel] = TestFSMRef(new Channel(pipe, bob2blockchain.ref, router.ref, relayerB))
within(30 seconds) {
val aliceInit = Init(Alice.channelParams.globalFeatures, Alice.channelParams.localFeatures)
val bobInit = Init(Bob.channelParams.globalFeatures, Bob.channelParams.localFeatures)
relayerA ! alice
relayerB ! bob
alice ! INPUT_INIT_FUNDER(Bob.id, 0, TestConstants.fundingSatoshis, TestConstants.pushMsat, Alice.channelParams, bobInit)
bob ! INPUT_INIT_FUNDEE(Alice.id, 0, Bob.channelParams, aliceInit)
pipe ! (alice, bob)
alice2blockchain.expectMsgType[MakeFundingTx]
alice2blockchain.forward(blockchainA)
alice2blockchain.expectMsgType[WatchSpent]
alice2blockchain.expectMsgType[WatchConfirmed]
alice2blockchain.forward(blockchainA)
alice2blockchain.expectMsgType[PublishAsap]
alice2blockchain.forward(blockchainA)
bob2blockchain.expectMsgType[WatchSpent]
bob2blockchain.expectMsgType[WatchConfirmed]
bob ! WatchEventConfirmed(BITCOIN_FUNDING_DEPTHOK, 400000, 42)
alice2blockchain.expectMsgType[WatchLost]
bob2blockchain.expectMsgType[WatchLost]
awaitCond(alice.stateName == NORMAL)
awaitCond(bob.stateName == NORMAL)
awaitCond(alice.stateName == NORMAL)
awaitCond(bob.stateName == NORMAL)
}
test((alice, bob, pipe, relayerA, relayerB))
}

class FuzzyRelayer(expiry: Int) extends Actor with ActorLogging {

val paymentpreimage = BinaryData("42" * 32)
val paymentHash = Crypto.sha256(paymentpreimage)

override def receive: Receive = {
case channel: ActorRef => context become ready(channel)
}

def ready(channel: ActorRef): Receive = {
case 'start => context become main(sender, channel, 0, 0)
}

def main(origin: ActorRef, channel: ActorRef, htlcSent: Int, htlcInFlight: Int): Receive = {

case htlc: UpdateAddHtlc =>
val preimage = BinaryData("42" * 32)
sender ! CMD_FULFILL_HTLC(htlc.id, preimage)
sender ! CMD_SIGN

case (add: UpdateAddHtlc, fulfill: UpdateFulfillHtlc) =>
if (htlcInFlight <= 1) {
if (htlcSent < 100) {
log.info(s"already sent $htlcSent, inFlight=${htlcInFlight - 1}")
self ! 'add
} else {
origin ! "done"
}
}
context become main(origin, channel, htlcSent, htlcInFlight - 1)

case 'add =>
val cmds = for (i <- 0 to Random.nextInt(10)) yield CMD_ADD_HTLC(Random.nextInt(1000000) + 1000, paymentHash, 400144)
//val cmds = CMD_ADD_HTLC(Random.nextInt(1000000), paymentHash, expiry) :: Nil
cmds.foreach(channel ! _)
channel ! CMD_SIGN
context become main(origin, channel, htlcSent + cmds.size, htlcInFlight + cmds.size)

case "ok" => {}

}

}

test("fuzzy testing with only one party sending HTLCs") {
case (alice, bob, pipe, relayerA, relayerB) =>
val sender = TestProbe()
sender.send(relayerA, 'start)
sender.send(relayerB, 'start)
relayerA ! 'add
import scala.concurrent.ExecutionContext.Implicits.global
var currentPipe = pipe

val task = system.scheduler.schedule(3 seconds, 3 seconds) {
currentPipe ! INPUT_DISCONNECTED
val newPipe = system.actorOf(Props(new Pipe()))
system.scheduler.scheduleOnce(500 millis) {
currentPipe ! INPUT_RECONNECTED(newPipe)
currentPipe = newPipe
}
}
sender.expectMsg(10 minutes, "done")
task.cancel()
}

test("fuzzy testing in with both parties sending HTLCs") {
case (alice, bob, pipe, relayerA, relayerB) =>
val sender = TestProbe()
sender.send(relayerA, 'start)
sender.send(relayerB, 'start)
relayerA ! 'add
relayerB ! 'add
import scala.concurrent.ExecutionContext.Implicits.global
var currentPipe = pipe

val task = system.scheduler.schedule(3 seconds, 3 seconds) {
currentPipe ! INPUT_DISCONNECTED
val newPipe = system.actorOf(Props(new Pipe()))
system.scheduler.scheduleOnce(500 millis) {
currentPipe ! INPUT_RECONNECTED(newPipe)
currentPipe = newPipe
}
}
sender.expectMsg(10 minutes, "done")
sender.expectMsg(10 minutes, "done")
task.cancel()
}


}
@ -9,7 +9,7 @@ import fr.acinq.eclair.blockchain._
import fr.acinq.eclair.blockchain.peer.CurrentBlockCount
import fr.acinq.eclair.channel.states.StateTestsHelperMethods
import fr.acinq.eclair.channel.{Data, State, _}
import fr.acinq.eclair.payment.{Binding, Local, Relayed}
import fr.acinq.eclair.payment.{Bind, Local, Relayed}
import fr.acinq.eclair.transactions.{IN, OUT}
import fr.acinq.eclair.wire.{ClosingSigned, CommitSig, Error, RevokeAndAck, Shutdown, UpdateAddHtlc, UpdateFailHtlc, UpdateFulfillHtlc}
import fr.acinq.eclair.{TestBitcoinClient, TestConstants, TestkitBaseClass}
@ -59,7 +59,7 @@ class NormalStateSpec extends TestkitBaseClass with StateTestsHelperMethods {
localChanges = initialState.commitments.localChanges.copy(proposed = htlc :: Nil),
unackedMessages = htlc :: Nil
)))
relayer.expectMsg(Binding(htlc, origin = Local))
relayer.expectMsg(Bind(htlc, origin = Local(sender.ref)))
}
}

@ -83,7 +83,7 @@ class NormalStateSpec extends TestkitBaseClass with StateTestsHelperMethods {
val sender = TestProbe()
val h = BinaryData("00112233445566778899aabbccddeeff")
val originHtlc = UpdateAddHtlc(channelId = 4298564, id = 5656, amountMsat = 50000000, expiry = 400144, paymentHash = h, onionRoutingPacket = "00" * 1254)
val cmd = CMD_ADD_HTLC(originHtlc.amountMsat - 10000, h, originHtlc.expiry - 7, origin = Relayed(originHtlc))
val cmd = CMD_ADD_HTLC(originHtlc.amountMsat - 10000, h, originHtlc.expiry - 7, upstream_opt = Some(originHtlc))
sender.send(alice, cmd)
sender.expectMsg("ok")
val htlc = alice2bob.expectMsgType[UpdateAddHtlc]
@ -93,7 +93,7 @@ class NormalStateSpec extends TestkitBaseClass with StateTestsHelperMethods {
localNextHtlcId = 1,
localChanges = initialState.commitments.localChanges.copy(proposed = htlc :: Nil),
unackedMessages = htlc :: Nil)))
relayer.expectMsg(Binding(htlc, origin = Relayed(originHtlc)))
relayer.expectMsg(Bind(htlc, origin = Relayed(originHtlc)))
}
}

@ -677,7 +677,7 @@ class NormalStateSpec extends TestkitBaseClass with StateTestsHelperMethods {
val initialState = bob.stateData.asInstanceOf[DATA_NORMAL]

sender.send(bob, CMD_FULFILL_HTLC(42, r))
sender.expectMsg("requirement failed: unknown htlc id=42")
sender.expectMsg("unknown htlc id=42")
assert(initialState == bob.stateData)
}
}
@ -119,17 +119,16 @@ class OfflineStateSpec extends TestkitBaseClass with StateTestsHelperMethods {
// => A expects rev2 but it will first receive rev1
val comm_a1_1 = comm_a
// A ignores rev1
assert(Commitments.isOldRevocation(comm_a1_1, ba_rev_1) === true)
assert(Commitments.receiveRevocation(comm_a1_1, ba_rev_1).isLeft)
// since A sent back sig2 so b replies with rev2
val comm_a1_2 = Commitments.receiveRevocation(comm_a1_1, ba_rev_2)
val comm_a1_2 = Commitments.receiveRevocation(comm_a1_1, ba_rev_2).right.get
assert(comm_a1_2.unackedMessages.map(Commitments.msg2String(_)).mkString(" ") === "")

// SCENARIO A2: B did receive sig2
// => A expects rev2 and will receive it
val comm_a2_1 = comm_a
// a will first receive sig2
assert(Commitments.isOldRevocation(comm_a2_1, ba_rev_2) === false)
val comm_a2_2 = Commitments.receiveRevocation(comm_a2_1, ba_rev_2).right.get
val comm_a2_2 = Commitments.receiveRevocation(comm_a2_1, ba_rev_2)
assert(comm_a2_2.unackedMessages.map(Commitments.msg2String(_)).mkString(" ") === "")

// SCENARIO B1: B did receive sig2
@ -97,7 +97,7 @@ class ShutdownStateSpec extends TestkitBaseClass with StateTestsHelperMethods {
val sender = TestProbe()
val initialState = bob.stateData.asInstanceOf[DATA_SHUTDOWN]
sender.send(bob, CMD_FULFILL_HTLC(42, "12" * 32))
sender.expectMsg("requirement failed: unknown htlc id=42")
sender.expectMsg("unknown htlc id=42")
assert(initialState == bob.stateData)
}
}
@ -5,6 +5,7 @@ import akka.actor.Status.Failure
import akka.testkit.TestProbe
import fr.acinq.eclair.Globals
import fr.acinq.eclair.router.BaseRouterSpec
import fr.acinq.eclair.wire.{UpdateFailHtlc, UpdateFulfillHtlc}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

@ -46,7 +47,7 @@ class PaymentLifecycleSpec extends BaseRouterSpec {
val Transition(_, WAITING_FOR_REQUEST, WAITING_FOR_ROUTE) = monitor.expectMsgClass(classOf[Transition[_]])
val Transition(_, WAITING_FOR_ROUTE, WAITING_FOR_PAYMENT_COMPLETE) = monitor.expectMsgClass(classOf[Transition[_]])

sender.send(paymentFSM, PaymentFailed(null, request.paymentHash, "some reason"))
sender.send(paymentFSM, UpdateFailHtlc(0, 0, "some reason".getBytes))

val res = sender.expectMsgType[Failure]
assert(res.cause.getMessage === "some reason")
@ -56,6 +57,8 @@ class PaymentLifecycleSpec extends BaseRouterSpec {
val paymentFSM = system.actorOf(PaymentLifecycle.props(a, router))
val monitor = TestProbe()
val sender = TestProbe()
val eventListener = TestProbe()
system.eventStream.subscribe(eventListener.ref, classOf[PaymentEvent])

paymentFSM ! SubscribeTransitionCallBack(monitor.ref)
val CurrentState(_, WAITING_FOR_REQUEST) = monitor.expectMsgClass(classOf[CurrentState[_]])
@ -65,10 +68,11 @@ class PaymentLifecycleSpec extends BaseRouterSpec {
val Transition(_, WAITING_FOR_REQUEST, WAITING_FOR_ROUTE) = monitor.expectMsgClass(classOf[Transition[_]])
val Transition(_, WAITING_FOR_ROUTE, WAITING_FOR_PAYMENT_COMPLETE) = monitor.expectMsgClass(classOf[Transition[_]])

sender.send(paymentFSM, PaymentSent(null, request.paymentHash))
sender.send(paymentFSM, UpdateFulfillHtlc(0, 0, "42" * 32))

val res = sender.expectMsgType[String]
assert(res === "sent")

}

}
@ -24,17 +24,15 @@ class RelayerSpec extends TestkitBaseClass {
|
|||||||
// let's reuse the existing test data
|
// let's reuse the existing test data
|
||||||
import HtlcGenerationSpec._
|
import HtlcGenerationSpec._
|
||||||
|
|
||||||
type FixtureParam = Tuple3[ActorRef, TestProbe, TestProbe]
|
type FixtureParam = Tuple2[ActorRef, TestProbe]
|
||||||
|
|
||||||
override def withFixture(test: OneArgTest) = {
|
override def withFixture(test: OneArgTest) = {
|
||||||
|
|
||||||
within(30 seconds) {
|
within(30 seconds) {
|
||||||
val paymentHandler = TestProbe()
|
val paymentHandler = TestProbe()
|
||||||
val eventListener = TestProbe()
|
|
||||||
system.eventStream.subscribe(eventListener.ref, classOf[PaymentEvent])
|
|
||||||
// we are node B in the route A -> B -> C -> ....
|
// we are node B in the route A -> B -> C -> ....
|
||||||
val relayer = system.actorOf(Relayer.props(priv_b, paymentHandler.ref))
|
val relayer = system.actorOf(Relayer.props(priv_b, paymentHandler.ref))
|
||||||
test((relayer, paymentHandler, eventListener))
|
test((relayer, paymentHandler))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -44,47 +42,49 @@ class RelayerSpec extends TestkitBaseClass {
|
|||||||
val channelId_ab = 981408633
|
val channelId_ab = 981408633
|
||||||
val channelId_bc = 237534
|
val channelId_bc = 237534
|
||||||
|
|
||||||
test("add a channel") { case (relayer, _, _) =>
|
test("add a channel") { case (relayer, _) =>
|
||||||
val sender = TestProbe()
|
val sender = TestProbe()
|
||||||
val channel_bc = TestProbe()
|
val channel_bc = TestProbe()
|
||||||
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
|
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
|
||||||
sender.send(relayer, 'upstreams)
|
sender.send(relayer, 'channels)
|
||||||
val upstreams = sender.expectMsgType[Set[OutgoingChannel]]
|
val upstreams = sender.expectMsgType[Set[OutgoingChannel]]
|
||||||
assert(upstreams === Set(OutgoingChannel(channelId_bc, channel_bc.ref, nodeId_c.hash160)))
|
assert(upstreams === Set(OutgoingChannel(channelId_bc, channel_bc.ref, nodeId_c.hash160)))
|
||||||
}
|
}
|
||||||
|
|
||||||
test("remove a channel (mutual close)") { case (relayer, _, eventListener) =>
|
test("remove a channel (mutual close)") { case (relayer, _) =>
|
||||||
val sender = TestProbe()
|
val sender = TestProbe()
|
||||||
val channel_bc = TestProbe()
|
val channel_bc = TestProbe()
|
||||||
|
|
||||||
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
|
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
|
||||||
sender.send(relayer, 'upstreams)
|
sender.send(relayer, 'channels)
|
||||||
val upstreams1 = sender.expectMsgType[Set[OutgoingChannel]]
|
val upstreams1 = sender.expectMsgType[Set[OutgoingChannel]]
|
||||||
assert(upstreams1 === Set(OutgoingChannel(channelId_bc, channel_bc.ref, nodeId_c.hash160)))
|
assert(upstreams1 === Set(OutgoingChannel(channelId_bc, channel_bc.ref, nodeId_c.hash160)))
|
||||||
|
|
||||||
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, SHUTDOWN, NEGOTIATING, DATA_NEGOTIATING(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null, null, null)))
|
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, SHUTDOWN, NEGOTIATING, DATA_NEGOTIATING(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null, null, null)))
|
||||||
sender.send(relayer, 'upstreams)
|
sender.send(relayer, 'channels)
|
||||||
val upstreams2 = sender.expectMsgType[Set[OutgoingChannel]]
|
val upstreams2 = sender.expectMsgType[Set[OutgoingChannel]]
|
||||||
assert(upstreams2 === Set.empty)
|
assert(upstreams2 === Set.empty)
|
||||||
}
|
}
|
||||||
|
|
||||||
test("remove a channel (unilateral close)") { case (relayer, _, eventListener) =>
|
test("remove a channel (unilateral close)") { case (relayer, _) =>
|
||||||
val sender = TestProbe()
|
val sender = TestProbe()
|
||||||
val channel_bc = TestProbe()
|
val channel_bc = TestProbe()
|
||||||
|
|
||||||
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
|
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
|
||||||
sender.send(relayer, 'upstreams)
|
sender.send(relayer, 'channels)
|
||||||
val upstreams1 = sender.expectMsgType[Set[OutgoingChannel]]
|
val upstreams1 = sender.expectMsgType[Set[OutgoingChannel]]
|
||||||
assert(upstreams1 === Set(OutgoingChannel(channelId_bc, channel_bc.ref, nodeId_c.hash160)))
|
assert(upstreams1 === Set(OutgoingChannel(channelId_bc, channel_bc.ref, nodeId_c.hash160)))
|
||||||
|
|
||||||
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, NORMAL, CLOSING, DATA_CLOSING(Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), None, Some(null), None, None, Nil)))
|
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, NORMAL, CLOSING, DATA_CLOSING(Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), None, Some(null), None, None, Nil)))
|
||||||
sender.send(relayer, 'upstreams)
|
sender.send(relayer, 'channels)
|
||||||
val upstreams2 = sender.expectMsgType[Set[OutgoingChannel]]
|
val upstreams2 = sender.expectMsgType[Set[OutgoingChannel]]
|
||||||
assert(upstreams2 === Set.empty)
|
assert(upstreams2 === Set.empty)
|
||||||
}
|
}
|
||||||
|
|
||||||
test("send an event when we receive a payment") { case (relayer, paymentHandler, eventListener) =>
|
test("send an event when we receive a payment") { case (relayer, paymentHandler) =>
|
||||||
val sender = TestProbe()
|
val sender = TestProbe()
|
||||||
|
val eventListener = TestProbe()
|
||||||
|
system.eventStream.subscribe(eventListener.ref, classOf[PaymentEvent])
|
||||||
|
|
||||||
val add_ab = {
|
val add_ab = {
|
||||||
val cmd = buildCommand(finalAmountMsat, paymentHash, hops.take(1), currentBlockCount)
|
val cmd = buildCommand(finalAmountMsat, paymentHash, hops.take(1), currentBlockCount)
|
||||||
@ -92,7 +92,7 @@ class RelayerSpec extends TestkitBaseClass {
|
|||||||
UpdateAddHtlc(channelId = channelId_ab, id = 123456, cmd.amountMsat, cmd.expiry, cmd.paymentHash, cmd.onion)
|
UpdateAddHtlc(channelId = channelId_ab, id = 123456, cmd.amountMsat, cmd.expiry, cmd.paymentHash, cmd.onion)
|
||||||
}
|
}
|
||||||
|
|
||||||
sender.send(relayer, add_ab)
|
sender.send(relayer, ForwardAdd(add_ab))
|
||||||
|
|
||||||
val add1 = paymentHandler.expectMsgType[UpdateAddHtlc]
|
val add1 = paymentHandler.expectMsgType[UpdateAddHtlc]
|
||||||
eventListener.expectMsgType[PaymentReceived]
|
eventListener.expectMsgType[PaymentReceived]
|
||||||
@ -101,7 +101,8 @@ class RelayerSpec extends TestkitBaseClass {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
test("relay an htlc-add") { case (relayer, paymentHandler, eventListener) =>
|
|
||||||
|
test("relay an htlc-add") { case (relayer, paymentHandler) =>
|
||||||
val sender = TestProbe()
|
val sender = TestProbe()
|
||||||
val channel_bc = TestProbe()
|
val channel_bc = TestProbe()
|
||||||
|
|
||||||
@ -112,17 +113,17 @@ class RelayerSpec extends TestkitBaseClass {
|
|||||||
}
|
}
|
||||||
|
|
||||||
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
|
sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
|
||||||
sender.send(relayer, add_ab)
|
sender.send(relayer, ForwardAdd(add_ab))
|
||||||
|
|
||||||
sender.expectNoMsg(1 second)
|
sender.expectNoMsg(1 second)
|
||||||
val cmd_bc = channel_bc.expectMsgType[CMD_ADD_HTLC]
|
val cmd_bc = channel_bc.expectMsgType[CMD_ADD_HTLC]
|
||||||
paymentHandler.expectNoMsg(1 second)
|
paymentHandler.expectNoMsg(1 second)
|
||||||
|
|
||||||
assert(cmd_bc.origin === Relayed(add_ab))
|
assert(cmd_bc.upstream_opt === Some(add_ab))
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
test("fail to relay an htlc-add when there is no available upstream channel") { case (relayer, paymentHandler, _) =>
|
test("fail to relay an htlc-add when there is no available upstream channel") { case (relayer, paymentHandler) =>
|
||||||
val sender = TestProbe()
|
val sender = TestProbe()
|
||||||
val channel_bc = TestProbe()
|
val channel_bc = TestProbe()
|
||||||
|
|
||||||
@ -132,7 +133,7 @@ class RelayerSpec extends TestkitBaseClass {
|
|||||||
UpdateAddHtlc(channelId = channelId_ab, id = 123456, cmd.amountMsat, cmd.expiry, cmd.paymentHash, cmd.onion)
|
UpdateAddHtlc(channelId = channelId_ab, id = 123456, cmd.amountMsat, cmd.expiry, cmd.paymentHash, cmd.onion)
|
||||||
}
|
}
|
||||||
|
|
||||||
sender.send(relayer, add_ab)
|
sender.send(relayer, ForwardAdd(add_ab))
|
||||||
|
|
||||||
val fail = sender.expectMsgType[CMD_FAIL_HTLC]
|
val fail = sender.expectMsgType[CMD_FAIL_HTLC]
|
||||||
channel_bc.expectNoMsg(1 second)
|
channel_bc.expectNoMsg(1 second)
|
||||||
@ -142,7 +143,7 @@ class RelayerSpec extends TestkitBaseClass {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
test("fail to relay an htlc-add when the onion is malformed") { case (relayer, paymentHandler, _) =>
|
test("fail to relay an htlc-add when the onion is malformed") { case (relayer, paymentHandler) =>
|
||||||
|
|
||||||
// TODO: we should use the new update_fail_malformed_htlc message (see BOLT 2)
|
// TODO: we should use the new update_fail_malformed_htlc message (see BOLT 2)
|
||||||
val sender = TestProbe()
|
val sender = TestProbe()
|
||||||
@ -154,7 +155,7 @@ class RelayerSpec extends TestkitBaseClass {
|
|||||||
UpdateAddHtlc(channelId = channelId_ab, id = 123456, cmd.amountMsat, cmd.expiry, cmd.paymentHash, "00" * 1254)
|
UpdateAddHtlc(channelId = channelId_ab, id = 123456, cmd.amountMsat, cmd.expiry, cmd.paymentHash, "00" * 1254)
|
||||||
}
|
}
|
||||||
|
|
||||||
sender.send(relayer, add_ab)
|
sender.send(relayer, ForwardAdd(add_ab))
|
||||||
|
|
||||||
val fail = sender.expectMsgType[CMD_FAIL_HTLC]
|
val fail = sender.expectMsgType[CMD_FAIL_HTLC]
|
||||||
channel_bc.expectNoMsg(1 second)
|
channel_bc.expectNoMsg(1 second)
|
||||||
@ -164,7 +165,7 @@ class RelayerSpec extends TestkitBaseClass {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
test("relay an htlc-fulfill") { case (relayer, paymentHandler, eventListener) =>
|
test("relay an htlc-fulfill") { case (relayer, paymentHandler) =>
|
||||||
val sender = TestProbe()
|
val sender = TestProbe()
|
||||||
val channel_ab = TestProbe()
|
val channel_ab = TestProbe()
|
||||||
val channel_bc = TestProbe()
|
val channel_bc = TestProbe()
|
||||||
@@ -177,46 +178,21 @@ class RelayerSpec extends TestkitBaseClass {
 
     sender.send(relayer, ChannelChangedState(channel_ab.ref, null, nodeId_a, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_ab), null)))
     sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
-    sender.send(relayer, add_ab)
+    sender.send(relayer, ForwardAdd(add_ab))
     val cmd_bc = channel_bc.expectMsgType[CMD_ADD_HTLC]
     val add_bc = UpdateAddHtlc(channelId = channelId_bc, id = 987451, amountMsat = cmd_bc.amountMsat, expiry = cmd_bc.expiry, paymentHash = cmd_bc.paymentHash, onionRoutingPacket = cmd_bc.onion)
-    sender.send(relayer, Binding(add_bc, Relayed(add_ab)))
+    sender.send(relayer, Bind(add_bc, Relayed(add_ab)))
     // preimage is wrong, does not matter here
     val fulfill_cb = UpdateFulfillHtlc(channelId = add_bc.channelId, id = add_bc.id, paymentPreimage = "00" * 32)
-    sender.send(relayer, (add_bc, fulfill_cb))
+    sender.send(relayer, ForwardFulfill(fulfill_cb))
 
-    channel_ab.expectMsg(CMD_SIGN)
     val fulfill_ba = channel_ab.expectMsgType[CMD_FULFILL_HTLC]
-    channel_ab.expectMsg(CMD_SIGN)
-    eventListener.expectNoMsg(1 second)
 
     assert(fulfill_ba.id === add_ab.id)
 
   }
 
-  test("send an event when we receive an htlc-fulfill and we were the initiator") { case (relayer, paymentHandler, eventListener) =>
-    val sender = TestProbe()
-    val channel_ab = TestProbe()
-    val channel_bc = TestProbe()
-
-    // note we simulate this by not having a binding for this channel
-
-    val add_ab = {
-      val cmd = buildCommand(finalAmountMsat, paymentHash, hops.take(1), currentBlockCount)
-      // and then manually build an htlc
-      UpdateAddHtlc(channelId = channelId_ab, id = 123456, cmd.amountMsat, cmd.expiry, cmd.paymentHash, cmd.onion)
-    }
-    sender.send(relayer, Binding(add_ab, Local))
-    // preimage is wrong, does not matter here
-    val fulfill_cb = UpdateFulfillHtlc(channelId = add_ab.channelId, id = add_ab.id, paymentPreimage = "00" * 32)
-    sender.send(relayer, (add_ab, fulfill_cb))
-
-    channel_ab.expectNoMsg(1 second)
-    eventListener.expectMsgType[PaymentSent]
-
-  }
-
-  test("relay an htlc-fail") { case (relayer, paymentHandler, eventListener) =>
+  test("relay an htlc-fail") { case (relayer, paymentHandler) =>
     val sender = TestProbe()
     val channel_ab = TestProbe()
     val channel_bc = TestProbe()
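
Binding becomes Bind, and its second argument now distinguishes where the outgoing HTLC originated: Relayed(add_ab) when it was forwarded from an upstream channel, versus a locally initiated payment. A hedged sketch of that hierarchy, with field names guessed from the test code rather than copied from the payment package:

import akka.actor.ActorRef
import fr.acinq.eclair.wire.UpdateAddHtlc

// Assumed shapes, for illustration only.
sealed trait Origin
case class Local(sender: ActorRef) extends Origin          // payment initiated by this node
case class Relayed(upstream: UpdateAddHtlc) extends Origin // payment forwarded from an upstream HTLC

// Registers an outgoing HTLC together with its origin, so that a later
// fulfill or fail coming back from downstream can be routed to the right
// upstream channel (Relayed) or reported to the local sender (Local).
case class Bind(add: UpdateAddHtlc, origin: Origin)

Carrying the origin explicitly is consistent with the two "we were the initiator" tests being dropped from this spec, since they were driven through the now-removed event listener probe.
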
@@ -229,44 +205,20 @@ class RelayerSpec extends TestkitBaseClass {
 
     sender.send(relayer, ChannelChangedState(channel_ab.ref, null, nodeId_a, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_ab), null)))
     sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
-    sender.send(relayer, add_ab)
+    sender.send(relayer, ForwardAdd(add_ab))
     val cmd_bc = channel_bc.expectMsgType[CMD_ADD_HTLC]
     val add_bc = UpdateAddHtlc(channelId = channelId_bc, id = 987451, amountMsat = cmd_bc.amountMsat, expiry = cmd_bc.expiry, paymentHash = cmd_bc.paymentHash, onionRoutingPacket = cmd_bc.onion)
-    sender.send(relayer, Binding(add_bc, Relayed(add_ab)))
+    sender.send(relayer, Bind(add_bc, Relayed(add_ab)))
     val fail_cb = UpdateFailHtlc(channelId = add_bc.channelId, id = add_bc.id, reason = "some reason".getBytes())
-    sender.send(relayer, (add_bc, fail_cb))
+    sender.send(relayer, ForwardFail(fail_cb))
 
-    channel_ab.expectMsg(CMD_SIGN)
     val fulfill_ba = channel_ab.expectMsgType[CMD_FAIL_HTLC]
-    channel_ab.expectMsg(CMD_SIGN)
-    eventListener.expectNoMsg(1 second)
 
     assert(fulfill_ba.id === add_ab.id)
 
   }
 
-  test("send an event when we receive an htlc-fail and we were the initiator") { case (relayer, paymentHandler, eventListener) =>
-    val sender = TestProbe()
-    val channel_ab = TestProbe()
-    val channel_bc = TestProbe()
-
-    // note we simulate this by not having a binding for this channel
-
-    val add_ab = {
-      val cmd = buildCommand(finalAmountMsat, paymentHash, hops, currentBlockCount)
-      // and then manually build an htlc
-      UpdateAddHtlc(channelId = channelId_ab, id = 123456, cmd.amountMsat, cmd.expiry, cmd.paymentHash, cmd.onion)
-    }
-    sender.send(relayer, Binding(add_ab, Local))
-    val fail_cb = UpdateFailHtlc(channelId = add_ab.channelId, id = add_ab.id, reason = "some reason".getBytes())
-    sender.send(relayer, (add_ab, fail_cb))
-
-    channel_ab.expectNoMsg(1 second)
-    eventListener.expectMsgType[PaymentFailed]
-
-  }
-
-  test("extract a payment preimage from an onchain tx (extract from witnessHtlcSuccess script)") { case (relayer, paymentHandler, eventListener) =>
+  test("extract a payment preimage from an onchain tx (extract from witnessHtlcSuccess script)") { case (relayer, paymentHandler) =>
     val sender = TestProbe()
     val channel_ab = TestProbe()
     val channel_bc = TestProbe()
@@ -279,23 +231,21 @@ class RelayerSpec extends TestkitBaseClass {
 
     sender.send(relayer, ChannelChangedState(channel_ab.ref, null, nodeId_a, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_ab), null)))
     sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
-    sender.send(relayer, add_ab)
+    sender.send(relayer, ForwardAdd(add_ab))
     val cmd_bc = channel_bc.expectMsgType[CMD_ADD_HTLC]
     val add_bc = UpdateAddHtlc(channelId = channelId_bc, id = 987451, amountMsat = cmd_bc.amountMsat, expiry = cmd_bc.expiry, paymentHash = cmd_bc.paymentHash, onionRoutingPacket = cmd_bc.onion)
-    sender.send(relayer, Binding(add_bc, Relayed(add_ab)))
+    sender.send(relayer, Bind(add_bc, Relayed(add_ab)))
 
     // actual test starts here
     val tx = Transaction(version = 0, txIn = TxIn(outPoint = OutPoint("22" * 32, 0), signatureScript = "", sequence = 0, witness = Scripts.witnessHtlcSuccess("11" * 70, "22" * 70, paymentPreimage, "33" * 130)) :: Nil, txOut = Nil, lockTime = 0)
     sender.send(relayer, WatchEventSpent(BITCOIN_HTLC_SPENT, tx))
-    channel_ab.expectMsg(CMD_SIGN)
     val cmd_ab = channel_ab.expectMsgType[CMD_FULFILL_HTLC]
-    channel_ab.expectMsg(CMD_SIGN)
 
     assert(cmd_ab.id === add_ab.id)
 
   }
 
-  test("extract a payment preimage from an onchain tx (extract from witnessClaimHtlcSuccessFromCommitTx script)") { case (relayer, paymentHandler, eventListener) =>
+  test("extract a payment preimage from an onchain tx (extract from witnessClaimHtlcSuccessFromCommitTx script)") { case (relayer, paymentHandler) =>
     val sender = TestProbe()
     val channel_ab = TestProbe()
     val channel_bc = TestProbe()
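
The two preimage-extraction tests feed the relayer a transaction whose witness was built with Scripts.witnessHtlcSuccess or Scripts.witnessClaimHtlcSuccessFromCommitTx and expect a CMD_FULFILL_HTLC for the matching upstream HTLC. The idea they exercise can be sketched as follows; this is a sketch only, assuming the fr.acinq.bitcoin types used elsewhere in these tests and that a preimage always appears as a 32-byte witness element:

import fr.acinq.bitcoin.{BinaryData, Crypto, Transaction}

// Collect every 32-byte witness element of the spending transaction and keep
// the ones whose sha256 matches a payment hash we are currently tracking.
def extractPreimages(tx: Transaction, trackedPaymentHashes: Set[BinaryData]): Seq[BinaryData] =
  tx.txIn
    .flatMap(_.witness.stack)      // all witness elements, all inputs
    .filter(_.size == 32)          // candidate preimages are exactly 32 bytes
    .filter(candidate => trackedPaymentHashes.contains(Crypto.sha256(candidate)))

Both witness layouts place the preimage on the stack, which is why a single size-based scan covers the htlc-success and claim-htlc-success cases.
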
@@ -308,17 +258,15 @@ class RelayerSpec extends TestkitBaseClass {
 
     sender.send(relayer, ChannelChangedState(channel_ab.ref, null, nodeId_a, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_ab), null)))
     sender.send(relayer, ChannelChangedState(channel_bc.ref, null, nodeId_c, WAIT_FOR_FUNDING_LOCKED, NORMAL, DATA_NORMAL(null, Commitments(null, null, null, null, null, null, 0, 0, null, null, null, null, channelId_bc), null)))
-    sender.send(relayer, add_ab)
+    sender.send(relayer, ForwardAdd(add_ab))
     val cmd_bc = channel_bc.expectMsgType[CMD_ADD_HTLC]
     val add_bc = UpdateAddHtlc(channelId = channelId_bc, id = 987451, amountMsat = cmd_bc.amountMsat, expiry = cmd_bc.expiry, paymentHash = cmd_bc.paymentHash, onionRoutingPacket = cmd_bc.onion)
-    sender.send(relayer, Binding(add_bc, Relayed(add_ab)))
+    sender.send(relayer, Bind(add_bc, Relayed(add_ab)))
 
     // actual test starts here
     val tx = Transaction(version = 0, txIn = TxIn(outPoint = OutPoint("22" * 32, 0), signatureScript = "", sequence = 0, witness = Scripts.witnessClaimHtlcSuccessFromCommitTx("11" * 70, paymentPreimage, "33" * 130)) :: Nil, txOut = Nil, lockTime = 0)
     sender.send(relayer, WatchEventSpent(BITCOIN_HTLC_SPENT, tx))
-    channel_ab.expectMsg(CMD_SIGN)
     val cmd_ab = channel_ab.expectMsgType[CMD_FULFILL_HTLC]
-    channel_ab.expectMsg(CMD_SIGN)
 
     assert(cmd_ab.id === add_ab.id)
 