Mirror of https://github.com/btcsuite/btcd.git (synced 2024-11-19 18:00:11 +01:00)

Merge pull request #1894 from Roasbeef/musig2-1-0

btcec/schnorr/musig2: update to musig 1.0.0

Commit: 2cc19083f2
btcec/go.mod

@@ -6,6 +6,11 @@ require (
 	github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1
 	github.com/davecgh/go-spew v1.1.1
 	github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1
+	github.com/stretchr/testify v1.8.0
 )
 
-require github.com/decred/dcrd/crypto/blake256 v1.0.0 // indirect
+require (
+	github.com/decred/dcrd/crypto/blake256 v1.0.0 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
+	gopkg.in/yaml.v3 v3.0.1 // indirect
+)
btcec/go.sum (13 lines changed)

@@ -1,8 +1,21 @@
 github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U=
 github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0=
 github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc=
 github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc=
 github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
btcec/schnorr/musig2/context.go

@@ -206,12 +206,7 @@ func NewContext(signingKey *btcec.PrivateKey, shouldSort bool,
 		option(opts)
 	}
 
-	pubKey, err := schnorr.ParsePubKey(
-		schnorr.SerializePubKey(signingKey.PubKey()),
-	)
-	if err != nil {
-		return nil, err
-	}
+	pubKey := signingKey.PubKey()
 
 	ctx := &Context{
 		signingKey: signingKey,

@@ -243,7 +238,10 @@ func NewContext(signingKey *btcec.PrivateKey, shouldSort bool,
 	// the nonce now to pass in to the session once all the callers
 	// are known.
 	if opts.earlyNonce {
-		ctx.sessionNonce, err = GenNonces()
+		var err error
+		ctx.sessionNonce, err = GenNonces(
+			WithNonceSecretKeyAux(signingKey),
+		)
 		if err != nil {
 			return nil, err
 		}
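The hunks above change how NewContext derives the signer's public key and, when early nonce generation is requested, mix the signing key into nonce derivation via WithNonceSecretKeyAux. Below is a minimal standalone sketch of that nonce-generation call, assuming the package import path of this repository (github.com/btcsuite/btcd/btcec/v2/schnorr/musig2) and using only the GenNonces and WithNonceSecretKeyAux names that appear in the diff; it is an illustration, not the PR's own code.

```go
package main

import (
	"fmt"

	"github.com/btcsuite/btcd/btcec/v2"
	"github.com/btcsuite/btcd/btcec/v2/schnorr/musig2"
)

func main() {
	// Fresh signing key for the local signer.
	signingKey, err := btcec.NewPrivateKey()
	if err != nil {
		panic(err)
	}

	// As in the NewContext change above, the secret key is mixed into the
	// nonce derivation as auxiliary input, so two signers sharing the same
	// RNG state still derive distinct nonces.
	nonces, err := musig2.GenNonces(
		musig2.WithNonceSecretKeyAux(signingKey),
	)
	if err != nil {
		panic(err)
	}

	// SecNonce is the field exercised by the tests later in this diff.
	fmt.Printf("generated secret nonce of %d bytes\n", len(nonces.SecNonce))
}
```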
btcec/schnorr/musig2/data/key_agg_vectors.json (new file, 88 lines)

@@ -0,0 +1,88 @@
{
  "pubkeys": [
    "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
    "03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
    "023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66",
    "020000000000000000000000000000000000000000000000000000000000000005",
    "02FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30",
    "04F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
    "03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9"
  ],
  "tweaks": [
    "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",
    "252E4BD67410A76CDF933D30EAA1608214037F1B105A013ECCD3C5C184A6110B"
  ],
  "valid_test_cases": [
    {
      "key_indices": [0, 1, 2],
      "expected": "90539EEDE565F5D054F32CC0C220126889ED1E5D193BAF15AEF344FE59D4610C"
    },
    {
      "key_indices": [2, 1, 0],
      "expected": "6204DE8B083426DC6EAF9502D27024D53FC826BF7D2012148A0575435DF54B2B"
    },
    {
      "key_indices": [0, 0, 0],
      "expected": "B436E3BAD62B8CD409969A224731C193D051162D8C5AE8B109306127DA3AA935"
    },
    {
      "key_indices": [0, 0, 1, 1],
      "expected": "69BC22BFA5D106306E48A20679DE1D7389386124D07571D0D872686028C26A3E"
    }
  ],
  "error_test_cases": [
    {
      "key_indices": [0, 3],
      "tweak_indices": [],
      "is_xonly": [],
      "error": {
        "type": "invalid_contribution",
        "signer": 1,
        "contrib": "pubkey"
      },
      "comment": "Invalid public key"
    },
    {
      "key_indices": [0, 4],
      "tweak_indices": [],
      "is_xonly": [],
      "error": {
        "type": "invalid_contribution",
        "signer": 1,
        "contrib": "pubkey"
      },
      "comment": "Public key exceeds field size"
    },
    {
      "key_indices": [5, 0],
      "tweak_indices": [],
      "is_xonly": [],
      "error": {
        "type": "invalid_contribution",
        "signer": 0,
        "contrib": "pubkey"
      },
      "comment": "First byte of public key is not 2 or 3"
    },
    {
      "key_indices": [0, 1],
      "tweak_indices": [0],
      "is_xonly": [true],
      "error": {
        "type": "value",
        "message": "The tweak must be less than n."
      },
      "comment": "Tweak is out of range"
    },
    {
      "key_indices": [6],
      "tweak_indices": [1],
      "is_xonly": [false],
      "error": {
        "type": "value",
        "message": "The result of tweaking cannot be infinity."
      },
      "comment": "Intermediate tweaking result is point at infinity"
    }
  ]
}
btcec/schnorr/musig2/data/key_sort_vectors.json (new file, 16 lines)

@@ -0,0 +1,16 @@
{
  "pubkeys": [
    "02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8",
    "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
    "03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
    "023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66",
    "02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8"
  ],
  "sorted_pubkeys": [
    "023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66",
    "02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8",
    "02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8",
    "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
    "03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659"
  ]
}
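The sorted vectors above are plain lexicographic order over the 33-byte compressed serializations, which matches the sortableKeys change later in this diff. A small self-contained sketch using two of the vector keys and only the standard library (illustrative, not the package's own sort helper):

```go
package main

import (
	"bytes"
	"encoding/hex"
	"fmt"
	"sort"
)

func main() {
	// Two compressed public keys taken from the vector file above.
	keys := []string{
		"03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
		"023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66",
	}

	// Decode to raw compressed-key bytes and sort lexicographically.
	raw := make([][]byte, len(keys))
	for i, k := range keys {
		raw[i], _ = hex.DecodeString(k)
	}
	sort.Slice(raw, func(i, j int) bool {
		return bytes.Compare(raw[i], raw[j]) == -1
	})

	// The 02... key sorts before the 03... key, as in sorted_pubkeys.
	for _, k := range raw {
		fmt.Printf("%X\n", k)
	}
}
```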
btcec/schnorr/musig2/data/nonce_agg_vectors.json (new file, 54 lines)

@@ -0,0 +1,54 @@
{
  "pnonces": [
    "020151C80F435648DF67A22B749CD798CE54E0321D034B92B709B567D60A42E66603BA47FBC1834437B3212E89A84D8425E7BF12E0245D98262268EBDCB385D50641",
    "03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B833",
    "020151C80F435648DF67A22B749CD798CE54E0321D034B92B709B567D60A42E6660279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",
    "03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60379BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",
    "04FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B833",
    "03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B831",
    "03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A602FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30"
  ],
  "valid_test_cases": [
    {
      "pnonce_indices": [0, 1],
      "expected": "035FE1873B4F2967F52FEA4A06AD5A8ECCBE9D0FD73068012C894E2E87CCB5804B024725377345BDE0E9C33AF3C43C0A29A9249F2F2956FA8CFEB55C8573D0262DC8"
    },
    {
      "pnonce_indices": [2, 3],
      "expected": "035FE1873B4F2967F52FEA4A06AD5A8ECCBE9D0FD73068012C894E2E87CCB5804B000000000000000000000000000000000000000000000000000000000000000000",
      "comment": "Sum of second points encoded in the nonces is point at infinity which is serialized as 33 zero bytes"
    }
  ],
  "error_test_cases": [
    {
      "pnonce_indices": [0, 4],
      "error": {
        "type": "invalid_contribution",
        "signer": 1,
        "contrib": "pubnonce"
      },
      "comment": "Public nonce from signer 1 is invalid due wrong tag, 0x04, in the first half",
      "btcec_err": "invalid public key: unsupported format: 4"
    },
    {
      "pnonce_indices": [5, 1],
      "error": {
        "type": "invalid_contribution",
        "signer": 0,
        "contrib": "pubnonce"
      },
      "comment": "Public nonce from signer 0 is invalid because the second half does not correspond to an X coordinate",
      "btcec_err": "invalid public key: x coordinate 48c264cdd57d3c24d79990b0f865674eb62a0f9018277a95011b41bfc193b831 is not on the secp256k1 curve"
    },
    {
      "pnonce_indices": [6, 1],
      "error": {
        "type": "invalid_contribution",
        "signer": 0,
        "contrib": "pubnonce"
      },
      "comment": "Public nonce from signer 0 is invalid because second half exceeds field size",
      "btcec_err": "invalid public key: x >= field prime"
    }
  ]
}
btcec/schnorr/musig2/data/nonce_gen_vectors.json (new file, 36 lines)

@@ -0,0 +1,36 @@
{
  "test_cases": [
    {
      "rand_": "0000000000000000000000000000000000000000000000000000000000000000",
      "sk": "0202020202020202020202020202020202020202020202020202020202020202",
      "aggpk": "0707070707070707070707070707070707070707070707070707070707070707",
      "msg": "0101010101010101010101010101010101010101010101010101010101010101",
      "extra_in": "0808080808080808080808080808080808080808080808080808080808080808",
      "expected": "BC6C683EBBCC39DCB3C29B3D010D2AAA7C86CFB562FC41ED9A460EE061013E75FB4AD2F0B816713269800D018803906D5481E00A940EAB4F4AC49B4A372EB0F4"
    },
    {
      "rand_": "0000000000000000000000000000000000000000000000000000000000000000",
      "sk": "0202020202020202020202020202020202020202020202020202020202020202",
      "aggpk": "0707070707070707070707070707070707070707070707070707070707070707",
      "msg": "",
      "extra_in": "0808080808080808080808080808080808080808080808080808080808080808",
      "expected": "AAC4BFD707F4953B4063851D7E4AAD5C59D5D0BFB0E71012788A85698B5ACF8F11834D5051928424BA501C8CD064F3F942F8D4A07D8A2ED79F153E4ABD9EBBE9"
    },
    {
      "rand_": "0000000000000000000000000000000000000000000000000000000000000000",
      "sk": "0202020202020202020202020202020202020202020202020202020202020202",
      "aggpk": "0707070707070707070707070707070707070707070707070707070707070707",
      "msg": "2626262626262626262626262626262626262626262626262626262626262626262626262626",
      "extra_in": "0808080808080808080808080808080808080808080808080808080808080808",
      "expected": "DF54500DD2B503DBA3753C48A9D6B67E6C11EC4325EDD1DC256C7F75D6A85DBECA6D9857A6F3F292FB3B50DBCBF69FADB67B1CDDB0EA6EB693F6455C4C9088E1"
    },
    {
      "rand_": "0000000000000000000000000000000000000000000000000000000000000000",
      "sk": null,
      "aggpk": null,
      "msg": null,
      "extra_in": null,
      "expected": "7B3B5A002356471AF0E961DE2549C121BD0D48ABCEEDC6E034BDDF86AD3E0A187ECEE674CEF7364B0BC4BEEFB8B66CAD89F98DE2F8C5A5EAD5D1D1E4BD7D04CD"
    }
  ]
}
btcec/schnorr/musig2/data/sig_agg_vectors.json (new file, 86 lines)

@@ -0,0 +1,86 @@
{
  "pubkeys": [
    "03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9",
    "02D2DC6F5DF7C56ACF38C7FA0AE7A759AE30E19B37359DFDE015872324C7EF6E05",
    "03C7FB101D97FF930ACD0C6760852EF64E69083DE0B06AC6335724754BB4B0522C",
    "02352433B21E7E05D3B452B81CAE566E06D2E003ECE16D1074AABA4289E0E3D581"
  ],
  "pnonces": [
    "0300A32F8548F59C533F55DB9754E3C0BA3C2544F085649FDCE42B8BD3F244C2CA0384449BED61004E8863452A38534E91875516C3CC543122CE2BE1F31845025588",
    "03F66B072A869BC2A57D776D487151D707E82B4F1B885066A589858C1BF3871DB603ED391C9658AB6031A96ACBD5E2D9FEC465EFDC8C0D0B765C9B9F3579D520FB6F",
    "03A5791CA078E278126EF457C25B5C835F7282C0A47BDBF464BA35C3769427D5CD034D40350F8A5590985E38AAEFC3C695DF671C2E5498E2B60C082C546E06ECAF78",
    "020DE6382B8C0550E8174D5263B981224EBCFEF7706588B6936177FEB68E639B8C02BA5F18DDB3487AD087F63CEF7D7818AC8ECA3D6B736113FF36FB25D113F514F6",
    "031883080513BB69B31367F9A7B5F4E81246C627060A7414B7F137FA8459F261990345445505F158EDCFDF0D4BF26E04E018C143BF76B5D457AE57DF06CA41371DF0",
    "0300028E83123E7FAB1E1F230547CE8B96CC23F13197312972DE72AACBA98EF9870274C2D8566E9E021AA7E2DDDA01B52AE670E0742418F147610528B65ACDB4D0B3"
  ],
  "tweaks": [
    "B511DA492182A91B0FFB9A98020D55F260AE86D7ECBD0399C7383D59A5F2AF7C",
    "A815FE049EE3C5AAB66310477FBC8BCCCAC2F3395F59F921C364ACD78A2F48DC",
    "75448A87274B056468B977BE06EB1E9F657577B7320B0A3376EA51FD420D18A8"
  ],
  "psigs": [
    "7918521F42E5727FE2E82D802876E0C8844336FDA1B58C82696A55B0188C8B3D",
    "599044037AE15C4A99FB94F022B48E7AB215BF703954EC0B83D0E06230476001",
    "F05BE3CA783AD1FAF68C5059B43F859BFD4EBB0242459DF2C6BF013F4217F7E7",
    "BF85B2A751066466C24A5E7FA6C90DBAADAC2DF1F0BB48546AE239E340437CEB",
    "142076B034A7401123EFB07E2317DF819B86B3FFA17180DDD093997D018270D0",
    "B7A0C7F5B325B7993925E56B60F53EF8198169F31E1AF7E62BBEF1C5DCD1BA22",
    "C717ECA32C148CE8EB8882CD9656DF9C64929DCAE9AF798E381B1E888DDF0F8F",
    "5988823E78488D8005311E16E5EA67AF70514CB44F5A5CD51FFA262BEEAA21CE",
    "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141"
  ],
  "msg": "599C67EA410D005B9DA90817CF03ED3B1C868E4DA4EDF00A5880B0082C237869",
  "valid_test_cases": [
    {
      "aggnonce": "02BC34CDF6FA1298D7B6A126812FAD0739005BC44E45C21276EEFE41AAF841C86F03F3562AED52243BB99F43D1677DB59F0FEFB961633997F7AC924B78FBD0B0334F",
      "nonce_indices": [0, 1],
      "key_indices": [0, 1],
      "tweak_indices": [],
      "is_xonly": [],
      "psig_indices": [0, 1],
      "expected": "CA3C28729659E50F829F55DC5DB1DE88A05D1702B4165B85F95B627FC57733F8D2A89622BDC6CECA7CE3C2704B2B6F433658F66DDB0A788DED3B361248D3EB3E"
    },
    {
      "aggnonce": "035538518B8043CF4EACD0E701A80657B741C0E6445EC1D6C6177964D22C642971030CFE657EC882F4E08E751B883A78AC1491B30FC86CB57AF2DFF012C2BE6DF1F2",
      "nonce_indices": [0, 2],
      "key_indices": [0, 2],
      "tweak_indices": [],
      "is_xonly": [],
      "psig_indices": [2, 3],
      "expected": "3997A11DFF76349532CF25E761365EA1D4F24B62EB23A12A9DAABD5976C3DB9FAFE19671C9413661B8D6AED95B089357F04C0C0D83B8460B71CEDC95B2253391"
    },
    {
      "aggnonce": "024366775E6FFBEBBB954225936BAED71A3884C7933B18225088D19E7AF12D8D5D028D79A520B347B793FFE897A7EB79A4366A3FDCDC652C243FAC3976B3D6DF8AB2",
      "nonce_indices": [0, 3],
      "key_indices": [0, 2],
      "tweak_indices": [0],
      "is_xonly": [false],
      "psig_indices": [4, 5],
      "expected": "5AF759C2839B7FEE59D31DAB800F82FC21258457773A3B1F69F5228C80CAD4317EA39AD756601030E4D4051B7C9A25AB4DE7CB39BED26E0A03A1B2ED5B747F7F"
    },
    {
      "aggnonce": "03B25098C6D0B72DC5717314AF26C126609B4776AA468553DD4354EE20B216B227027D242E9203499173A74E286C1F796F2711E171EE937706BBEA2F4DB10C4E6809",
      "nonce_indices": [0, 4],
      "key_indices": [0, 3],
      "tweak_indices": [0, 1, 2],
      "is_xonly": [true, false, true],
      "psig_indices": [6, 7],
      "expected": "B495A478F91D6E10BF08A156E46D9E62B4C5399C1AEDDA1A9D306F06AFB8A52F2C078FD6B50DDBC33BFFE583C3C1E3D0D5E52891E190101C70D2278BCA943457"
    }
  ],
  "error_test_cases": [
    {
      "aggnonce": "03B25098C6D0B72DC5717314AF26C126609B4776AA468553DD4354EE20B216B227027D242E9203499173A74E286C1F796F2711E171EE937706BBEA2F4DB10C4E6809",
      "nonce_indices": [0, 4],
      "key_indices": [0, 3],
      "tweak_indices": [0, 1, 2],
      "is_xonly": [true, false, true],
      "psig_indices": [7, 8],
      "error": {
        "type": "invalid_contribution",
        "signer": 1
      },
      "comment": "Partial signature is invalid because it exceeds group size"
    }
  ]
}
btcec/schnorr/musig2/data/sign_verify_vectors.json (new file, 183 lines)

@@ -0,0 +1,183 @@
{
  "sk": "7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671",
  "pubkeys": [
    "03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9",
    "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
    "02DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA661",
    "020000000000000000000000000000000000000000000000000000000000000007"
  ],
  "secnonces": [
    "508B81A611F100A6B2B6B29656590898AF488BCF2E1F55CF22E5CFB84421FE61FA27FD49B1D50085B481285E1CA205D55C82CC1B31FF5CD54A489829355901F7",
    "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
  ],
  "pnonces": [
    "0337C87821AFD50A8644D820A8F3E02E499C931865C2360FB43D0A0D20DAFE07EA0287BF891D2A6DEAEBADC909352AA9405D1428C15F4B75F04DAE642A95C2548480",
    "0279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",
    "032DE2662628C90B03F5E720284EB52FF7D71F4284F627B68A853D78C78E1FFE9303E4C5524E83FFE1493B9077CF1CA6BEB2090C93D930321071AD40B2F44E599046",
    "0237C87821AFD50A8644D820A8F3E02E499C931865C2360FB43D0A0D20DAFE07EA0387BF891D2A6DEAEBADC909352AA9405D1428C15F4B75F04DAE642A95C2548480",
    "020000000000000000000000000000000000000000000000000000000000000009"
  ],
  "aggnonces": [
    "028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9",
    "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
    "048465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9",
    "028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61020000000000000000000000000000000000000000000000000000000000000009",
    "028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD6102FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30"
  ],
  "msgs": [
    "F95466D086770E689964664219266FE5ED215C92AE20BAB5C9D79ADDDDF3C0CF",
    "",
    "2626262626262626262626262626262626262626262626262626262626262626262626262626"
  ],
  "valid_test_cases": [
    {
      "key_indices": [0, 1, 2],
      "nonce_indices": [0, 1, 2],
      "aggnonce_index": 0,
      "msg_index": 0,
      "signer_index": 0,
      "expected": "012ABBCB52B3016AC03AD82395A1A415C48B93DEF78718E62A7A90052FE224FB"
    },
    {
      "key_indices": [1, 0, 2],
      "nonce_indices": [1, 0, 2],
      "aggnonce_index": 0,
      "msg_index": 0,
      "signer_index": 1,
      "expected": "9FF2F7AAA856150CC8819254218D3ADEEB0535269051897724F9DB3789513A52"
    },
    {
      "key_indices": [1, 2, 0],
      "nonce_indices": [1, 2, 0],
      "aggnonce_index": 0,
      "msg_index": 0,
      "signer_index": 2,
      "expected": "FA23C359F6FAC4E7796BB93BC9F0532A95468C539BA20FF86D7C76ED92227900"
    },
    {
      "key_indices": [0, 1],
      "nonce_indices": [0, 3],
      "aggnonce_index": 1,
      "msg_index": 0,
      "signer_index": 0,
      "expected": "AE386064B26105404798F75DE2EB9AF5EDA5387B064B83D049CB7C5E08879531",
      "comment": "Both halves of aggregate nonce correspond to point at infinity"
    }
  ],
  "sign_error_test_cases": [
    {
      "key_indices": [1, 0, 3],
      "aggnonce_index": 0,
      "msg_index": 0,
      "secnonce_index": 0,
      "error": {
        "type": "invalid_contribution",
        "signer": 2,
        "contrib": "pubkey"
      },
      "comment": "Signer 2 provided an invalid public key"
    },
    {
      "key_indices": [1, 2, 0],
      "aggnonce_index": 2,
      "msg_index": 0,
      "secnonce_index": 0,
      "error": {
        "type": "invalid_contribution",
        "signer": null,
        "contrib": "aggnonce"
      },
      "comment": "Aggregate nonce is invalid due wrong tag, 0x04, in the first half"
    },
    {
      "key_indices": [1, 2, 0],
      "aggnonce_index": 3,
      "msg_index": 0,
      "secnonce_index": 0,
      "error": {
        "type": "invalid_contribution",
        "signer": null,
        "contrib": "aggnonce"
      },
      "comment": "Aggregate nonce is invalid because the second half does not correspond to an X coordinate"
    },
    {
      "key_indices": [1, 2, 0],
      "aggnonce_index": 4,
      "msg_index": 0,
      "secnonce_index": 0,
      "error": {
        "type": "invalid_contribution",
        "signer": null,
        "contrib": "aggnonce"
      },
      "comment": "Aggregate nonce is invalid because second half exceeds field size"
    },
    {
      "key_indices": [0, 1, 2],
      "aggnonce_index": 0,
      "msg_index": 0,
      "signer_index": 0,
      "secnonce_index": 1,
      "error": {
        "type": "value",
        "message": "first secnonce value is out of range."
      },
      "comment": "Secnonce is invalid which may indicate nonce reuse"
    }
  ],
  "verify_fail_test_cases": [
    {
      "sig": "97AC833ADCB1AFA42EBF9E0725616F3C9A0D5B614F6FE283CEAAA37A8FFAF406",
      "key_indices": [0, 1, 2],
      "nonce_indices": [0, 1, 2],
      "msg_index": 0,
      "signer_index": 0,
      "comment": "Wrong signature (which is equal to the negation of valid signature)"
    },
    {
      "sig": "68537CC5234E505BD14061F8DA9E90C220A181855FD8BDB7F127BB12403B4D3B",
      "key_indices": [0, 1, 2],
      "nonce_indices": [0, 1, 2],
      "msg_index": 0,
      "signer_index": 1,
      "comment": "Wrong signer"
    },
    {
      "sig": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",
      "key_indices": [0, 1, 2],
      "nonce_indices": [0, 1, 2],
      "msg_index": 0,
      "signer_index": 0,
      "comment": "Signature exceeds group size"
    }
  ],
  "verify_error_test_cases": [
    {
      "sig": "68537CC5234E505BD14061F8DA9E90C220A181855FD8BDB7F127BB12403B4D3B",
      "key_indices": [0, 1, 2],
      "nonce_indices": [4, 1, 2],
      "msg_index": 0,
      "signer_index": 0,
      "error": {
        "type": "invalid_contribution",
        "signer": 0,
        "contrib": "pubnonce"
      },
      "comment": "Invalid pubnonce"
    },
    {
      "sig": "68537CC5234E505BD14061F8DA9E90C220A181855FD8BDB7F127BB12403B4D3B",
      "key_indices": [3, 1, 2],
      "nonce_indices": [0, 1, 2],
      "msg_index": 0,
      "signer_index": 0,
      "error": {
        "type": "invalid_contribution",
        "signer": 0,
        "contrib": "pubkey"
      },
      "comment": "Invalid pubkey"
    }
  ]
}
btcec/schnorr/musig2/data/tweak_vectors.json (new file, 84 lines)

@@ -0,0 +1,84 @@
{
  "sk": "7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671",
  "pubkeys": [
    "03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9",
    "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
    "02DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659"
  ],
  "secnonce": "508B81A611F100A6B2B6B29656590898AF488BCF2E1F55CF22E5CFB84421FE61FA27FD49B1D50085B481285E1CA205D55C82CC1B31FF5CD54A489829355901F7",
  "pnonces": [
    "0337C87821AFD50A8644D820A8F3E02E499C931865C2360FB43D0A0D20DAFE07EA0287BF891D2A6DEAEBADC909352AA9405D1428C15F4B75F04DAE642A95C2548480",
    "0279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",
    "032DE2662628C90B03F5E720284EB52FF7D71F4284F627B68A853D78C78E1FFE9303E4C5524E83FFE1493B9077CF1CA6BEB2090C93D930321071AD40B2F44E599046"
  ],
  "aggnonce": "028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9",
  "tweaks": [
    "E8F791FF9225A2AF0102AFFF4A9A723D9612A682A25EBE79802B263CDFCD83BB",
    "AE2EA797CC0FE72AC5B97B97F3C6957D7E4199A167A58EB08BCAFFDA70AC0455",
    "F52ECBC565B3D8BEA2DFD5B75A4F457E54369809322E4120831626F290FA87E0",
    "1969AD73CC177FA0B4FCED6DF1F7BF9907E665FDE9BA196A74FED0A3CF5AEF9D",
    "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141"
  ],
  "msg": "F95466D086770E689964664219266FE5ED215C92AE20BAB5C9D79ADDDDF3C0CF",
  "valid_test_cases": [
    {
      "key_indices": [1, 2, 0],
      "nonce_indices": [1, 2, 0],
      "tweak_indices": [0],
      "is_xonly": [true],
      "signer_index": 2,
      "expected": "E28A5C66E61E178C2BA19DB77B6CF9F7E2F0F56C17918CD13135E60CC848FE91",
      "comment": "A single x-only tweak"
    },
    {
      "key_indices": [1, 2, 0],
      "nonce_indices": [1, 2, 0],
      "tweak_indices": [0],
      "is_xonly": [false],
      "signer_index": 2,
      "expected": "38B0767798252F21BF5702C48028B095428320F73A4B14DB1E25DE58543D2D2D",
      "comment": "A single plain tweak"
    },
    {
      "key_indices": [1, 2, 0],
      "nonce_indices": [1, 2, 0],
      "tweak_indices": [0, 1],
      "is_xonly": [false, true],
      "signer_index": 2,
      "expected": "408A0A21C4A0F5DACAF9646AD6EB6FECD7F7A11F03ED1F48DFFF2185BC2C2408",
      "comment": "A plain tweak followed by an x-only tweak"
    },
    {
      "key_indices": [1, 2, 0],
      "nonce_indices": [1, 2, 0],
      "tweak_indices": [0, 1, 2, 3],
      "is_xonly": [false, false, true, true],
      "signer_index": 2,
      "expected": "45ABD206E61E3DF2EC9E264A6FEC8292141A633C28586388235541F9ADE75435",
      "comment": "Four tweaks: plain, plain, x-only, x-only."
    },
    {
      "key_indices": [1, 2, 0],
      "nonce_indices": [1, 2, 0],
      "tweak_indices": [0, 1, 2, 3],
      "is_xonly": [true, false, true, false],
      "signer_index": 2,
      "expected": "B255FDCAC27B40C7CE7848E2D3B7BF5EA0ED756DA81565AC804CCCA3E1D5D239",
      "comment": "Four tweaks: x-only, plain, x-only, plain. If an implementation prohibits applying plain tweaks after x-only tweaks, it can skip this test vector or return an error."
    }
  ],
  "error_test_cases": [
    {
      "key_indices": [1, 2, 0],
      "nonce_indices": [1, 2, 0],
      "tweak_indices": [4],
      "is_xonly": [false],
      "signer_index": 2,
      "error": {
        "type": "value",
        "message": "The tweak must be less than n."
      },
      "comment": "Tweak is invalid because it exceeds group size"
    }
  ]
}
btcec/schnorr/musig2/keys.go

@@ -29,7 +29,7 @@ var (
 
 	// ErrTweakedKeyOverflows is returned if a tweaking key is larger than
 	// 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141.
-	ErrTweakedKeyOverflows = fmt.Errorf("tweaked key is to large")
+	ErrTweakedKeyOverflows = fmt.Errorf("tweaked key is too large")
 )
 
 // sortableKeys defines a type of slice of public keys that implements the sort

@@ -40,8 +40,8 @@ type sortableKeys []*btcec.PublicKey
 // with index j.
 func (s sortableKeys) Less(i, j int) bool {
 	// TODO(roasbeef): more efficient way to compare...
-	keyIBytes := schnorr.SerializePubKey(s[i])
-	keyJBytes := schnorr.SerializePubKey(s[j])
+	keyIBytes := s[i].SerializeCompressed()
+	keyJBytes := s[j].SerializeCompressed()
 
 	return bytes.Compare(keyIBytes, keyJBytes) == -1
 }

@@ -56,9 +56,9 @@ func (s sortableKeys) Len() int {
 	return len(s)
 }
 
-// sortKeys takes a set of schnorr public keys and returns a new slice that is
-// a copy of the keys sorted in lexicographical order bytes on the x-only
-// pubkey serialization.
+// sortKeys takes a set of public keys and returns a new slice that is a copy
+// of the keys sorted in lexicographical order bytes on the x-only pubkey
+// serialization.
 func sortKeys(keys []*btcec.PublicKey) []*btcec.PublicKey {
 	keySet := sortableKeys(keys)
 	if sort.IsSorted(keySet) {

@@ -72,7 +72,7 @@ func sortKeys(keys []*btcec.PublicKey) []*btcec.PublicKey {
 // keyHashFingerprint computes the tagged hash of the series of (sorted) public
 // keys passed as input. This is used to compute the aggregation coefficient
 // for each key. The final computation is:
-//   * H(tag=KeyAgg list, pk1 || pk2..)
+//   - H(tag=KeyAgg list, pk1 || pk2..)
 func keyHashFingerprint(keys []*btcec.PublicKey, sort bool) []byte {
 	if sort {
 		keys = sortKeys(keys)

@@ -80,28 +80,25 @@ func keyHashFingerprint(keys []*btcec.PublicKey, sort bool) []byte {
 
 	// We'll create a single buffer and slice into that so the bytes buffer
 	// doesn't continually need to grow the underlying buffer.
-	keyAggBuf := make([]byte, 32*len(keys))
+	keyAggBuf := make([]byte, 33*len(keys))
 	keyBytes := bytes.NewBuffer(keyAggBuf[0:0])
 	for _, key := range keys {
-		keyBytes.Write(schnorr.SerializePubKey(key))
+		keyBytes.Write(key.SerializeCompressed())
 	}
 
 	h := chainhash.TaggedHash(KeyAggTagList, keyBytes.Bytes())
 	return h[:]
 }
 
-// keyBytesEqual returns true if two keys are the same from the PoV of BIP
-// 340's 32-byte x-only public keys.
+// keyBytesEqual returns true if two keys are the same based on the compressed
+// serialization of each key.
 func keyBytesEqual(a, b *btcec.PublicKey) bool {
-	return bytes.Equal(
-		schnorr.SerializePubKey(a),
-		schnorr.SerializePubKey(b),
-	)
+	return bytes.Equal(a.SerializeCompressed(), b.SerializeCompressed())
 }
 
 // aggregationCoefficient computes the key aggregation coefficient for the
 // specified target key. The coefficient is computed as:
-//   * H(tag=KeyAgg coefficient, keyHashFingerprint(pks) || pk)
+//   - H(tag=KeyAgg coefficient, keyHashFingerprint(pks) || pk)
 func aggregationCoefficient(keySet []*btcec.PublicKey,
 	targetKey *btcec.PublicKey, keysHash []byte,
 	secondKeyIdx int) *btcec.ModNScalar {

@@ -116,9 +113,9 @@ func aggregationCoefficient(keySet []*btcec.PublicKey,
 	// Otherwise, we'll compute the full finger print hash for this given
 	// key and then use that to compute the coefficient tagged hash:
 	//  * H(tag=KeyAgg coefficient, keyHashFingerprint(pks, pk) || pk)
-	var coefficientBytes [64]byte
+	var coefficientBytes [65]byte
 	copy(coefficientBytes[:], keysHash[:])
-	copy(coefficientBytes[32:], schnorr.SerializePubKey(targetKey))
+	copy(coefficientBytes[32:], targetKey.SerializeCompressed())
 
 	muHash := chainhash.TaggedHash(KeyAggTagCoeff, coefficientBytes[:])
 
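The keyHashFingerprint and aggregationCoefficient hunks above switch the hashed key serialization from 32-byte x-only keys to 33-byte compressed keys. Below is a standalone sketch of the list-fingerprint computation, H(tag, pk1 || pk2 || ...); the literal tag string "KeyAgg list" is an assumption taken from the MuSig2 spec, since this diff only shows the package constant KeyAggTagList.

```go
package main

import (
	"fmt"

	"github.com/btcsuite/btcd/btcec/v2"
	"github.com/btcsuite/btcd/chaincfg/chainhash"
)

// keyAggListHash mirrors the fingerprint described in the comment above:
// a tagged hash over the concatenation of the compressed keys.
func keyAggListHash(keys []*btcec.PublicKey) []byte {
	// One compressed key is 33 bytes, hence the 33*len(keys) sizing in
	// the diff above.
	buf := make([]byte, 0, 33*len(keys))
	for _, key := range keys {
		buf = append(buf, key.SerializeCompressed()...)
	}

	// Tag string assumed here; the package uses its KeyAggTagList constant.
	h := chainhash.TaggedHash([]byte("KeyAgg list"), buf)
	return h[:]
}

func main() {
	k1, _ := btcec.NewPrivateKey()
	k2, _ := btcec.NewPrivateKey()

	keys := []*btcec.PublicKey{k1.PubKey(), k2.PubKey()}
	fmt.Printf("keyagg list hash: %x\n", keyAggListHash(keys))
}
```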
btcec/schnorr/musig2/keys_test.go (new file, 394 lines)

@@ -0,0 +1,394 @@
// Copyright 2013-2022 The btcsuite developers

package musig2

import (
	"encoding/hex"
	"encoding/json"
	"fmt"
	"os"
	"path"
	"strings"
	"testing"

	"github.com/btcsuite/btcd/btcec/v2"
	"github.com/btcsuite/btcd/btcec/v2/schnorr"
	secp "github.com/decred/dcrd/dcrec/secp256k1/v4"
	"github.com/stretchr/testify/require"
)

const (
	keySortTestVectorFileName = "key_sort_vectors.json"

	keyAggTestVectorFileName = "key_agg_vectors.json"

	keyTweakTestVectorFileName = "tweak_vectors.json"
)

type keySortTestVector struct {
	PubKeys []string `json:"pubkeys"`

	SortedKeys []string `json:"sorted_pubkeys"`
}

// TestMusig2KeySort tests that keys are properly sorted according to the
// musig2 test vectors.
func TestMusig2KeySort(t *testing.T) {
	t.Parallel()

	testVectorPath := path.Join(
		testVectorBaseDir, keySortTestVectorFileName,
	)
	testVectorBytes, err := os.ReadFile(testVectorPath)
	require.NoError(t, err)

	var testCase keySortTestVector
	require.NoError(t, json.Unmarshal(testVectorBytes, &testCase))

	keys := make([]*btcec.PublicKey, len(testCase.PubKeys))
	for i, keyStr := range testCase.PubKeys {
		pubKey, err := btcec.ParsePubKey(mustParseHex(keyStr))
		require.NoError(t, err)

		keys[i] = pubKey
	}

	sortedKeys := sortKeys(keys)

	expectedKeys := make([]*btcec.PublicKey, len(testCase.PubKeys))
	for i, keyStr := range testCase.SortedKeys {
		pubKey, err := btcec.ParsePubKey(mustParseHex(keyStr))
		require.NoError(t, err)

		expectedKeys[i] = pubKey
	}

	require.Equal(t, sortedKeys, expectedKeys)
}

type keyAggValidTest struct {
	Indices  []int  `json:"key_indices"`
	Expected string `json:"expected"`
}

type keyAggError struct {
	Type     string `json:"type"`
	Signer   int    `json:"signer"`
	Contring string `json:"contrib"`
}

type keyAggInvalidTest struct {
	Indices []int `json:"key_indices"`

	TweakIndices []int `json:"tweak_indices"`

	IsXOnly []bool `json:"is_xonly"`

	Comment string `json:"comment"`
}

type keyAggTestVectors struct {
	PubKeys []string `json:"pubkeys"`

	Tweaks []string `json:"tweaks"`

	ValidCases []keyAggValidTest `json:"valid_test_cases"`

	InvalidCases []keyAggInvalidTest `json:"error_test_cases"`
}

func keysFromIndices(t *testing.T, indices []int,
	pubKeys []string) ([]*btcec.PublicKey, error) {

	t.Helper()

	inputKeys := make([]*btcec.PublicKey, len(indices))
	for i, keyIdx := range indices {
		var err error
		inputKeys[i], err = btcec.ParsePubKey(
			mustParseHex(pubKeys[keyIdx]),
		)
		if err != nil {
			return nil, err
		}
	}

	return inputKeys, nil
}

func tweaksFromIndices(t *testing.T, indices []int,
	tweaks []string, isXonly []bool) []KeyTweakDesc {

	t.Helper()

	testTweaks := make([]KeyTweakDesc, len(indices))
	for i, idx := range indices {
		var rawTweak [32]byte
		copy(rawTweak[:], mustParseHex(tweaks[idx]))

		testTweaks[i] = KeyTweakDesc{
			Tweak:   rawTweak,
			IsXOnly: isXonly[i],
		}
	}

	return testTweaks
}

// TestMuSig2KeyAggTestVectors tests that this implementation of musig2 key
// aggregation lines up with the secp256k1-zkp test vectors.
func TestMuSig2KeyAggTestVectors(t *testing.T) {
	t.Parallel()

	testVectorPath := path.Join(
		testVectorBaseDir, keyAggTestVectorFileName,
	)
	testVectorBytes, err := os.ReadFile(testVectorPath)
	require.NoError(t, err)

	var testCases keyAggTestVectors
	require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))

	tweaks := make([][]byte, len(testCases.Tweaks))
	for i := range testCases.Tweaks {
		tweaks[i] = mustParseHex(testCases.Tweaks[i])
	}

	for i, testCase := range testCases.ValidCases {
		testCase := testCase

		// Assemble the set of keys we'll pass in based on their key
		// index. We don't use sorting to ensure we send the keys in
		// the exact same order as the test vectors do.
		inputKeys, err := keysFromIndices(
			t, testCase.Indices, testCases.PubKeys,
		)
		require.NoError(t, err)

		t.Run(fmt.Sprintf("test_case=%v", i), func(t *testing.T) {
			uniqueKeyIndex := secondUniqueKeyIndex(inputKeys, false)
			opts := []KeyAggOption{WithUniqueKeyIndex(uniqueKeyIndex)}

			combinedKey, _, _, err := AggregateKeys(
				inputKeys, false, opts...,
			)
			require.NoError(t, err)

			require.Equal(
				t, schnorr.SerializePubKey(combinedKey.FinalKey),
				mustParseHex(testCase.Expected),
			)
		})
	}

	for _, testCase := range testCases.InvalidCases {
		testCase := testCase

		testName := fmt.Sprintf("invalid_%v",
			strings.ToLower(testCase.Comment))
		t.Run(testName, func(t *testing.T) {
			// For each test, we'll extract the set of input keys
			// as well as the tweaks since this set of cases also
			// exercises error cases related to the set of tweaks.
			inputKeys, err := keysFromIndices(
				t, testCase.Indices, testCases.PubKeys,
			)

			// In this set of test cases, we should only get this
			// for the very first vector.
			if err != nil {
				switch testCase.Comment {
				case "Invalid public key":
					require.ErrorIs(
						t, err,
						secp.ErrPubKeyNotOnCurve,
					)

				case "Public key exceeds field size":
					require.ErrorIs(
						t, err, secp.ErrPubKeyXTooBig,
					)

				case "First byte of public key is not 2 or 3":
					require.ErrorIs(
						t, err,
						secp.ErrPubKeyInvalidFormat,
					)

				default:
					t.Fatalf("uncaught err: %v", err)
				}

				return
			}

			var tweaks []KeyTweakDesc
			if len(testCase.TweakIndices) != 0 {
				tweaks = tweaksFromIndices(
					t, testCase.TweakIndices, testCases.Tweaks,
					testCase.IsXOnly,
				)
			}

			uniqueKeyIndex := secondUniqueKeyIndex(inputKeys, false)
			opts := []KeyAggOption{
				WithUniqueKeyIndex(uniqueKeyIndex),
			}

			if len(tweaks) != 0 {
				opts = append(opts, WithKeyTweaks(tweaks...))
			}

			_, _, _, err = AggregateKeys(
				inputKeys, false, opts...,
			)
			require.Error(t, err)

			switch testCase.Comment {
			case "Tweak is out of range":
				require.ErrorIs(t, err, ErrTweakedKeyOverflows)

			case "Intermediate tweaking result is point at infinity":
				require.ErrorIs(t, err, ErrTweakedKeyIsInfinity)

			default:
				t.Fatalf("uncaught err: %v", err)
			}
		})
	}
}

type keyTweakInvalidTest struct {
	Indices []int `json:"key_indices"`

	NonceIndices []int `json:"nonce_indices"`

	TweakIndices []int `json:"tweak_indices"`

	IsXOnly []bool `json:"is_only"`

	SignerIndex int `json:"signer_index"`

	Comment string `json:"comment"`
}

type keyTweakValidTest struct {
	Indices []int `json:"key_indices"`

	NonceIndices []int `json:"nonce_indices"`

	TweakIndices []int `json:"tweak_indices"`

	IsXOnly []bool `json:"is_xonly"`

	SignerIndex int `json:"signer_index"`

	Expected string `json:"expected"`

	Comment string `json:"comment"`
}

type keyTweakVector struct {
	PrivKey string `json:"sk"`

	PubKeys []string `json:"pubkeys"`

	PrivNonce string `json:"secnonce"`

	PubNonces []string `json:"pnonces"`

	AggNnoce string `json:"aggnonce"`

	Tweaks []string `json:"tweaks"`

	Msg string `json:"msg"`

	ValidCases []keyTweakValidTest `json:"valid_test_cases"`

	InvalidCases []keyTweakInvalidTest `json:"error_test_cases"`
}

func pubNoncesFromIndices(t *testing.T, nonceIndices []int, pubNonces []string) [][PubNonceSize]byte {

	nonces := make([][PubNonceSize]byte, len(nonceIndices))

	for i, idx := range nonceIndices {
		var pubNonce [PubNonceSize]byte
		copy(pubNonce[:], mustParseHex(pubNonces[idx]))

		nonces[i] = pubNonce
	}

	return nonces
}

// TestMuSig2TweakTestVectors tests that we properly handle the various edge
// cases related to tweaking public keys.
func TestMuSig2TweakTestVectors(t *testing.T) {
	t.Parallel()

	testVectorPath := path.Join(
		testVectorBaseDir, keyTweakTestVectorFileName,
	)
	testVectorBytes, err := os.ReadFile(testVectorPath)
	require.NoError(t, err)

	var testCases keyTweakVector
	require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))

	privKey, _ := btcec.PrivKeyFromBytes(mustParseHex(testCases.PrivKey))

	var msg [32]byte
	copy(msg[:], mustParseHex(testCases.Msg))

	var secNonce [SecNonceSize]byte
	copy(secNonce[:], mustParseHex(testCases.PrivNonce))

	for _, testCase := range testCases.ValidCases {
		testCase := testCase

		testName := fmt.Sprintf("valid_%v",
			strings.ToLower(testCase.Comment))
		t.Run(testName, func(t *testing.T) {
			pubKeys, err := keysFromIndices(
				t, testCase.Indices, testCases.PubKeys,
			)
			require.NoError(t, err)

			var tweaks []KeyTweakDesc
			if len(testCase.TweakIndices) != 0 {
				tweaks = tweaksFromIndices(
					t, testCase.TweakIndices,
					testCases.Tweaks, testCase.IsXOnly,
				)
			}

			pubNonces := pubNoncesFromIndices(
				t, testCase.NonceIndices, testCases.PubNonces,
			)

			combinedNonce, err := AggregateNonces(pubNonces)
			require.NoError(t, err)

			var opts []SignOption
			if len(tweaks) != 0 {
				opts = append(opts, WithTweaks(tweaks...))
			}

			partialSig, err := Sign(
				secNonce, privKey, combinedNonce, pubKeys,
				msg, opts...,
			)

			var partialSigBytes [32]byte
			partialSig.S.PutBytesUnchecked(partialSigBytes[:])

			require.Equal(
				t, hex.EncodeToString(partialSigBytes[:]),
				hex.EncodeToString(mustParseHex(testCase.Expected)),
			)

		})
	}
}
File diff suppressed because it is too large
btcec/schnorr/musig2/nonces.go

@@ -184,13 +184,15 @@ func withCustomOptions(customOpts nonceGenOpts) NonceGenOption {
 		o.randReader = customOpts.randReader
 		o.secretKey = customOpts.secretKey
 		o.combinedKey = customOpts.combinedKey
-		o.msg = customOpts.msg
 		o.auxInput = customOpts.auxInput
+		o.msg = customOpts.msg
 	}
 }
 
 // lengthWriter is a function closure that allows a caller to control how the
 // length prefix of a byte slice is written.
+//
+// TODO(roasbeef): use type params once we bump repo version
 type lengthWriter func(w io.Writer, b []byte) error
 
 // uint8Writer is an implementation of lengthWriter that writes the length of

@@ -205,6 +207,12 @@ func uint32Writer(w io.Writer, b []byte) error {
 	return binary.Write(w, byteOrder, uint32(len(b)))
 }
 
+// uint64Writer is an implementation of lengthWriter that writes the length of
+// the byte slice using 8 bytes.
+func uint64Writer(w io.Writer, b []byte) error {
+	return binary.Write(w, byteOrder, uint64(len(b)))
+}
+
 // writeBytesPrefix is used to write out: len(b) || b, to the passed io.Writer.
 // The lengthWriter function closure is used to allow the caller to specify the
 // precise byte packing of the length.

@@ -225,10 +233,12 @@ func writeBytesPrefix(w io.Writer, b []byte, lenWriter lengthWriter) error {
 // genNonceAuxBytes writes out the full byte string used to derive a secret
 // nonce based on some initial randomness as well as the series of optional
 // fields. The byte string used for derivation is:
-//   * tagged_hash("MuSig/nonce", rand || len(aggpk) || aggpk || len(m)
-//     || m || len(in) || in || i).
+//   - tagged_hash("MuSig/nonce", rand || len(aggpk) || aggpk || m_prefixed
+//     || len(in) || in || i).
 //
-// where i is the ith secret nonce being generated.
+// where i is the ith secret nonce being generated and m_prefixed is:
+//   - bytes(1, 0) if the message is blank
+//   - bytes(1, 1) || bytes(8, len(m)) || m if the message is present.
func genNonceAuxBytes(rand []byte, i int,
 	opts *nonceGenOpts) (*chainhash.Hash, error) {
 

@@ -245,10 +255,28 @@ func genNonceAuxBytes(rand []byte, i int,
 		return nil, err
 	}
 
-	// Next, we'll write out the length prefixed message.
-	err = writeBytesPrefix(&w, opts.msg, uint8Writer)
-	if err != nil {
-		return nil, err
+	switch {
+	// If the message isn't present, then we'll just write out a single
+	// uint8 of a zero byte: m_prefixed = bytes(1, 0).
+	case opts.msg == nil:
+		if _, err := w.Write([]byte{0x00}); err != nil {
+			return nil, err
+		}
+
+	// Otherwise, we'll write a single byte of 0x01 with a 1 byte length
+	// prefix, followed by the message itself with an 8 byte length prefix:
+	// m_prefixed = bytes(1, 1) || bytes(8, len(m)) || m.
+	case len(opts.msg) == 0:
+		fallthrough
+	default:
+		if _, err := w.Write([]byte{0x01}); err != nil {
+			return nil, err
+		}
+
+		err = writeBytesPrefix(&w, opts.msg, uint64Writer)
+		if err != nil {
+			return nil, err
+		}
 	}
 
 	// Finally we'll write out the auxiliary input.
btcec/schnorr/musig2/nonces_test.go
Normal file
164
btcec/schnorr/musig2/nonces_test.go
Normal file
@ -0,0 +1,164 @@
|
|||||||
|
// Copyright 2013-2022 The btcsuite developers
|
||||||
|
|
||||||
|
package musig2
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/hex"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
type nonceGenTestCase struct {
|
||||||
|
Rand string `json:"rand_"`
|
||||||
|
Sk string `json:"sk"`
|
||||||
|
AggPk string `json:"aggpk"`
|
||||||
|
Msg *string `json:"msg"`
|
||||||
|
ExtraIn string `json:"extra_in"`
|
||||||
|
|
||||||
|
Expected string `json:"expected"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type nonceGenTestCases struct {
|
||||||
|
TestCases []nonceGenTestCase `json:"test_cases"`
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
nonceGenTestVectorsFileName = "nonce_gen_vectors.json"
|
||||||
|
nonceAggTestVectorsFileName = "nonce_agg_vectors.json"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestMusig2NonceGenTestVectors tests the nonce generation function with the
|
||||||
|
// testvectors defined in the Musig2 BIP.
|
||||||
|
func TestMusig2NonceGenTestVectors(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testVectorPath := path.Join(
|
||||||
|
testVectorBaseDir, nonceGenTestVectorsFileName,
|
||||||
|
)
|
||||||
|
testVectorBytes, err := os.ReadFile(testVectorPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
var testCases nonceGenTestCases
|
||||||
|
require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
|
||||||
|
|
||||||
|
for i, testCase := range testCases.TestCases {
|
||||||
|
testCase := testCase
|
||||||
|
|
||||||
|
customOpts := nonceGenOpts{
|
||||||
|
randReader: &memsetRandReader{i: 0},
|
||||||
|
secretKey: mustParseHex(testCase.Sk),
|
||||||
|
combinedKey: mustParseHex(testCase.AggPk),
|
||||||
|
auxInput: mustParseHex(testCase.ExtraIn),
|
||||||
|
}
|
||||||
|
if testCase.Msg != nil {
|
||||||
|
customOpts.msg = mustParseHex(*testCase.Msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Run(fmt.Sprintf("test_case=%v", i), func(t *testing.T) {
|
||||||
|
nonce, err := GenNonces(withCustomOptions(customOpts))
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("err gen nonce aux bytes %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedBytes, _ := hex.DecodeString(testCase.Expected)
|
||||||
|
if !bytes.Equal(nonce.SecNonce[:], expectedBytes) {
|
||||||
|
|
||||||
|
t.Fatalf("nonces don't match: expected %x, got %x",
|
||||||
|
expectedBytes, nonce.SecNonce[:])
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type nonceAggError struct {
|
||||||
|
Type string `json:"type"`
|
||||||
|
Signer int `json:"signer"`
|
||||||
|
Contrib string `json:"contrib"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type nonceAggValidCase struct {
|
||||||
|
Indices []int `json:"pnonce_indices"`
|
||||||
|
|
||||||
|
Expected string `json:"expected"`
|
||||||
|
|
||||||
|
Comment string `json:"comment"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type nonceAggInvalidCase struct {
|
||||||
|
Indices []int `json:"pnonce_indices"`
|
||||||
|
|
||||||
|
Error nonceAggError `json:"error"`
|
||||||
|
|
||||||
|
Comment string `json:"comment"`
|
||||||
|
|
||||||
|
ExpectedErr string `json:"btcec_err"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type nonceAggTestCases struct {
|
||||||
|
Nonces []string `json:"pnonces"`
|
||||||
|
|
||||||
|
ValidCases []nonceAggValidCase `json:"valid_test_cases"`
|
||||||
|
|
||||||
|
InvalidCases []nonceAggInvalidCase `json:"error_test_cases"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestMusig2AggregateNoncesTestVectors tests that the musig2 implementation
|
||||||
|
// passes the nonce aggregration test vectors for musig2 1.0.
|
||||||
|
func TestMusig2AggregateNoncesTestVectors(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
testVectorPath := path.Join(
|
||||||
|
testVectorBaseDir, nonceAggTestVectorsFileName,
|
||||||
|
)
|
||||||
|
testVectorBytes, err := os.ReadFile(testVectorPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
var testCases nonceAggTestCases
|
||||||
|
require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
|
||||||
|
|
||||||
|
nonces := make([][PubNonceSize]byte, len(testCases.Nonces))
|
||||||
|
for i := range testCases.Nonces {
|
||||||
|
var nonce [PubNonceSize]byte
|
||||||
|
copy(nonce[:], mustParseHex(testCases.Nonces[i]))
|
||||||
|
|
||||||
|
nonces[i] = nonce
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, testCase := range testCases.ValidCases {
|
||||||
|
testCase := testCase
|
||||||
|
|
||||||
|
var testNonces [][PubNonceSize]byte
|
||||||
|
for _, idx := range testCase.Indices {
|
||||||
|
testNonces = append(testNonces, nonces[idx])
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Run(fmt.Sprintf("valid_case=%v", i), func(t *testing.T) {
|
||||||
|
aggregatedNonce, err := AggregateNonces(testNonces)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
var expectedNonce [PubNonceSize]byte
|
||||||
|
copy(expectedNonce[:], mustParseHex(testCase.Expected))
|
||||||
|
|
||||||
|
require.Equal(t, aggregatedNonce[:], expectedNonce[:])
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, testCase := range testCases.InvalidCases {
|
||||||
|
var testNonces [][PubNonceSize]byte
|
||||||
|
for _, idx := range testCase.Indices {
|
||||||
|
testNonces = append(testNonces, nonces[idx])
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Run(fmt.Sprintf("invalid_case=%v", i), func(t *testing.T) {
|
||||||
|
_, err := AggregateNonces(testNonces)
|
||||||
|
require.True(t, err != nil)
|
||||||
|
require.Equal(t, testCase.ExpectedErr, err.Error())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
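Aside (not part of the diff): the struct tags above imply the shape of nonce_gen_vectors.json. A hedged sketch of loading one entry with those structs, using placeholder strings rather than real vector data:

package main

import (
	"encoding/json"
	"fmt"
)

// These mirror the test structs above; the values below are placeholders,
// not vectors from the BIP.
type nonceGenTestCase struct {
	Rand     string  `json:"rand_"`
	Sk       string  `json:"sk"`
	AggPk    string  `json:"aggpk"`
	Msg      *string `json:"msg"`
	ExtraIn  string  `json:"extra_in"`
	Expected string  `json:"expected"`
}

type nonceGenTestCases struct {
	TestCases []nonceGenTestCase `json:"test_cases"`
}

func main() {
	raw := `{"test_cases": [{
		"rand_": "<hex randomness>",
		"sk": "<hex secret key>",
		"aggpk": "<hex aggregate key>",
		"msg": null,
		"extra_in": "",
		"expected": "<hex expected secnonce>"
	}]}`

	var cases nonceGenTestCases
	if err := json.Unmarshal([]byte(raw), &cases); err != nil {
		panic(err)
	}

	// msg may be absent entirely, which is why the field is a *string.
	fmt.Println(cases.TestCases[0].Msg == nil) // true
}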
@ -186,6 +186,58 @@ func WithBip86SignTweak() SignOption {
 	}
 }
 
+// computeSigningNonce calculates the final nonce used for signing. This will
+// be the R value used in the final signature.
+func computeSigningNonce(combinedNonce [PubNonceSize]byte,
+	combinedKey *btcec.PublicKey, msg [32]byte) (
+	*btcec.JacobianPoint, *btcec.ModNScalar, error) {
+
+	// Next we'll compute the value b, that blinds our second public
+	// nonce:
+	// * b = h(tag=NonceBlindTag, combinedNonce || combinedKey || m).
+	var (
+		nonceMsgBuf  bytes.Buffer
+		nonceBlinder btcec.ModNScalar
+	)
+	nonceMsgBuf.Write(combinedNonce[:])
+	nonceMsgBuf.Write(schnorr.SerializePubKey(combinedKey))
+	nonceMsgBuf.Write(msg[:])
+	nonceBlindHash := chainhash.TaggedHash(
+		NonceBlindTag, nonceMsgBuf.Bytes(),
+	)
+	nonceBlinder.SetByteSlice(nonceBlindHash[:])
+
+	// Next, we'll parse the public nonces into R1 and R2.
+	r1J, err := btcec.ParseJacobian(
+		combinedNonce[:btcec.PubKeyBytesLenCompressed],
+	)
+	if err != nil {
+		return nil, nil, err
+	}
+	r2J, err := btcec.ParseJacobian(
+		combinedNonce[btcec.PubKeyBytesLenCompressed:],
+	)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	// With our nonce blinding value, we'll now combine both the public
+	// nonces, using the blinding factor to tweak the second nonce:
+	// * R = R_1 + b*R_2
+	var nonce btcec.JacobianPoint
+	btcec.ScalarMultNonConst(&nonceBlinder, &r2J, &r2J)
+	btcec.AddNonConst(&r1J, &r2J, &nonce)
+
+	// If the combined nonce is the point at infinity, we'll use the
+	// generator point instead.
+	if nonce == infinityPoint {
+		G := btcec.Generator()
+		G.AsJacobian(&nonce)
+	}
+
+	return &nonce, &nonceBlinder, nil
+}
+
 // Sign generates a musig2 partial signature given the passed key set, secret
 // nonce, public nonce, and private keys. This method returns an error if the
 // generated nonces are either too large, or end up mapping to the point at
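Aside (not part of the diff): the h(tag=NonceBlindTag, ...) step above is a BIP-340 style tagged hash, SHA-256(SHA-256(tag) || SHA-256(tag) || msg), which is what chainhash.TaggedHash computes. A stdlib-only sketch with a hypothetical tag string, since the real NonceBlindTag value is defined elsewhere in the package:

package main

import (
	"crypto/sha256"
	"fmt"
)

// taggedHash mirrors the BIP-340 tagged-hash construction:
// SHA-256(SHA-256(tag) || SHA-256(tag) || msg).
func taggedHash(tag string, msg []byte) [32]byte {
	tagHash := sha256.Sum256([]byte(tag))

	h := sha256.New()
	h.Write(tagHash[:])
	h.Write(tagHash[:])
	h.Write(msg)

	var out [32]byte
	copy(out[:], h.Sum(nil))
	return out
}

func main() {
	// In computeSigningNonce the message is combinedNonce || combinedKey
	// || m; here we use a stand-in byte string and a hypothetical tag.
	b := taggedHash("SomeTag/example", []byte("combinedNonce||combinedKey||m"))
	fmt.Printf("%x\n", b)
}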
@ -230,46 +282,14 @@ func Sign(secNonce [SecNonceSize]byte, privKey *btcec.PrivateKey,
 		return nil, err
 	}
 
-	// Next we'll compute the value b, that blinds our second public
-	// nonce:
-	// * b = h(tag=NonceBlindTag, combinedNonce || combinedKey || m).
-	var (
-		nonceMsgBuf  bytes.Buffer
-		nonceBlinder btcec.ModNScalar
-	)
-	nonceMsgBuf.Write(combinedNonce[:])
-	nonceMsgBuf.Write(schnorr.SerializePubKey(combinedKey.FinalKey))
-	nonceMsgBuf.Write(msg[:])
-	nonceBlindHash := chainhash.TaggedHash(
-		NonceBlindTag, nonceMsgBuf.Bytes(),
-	)
-	nonceBlinder.SetByteSlice(nonceBlindHash[:])
-
-	// Next, we'll parse the public nonces into R1 and R2.
-	r1J, err := btcec.ParseJacobian(
-		combinedNonce[:btcec.PubKeyBytesLenCompressed],
-	)
-	if err != nil {
-		return nil, err
-	}
-	r2J, err := btcec.ParseJacobian(
-		combinedNonce[btcec.PubKeyBytesLenCompressed:],
-	)
-	if err != nil {
-		return nil, err
-	}
-
-	// With our nonce blinding value, we'll now combine both the public
-	// nonces, using the blinding factor to tweak the second nonce:
+	// We'll now combine both the public nonces, using the blinding factor
+	// to tweak the second nonce:
 	// * R = R_1 + b*R_2
-	var nonce btcec.JacobianPoint
-	btcec.ScalarMultNonConst(&nonceBlinder, &r2J, &r2J)
-	btcec.AddNonConst(&r1J, &r2J, &nonce)
-
-	// If the combined nonce it eh point at infinity, then we'll bail out.
-	if nonce == infinityPoint {
-		G := btcec.Generator()
-		G.AsJacobian(&nonce)
+	nonce, nonceBlinder, err := computeSigningNonce(
+		combinedNonce, combinedKey.FinalKey, msg,
+	)
+	if err != nil {
+		return nil, err
 	}
 
 	// Next we'll parse out our two secret nonces, which we'll be using in
@ -299,31 +319,22 @@ func Sign(secNonce [SecNonceSize]byte, privKey *btcec.PrivateKey,
 	}
 
 	pubKey := privKey.PubKey()
-	pubKeyYIsOdd := func() bool {
-		pubKeyBytes := pubKey.SerializeCompressed()
-		return pubKeyBytes[0] == secp.PubKeyFormatCompressedOdd
-	}()
 	combinedKeyYIsOdd := func() bool {
 		combinedKeyBytes := combinedKey.FinalKey.SerializeCompressed()
 		return combinedKeyBytes[0] == secp.PubKeyFormatCompressedOdd
 	}()
 
-	// Next we'll compute our two parity factors for Q the combined public
-	// key, and P, the public key we're signing with. If the keys are odd,
-	// then we'll negate them.
+	// Next we'll compute the two parity factors for Q, the combined key.
+	// If the key is odd, then we'll negate it.
 	parityCombinedKey := new(btcec.ModNScalar).SetInt(1)
-	paritySignKey := new(btcec.ModNScalar).SetInt(1)
 	if combinedKeyYIsOdd {
 		parityCombinedKey.Negate()
 	}
-	if pubKeyYIsOdd {
-		paritySignKey.Negate()
-	}
 
 	// Before we sign below, we'll multiply by our various parity factors
 	// to ensure that the signing key is properly negated (if necessary):
-	// * d = gv⋅gaccv⋅gp⋅d'
-	privKeyScalar.Mul(parityCombinedKey).Mul(paritySignKey).Mul(parityAcc)
+	// * d = g⋅gacc⋅d'
+	privKeyScalar.Mul(parityCombinedKey).Mul(parityAcc)
 
 	// Next we'll create the challenge hash that commits to the combined
 	// nonce, combined public key and also the message:
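Aside (not part of the diff): the parity factor g in d = g⋅gacc⋅d' is just +1 or -1 mod n. A minimal sketch of applying it to a hypothetical scalar with the same ModNScalar API the code above relies on (the accumulated tweak parity gacc is omitted):

package main

import (
	"fmt"

	"github.com/btcsuite/btcd/btcec/v2"
)

func main() {
	// Hypothetical secret scalar d' = 7, for illustration only.
	dPrime := new(btcec.ModNScalar).SetInt(7)

	// g = +1 when the combined key has an even Y coordinate, and -1 mod n
	// otherwise; here we pretend the combined key is odd.
	g := new(btcec.ModNScalar).SetInt(1)
	g.Negate()

	// d = g * d' (starting from a fresh scalar so d' is left untouched).
	d := new(btcec.ModNScalar).Add(dPrime).Mul(g)

	// d is now n-7, i.e. the negation of d' modulo the group order.
	fmt.Printf("%x\n", d.Bytes())
}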
@ -345,7 +356,7 @@ func Sign(secNonce [SecNonceSize]byte, privKey *btcec.PrivateKey,
 	// With mu constructed, we can finally generate our partial signature
 	// as: s = (k1_1 + b*k_2 + e*a*d) mod n.
 	s := new(btcec.ModNScalar)
-	s.Add(&k1).Add(k2.Mul(&nonceBlinder)).Add(e.Mul(a).Mul(&privKeyScalar))
+	s.Add(&k1).Add(k2.Mul(nonceBlinder)).Add(e.Mul(a).Mul(&privKeyScalar))
 
 	sig := NewPartialSignature(s, nonceKey)
 
@ -372,7 +383,7 @@ func (p *PartialSignature) Verify(pubNonce [PubNonceSize]byte,
 	combinedNonce [PubNonceSize]byte, keySet []*btcec.PublicKey,
 	signingKey *btcec.PublicKey, msg [32]byte, signOpts ...SignOption) bool {
 
-	pubKey := schnorr.SerializePubKey(signingKey)
+	pubKey := signingKey.SerializeCompressed()
 
 	return verifyPartialSig(
 		p, pubNonce, combinedNonce, keySet, pubKey, msg, signOpts...,
@ -398,7 +409,6 @@ func verifyPartialSig(partialSig *PartialSignature, pubNonce [PubNonceSize]byte,
 	// Next we'll parse out the two public nonces into something we can
 	// use.
 	//
-
 	// Compute the hash of all the keys here as we'll need it do aggregate
 	// the keys and also at the final step of verification.
 	keysHash := keyHashFingerprint(keySet, opts.sortKeys)
@ -458,7 +468,6 @@ func verifyPartialSig(partialSig *PartialSignature, pubNonce [PubNonceSize]byte,
 	// With our nonce blinding value, we'll now combine both the public
 	// nonces, using the blinding factor to tweak the second nonce:
 	// * R = R_1 + b*R_2
-
 	var nonce btcec.JacobianPoint
 	btcec.ScalarMultNonConst(&nonceBlinder, &r2J, &r2J)
 	btcec.AddNonConst(&r1J, &r2J, &nonce)
@ -516,7 +525,7 @@ func verifyPartialSig(partialSig *PartialSignature, pubNonce [PubNonceSize]byte,
 	var e btcec.ModNScalar
 	e.SetByteSlice(challengeBytes[:])
 
-	signingKey, err := schnorr.ParsePubKey(pubKey)
+	signingKey, err := btcec.ParsePubKey(pubKey)
 	if err != nil {
 		return err
 	}
@ -527,27 +536,24 @@ func verifyPartialSig(partialSig *PartialSignature, pubNonce [PubNonceSize]byte,
 
 	// If the combined key has an odd y coordinate, then we'll negate
 	// parity factor for the signing key.
-	paritySignKey := new(btcec.ModNScalar).SetInt(1)
+	parityCombinedKey := new(btcec.ModNScalar).SetInt(1)
 	combinedKeyBytes := combinedKey.FinalKey.SerializeCompressed()
 	if combinedKeyBytes[0] == secp.PubKeyFormatCompressedOdd {
-		paritySignKey.Negate()
+		parityCombinedKey.Negate()
 	}
 
 	// Next, we'll construct the final parity factor by multiplying the
 	// sign key parity factor with the accumulated parity factor for all
 	// the keys.
-	finalParityFactor := paritySignKey.Mul(parityAcc)
+	finalParityFactor := parityCombinedKey.Mul(parityAcc)
 
-	// Now we'll multiply the parity factor by our signing key, which'll
-	// take care of the amount of negation needed.
 	var signKeyJ btcec.JacobianPoint
 	signingKey.AsJacobian(&signKeyJ)
-	btcec.ScalarMultNonConst(finalParityFactor, &signKeyJ, &signKeyJ)
 
-	// In the final set, we'll check that: s*G == R' + e*a*P.
+	// In the final set, we'll check that: s*G == R' + e*a*g*P.
 	var sG, rP btcec.JacobianPoint
 	btcec.ScalarBaseMultNonConst(s, &sG)
-	btcec.ScalarMultNonConst(e.Mul(a), &signKeyJ, &rP)
+	btcec.ScalarMultNonConst(e.Mul(a).Mul(finalParityFactor), &signKeyJ, &rP)
 	btcec.AddNonConst(&rP, &pubNonceJ, &rP)
 
 	sG.ToAffine()
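Aside (not part of the diff): a toy check of the verification relation above for a single signer, with the second nonce and blinding factor omitted, so that s = k + e*a*g*d should satisfy s*G == R + (e*a*g)*P. All scalar values here are hypothetical:

package main

import (
	"fmt"

	"github.com/btcsuite/btcd/btcec/v2"
)

func main() {
	// Toy values: secret key d = 7, secret nonce k = 11, challenge e = 3,
	// key-agg coefficient a = 5, parity g = +1.
	d := new(btcec.ModNScalar).SetInt(7)
	k := new(btcec.ModNScalar).SetInt(11)

	// P = d*G and R = k*G.
	var P, R btcec.JacobianPoint
	btcec.ScalarBaseMultNonConst(d, &P)
	btcec.ScalarBaseMultNonConst(k, &R)

	// e*a*g computed twice since Mul mutates its receiver: once to scale
	// the public key, once folded into s.
	eag := new(btcec.ModNScalar).SetInt(3)
	eag.Mul(new(btcec.ModNScalar).SetInt(5))
	eagd := new(btcec.ModNScalar).SetInt(3)
	eagd.Mul(new(btcec.ModNScalar).SetInt(5)).Mul(d)

	// s = k + e*a*g*d.
	s := new(btcec.ModNScalar).Add(k).Add(eagd)

	// Mirror the check above: s*G == R + (e*a*g)*P.
	var sG, rP btcec.JacobianPoint
	btcec.ScalarBaseMultNonConst(s, &sG)
	btcec.ScalarMultNonConst(eag, &P, &rP)
	btcec.AddNonConst(&rP, &R, &rP)

	sG.ToAffine()
	rP.ToAffine()
	lhs := btcec.NewPublicKey(&sG.X, &sG.Y)
	rhs := btcec.NewPublicKey(&rP.X, &rP.Y)
	fmt.Println(lhs.IsEqual(rhs)) // true
}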
391  btcec/schnorr/musig2/sign_test.go  Normal file
@ -0,0 +1,391 @@
+// Copyright 2013-2022 The btcsuite developers
+
+package musig2
+
+import (
+	"bytes"
+	"encoding/hex"
+	"encoding/json"
+	"fmt"
+	"os"
+	"path"
+	"strings"
+	"testing"
+
+	"github.com/btcsuite/btcd/btcec/v2"
+	secp "github.com/decred/dcrd/dcrec/secp256k1/v4"
+	"github.com/stretchr/testify/require"
+)
+
+const (
+	signVerifyTestVectorFileName = "sign_verify_vectors.json"
+
+	sigCombineTestVectorFileName = "sig_agg_vectors.json"
+)
+
+type signVerifyValidCase struct {
+	Indices []int `json:"key_indices"`
+
+	NonceIndices []int `json:"nonce_indices"`
+
+	AggNonceIndex int `json:"aggnonce_index"`
+
+	MsgIndex int `json:"msg_index"`
+
+	SignerIndex int `json:"signer_index"`
+
+	Expected string `json:"expected"`
+}
+
+type signErrorCase struct {
+	Indices []int `json:"key_indices"`
+
+	AggNonceIndex int `json:"aggnonce_index"`
+
+	MsgIndex int `json:"msg_index"`
+
+	SecNonceIndex int `json:"secnonce_index"`
+
+	Comment string `json:"comment"`
+}
+
+type verifyFailCase struct {
+	Sig string `json:"sig"`
+
+	Indices []int `json:"key_indices"`
+
+	NonceIndices []int `json:"nonce_indices"`
+
+	MsgIndex int `json:"msg_index"`
+
+	SignerIndex int `json:"signer_index"`
+
+	Comment string `json:"comment"`
+}
+
+type verifyErrorCase struct {
+	Sig string `json:"sig"`
+
+	Indices []int `json:"key_indices"`
+
+	NonceIndices []int `json:"nonce_indices"`
+
+	MsgIndex int `json:"msg_index"`
+
+	SignerIndex int `json:"signer_index"`
+
+	Comment string `json:"comment"`
+}
+
+type signVerifyTestVectors struct {
+	PrivKey string `json:"sk"`
+
+	PubKeys []string `json:"pubkeys"`
+
+	PrivNonces []string `json:"secnonces"`
+
+	PubNonces []string `json:"pnonces"`
+
+	AggNonces []string `json:"aggnonces"`
+
+	Msgs []string `json:"msgs"`
+
+	ValidCases []signVerifyValidCase `json:"valid_test_cases"`
+
+	SignErrorCases []signErrorCase `json:"sign_error_test_cases"`
+
+	VerifyFailCases []verifyFailCase `json:"verify_fail_test_cases"`
+
+	VerifyErrorCases []verifyErrorCase `json:"verify_error_test_cases"`
+}
+
+// TestMusig2SignVerify tests that we pass the musig2 verification tests.
+func TestMusig2SignVerify(t *testing.T) {
+	t.Parallel()
+
+	testVectorPath := path.Join(
+		testVectorBaseDir, signVerifyTestVectorFileName,
+	)
+	testVectorBytes, err := os.ReadFile(testVectorPath)
+	require.NoError(t, err)
+
+	var testCases signVerifyTestVectors
+	require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
+
+	privKey, _ := btcec.PrivKeyFromBytes(mustParseHex(testCases.PrivKey))
+
+	for i, testCase := range testCases.ValidCases {
+		testCase := testCase
+
+		testName := fmt.Sprintf("valid_case_%v", i)
+		t.Run(testName, func(t *testing.T) {
+			pubKeys, err := keysFromIndices(
+				t, testCase.Indices, testCases.PubKeys,
+			)
+			require.NoError(t, err)
+
+			pubNonces := pubNoncesFromIndices(
+				t, testCase.NonceIndices, testCases.PubNonces,
+			)
+
+			combinedNonce, err := AggregateNonces(pubNonces)
+			require.NoError(t, err)
+
+			var msg [32]byte
+			copy(msg[:], mustParseHex(testCases.Msgs[testCase.MsgIndex]))
+
+			var secNonce [SecNonceSize]byte
+			copy(secNonce[:], mustParseHex(testCases.PrivNonces[0]))
+
+			partialSig, err := Sign(
+				secNonce, privKey, combinedNonce, pubKeys,
+				msg,
+			)
+
+			var partialSigBytes [32]byte
+			partialSig.S.PutBytesUnchecked(partialSigBytes[:])
+
+			require.Equal(
+				t, hex.EncodeToString(partialSigBytes[:]),
+				hex.EncodeToString(mustParseHex(testCase.Expected)),
+			)
+		})
+	}
+
+	for _, testCase := range testCases.SignErrorCases {
+		testCase := testCase
+
+		testName := fmt.Sprintf("invalid_case_%v",
+			strings.ToLower(testCase.Comment))
+
+		t.Run(testName, func(t *testing.T) {
+			pubKeys, err := keysFromIndices(
+				t, testCase.Indices, testCases.PubKeys,
+			)
+			if err != nil {
+				require.ErrorIs(t, err, secp.ErrPubKeyNotOnCurve)
+				return
+			}
+
+			var aggNonce [PubNonceSize]byte
+			copy(
+				aggNonce[:],
+				mustParseHex(
+					testCases.AggNonces[testCase.AggNonceIndex],
+				),
+			)
+
+			var msg [32]byte
+			copy(msg[:], mustParseHex(testCases.Msgs[testCase.MsgIndex]))
+
+			var secNonce [SecNonceSize]byte
+			copy(
+				secNonce[:],
+				mustParseHex(
+					testCases.PrivNonces[testCase.SecNonceIndex],
+				),
+			)
+
+			_, err = Sign(
+				secNonce, privKey, aggNonce, pubKeys,
+				msg,
+			)
+			require.Error(t, err)
+		})
+	}
+
+	for _, testCase := range testCases.VerifyFailCases {
+		testCase := testCase
+
+		testName := fmt.Sprintf("verify_fail_%v",
+			strings.ToLower(testCase.Comment))
+		t.Run(testName, func(t *testing.T) {
+			pubKeys, err := keysFromIndices(
+				t, testCase.Indices, testCases.PubKeys,
+			)
+			require.NoError(t, err)
+
+			pubNonces := pubNoncesFromIndices(
+				t, testCase.NonceIndices, testCases.PubNonces,
+			)
+
+			combinedNonce, err := AggregateNonces(pubNonces)
+			require.NoError(t, err)
+
+			var msg [32]byte
+			copy(
+				msg[:],
+				mustParseHex(testCases.Msgs[testCase.MsgIndex]),
+			)
+
+			var secNonce [SecNonceSize]byte
+			copy(secNonce[:], mustParseHex(testCases.PrivNonces[0]))
+
+			signerNonce := secNonceToPubNonce(secNonce)
+
+			var partialSig PartialSignature
+			err = partialSig.Decode(
+				bytes.NewReader(mustParseHex(testCase.Sig)),
+			)
+			if err != nil && strings.Contains(testCase.Comment, "group size") {
+				require.ErrorIs(t, err, ErrPartialSigInvalid)
+			}
+
+			err = verifyPartialSig(
+				&partialSig, signerNonce, combinedNonce,
+				pubKeys, privKey.PubKey().SerializeCompressed(),
+				msg,
+			)
+			require.Error(t, err)
+		})
+	}
+
+	for _, testCase := range testCases.VerifyErrorCases {
+		testCase := testCase
+
+		testName := fmt.Sprintf("verify_error_%v",
+			strings.ToLower(testCase.Comment))
+		t.Run(testName, func(t *testing.T) {
+			switch testCase.Comment {
+			case "Invalid pubnonce":
+				pubNonces := pubNoncesFromIndices(
+					t, testCase.NonceIndices, testCases.PubNonces,
+				)
+				_, err := AggregateNonces(pubNonces)
+				require.ErrorIs(t, err, secp.ErrPubKeyNotOnCurve)
+
+			case "Invalid pubkey":
+				_, err := keysFromIndices(
+					t, testCase.Indices, testCases.PubKeys,
+				)
+				require.ErrorIs(t, err, secp.ErrPubKeyNotOnCurve)
+
+			default:
+				t.Fatalf("unhandled case: %v", testCase.Comment)
+			}
+		})
+	}
+
+}
+
+type sigCombineValidCase struct {
+	AggNonce string `json:"aggnonce"`
+
+	NonceIndices []int `json:"nonce_indices"`
+
+	Indices []int `json:"key_indices"`
+
+	TweakIndices []int `json:"tweak_indices"`
+
+	IsXOnly []bool `json:"is_xonly"`
+
+	PSigIndices []int `json:"psig_indices"`
+
+	Expected string `json:"expected"`
+}
+
+type sigCombineTestVectors struct {
+	PubKeys []string `json:"pubkeys"`
+
+	PubNonces []string `json:"pnonces"`
+
+	Tweaks []string `json:"tweaks"`
+
+	Psigs []string `json:"psigs"`
+
+	Msg string `json:"msg"`
+
+	ValidCases []sigCombineValidCase `json:"valid_test_cases"`
+}
+
+func pSigsFromIndicies(t *testing.T, sigs []string, indices []int) []*PartialSignature {
+	pSigs := make([]*PartialSignature, len(indices))
+	for i, idx := range indices {
+		var pSig PartialSignature
+		err := pSig.Decode(bytes.NewReader(mustParseHex(sigs[idx])))
+		require.NoError(t, err)
+
+		pSigs[i] = &pSig
+	}
+
+	return pSigs
+}
+
+// TestMusig2SignCombine tests that we pass the musig2 sig combination tests.
+func TestMusig2SignCombine(t *testing.T) {
+	t.Parallel()
+
+	testVectorPath := path.Join(
+		testVectorBaseDir, sigCombineTestVectorFileName,
+	)
+	testVectorBytes, err := os.ReadFile(testVectorPath)
+	require.NoError(t, err)
+
+	var testCases sigCombineTestVectors
+	require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
+
+	var msg [32]byte
+	copy(msg[:], mustParseHex(testCases.Msg))
+
+	for i, testCase := range testCases.ValidCases {
+		testCase := testCase
+
+		testName := fmt.Sprintf("valid_case_%v", i)
+		t.Run(testName, func(t *testing.T) {
+			pubKeys, err := keysFromIndices(
+				t, testCase.Indices, testCases.PubKeys,
+			)
+			require.NoError(t, err)
+
+			pubNonces := pubNoncesFromIndices(
+				t, testCase.NonceIndices, testCases.PubNonces,
+			)
+
+			partialSigs := pSigsFromIndicies(
+				t, testCases.Psigs, testCase.PSigIndices,
+			)
+
+			var (
+				combineOpts []CombineOption
+				keyOpts     []KeyAggOption
+			)
+			if len(testCase.TweakIndices) > 0 {
+				tweaks := tweaksFromIndices(
+					t, testCase.TweakIndices,
+					testCases.Tweaks, testCase.IsXOnly,
+				)
+
+				combineOpts = append(combineOpts, WithTweakedCombine(
+					msg, pubKeys, tweaks, false,
+				))
+
+				keyOpts = append(keyOpts, WithKeyTweaks(tweaks...))
+			}
+
+			combinedKey, _, _, err := AggregateKeys(
+				pubKeys, false, keyOpts...,
+			)
+			require.NoError(t, err)
+
+			combinedNonce, err := AggregateNonces(pubNonces)
+			require.NoError(t, err)
+
+			finalNonceJ, _, err := computeSigningNonce(
+				combinedNonce, combinedKey.FinalKey, msg,
+			)
+
+			finalNonceJ.ToAffine()
+			finalNonce := btcec.NewPublicKey(
+				&finalNonceJ.X, &finalNonceJ.Y,
+			)
+
+			combinedSig := CombineSigs(
+				finalNonce, partialSigs, combineOpts...,
+			)
+			require.Equal(t,
+				strings.ToLower(testCase.Expected),
+				hex.EncodeToString(combinedSig.Serialize()),
+			)
+		})
+	}
+}
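Aside (not part of the diff): per the MuSig2 spec, the combined signature's s value in the untweaked case is simply the sum of the partial s values mod n, with the final nonce R derived as in the test above via computeSigningNonce; CombineSigs additionally handles the tweaked cases exercised by these vectors. A toy sketch of the summation with hypothetical scalars:

package main

import (
	"fmt"

	"github.com/btcsuite/btcd/btcec/v2"
)

func main() {
	// Hypothetical partial s values from two signers.
	s1 := new(btcec.ModNScalar).SetInt(17)
	s2 := new(btcec.ModNScalar).SetInt(25)

	// The combined s is the sum modulo the group order n.
	s := new(btcec.ModNScalar).Add(s1).Add(s2)

	fmt.Printf("%x\n", s.Bytes()) // ...2a (42)
}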