
Commit 87394ea

jonasnick authored and real-or-random committed
Add BIP327: MuSig2 for BIP340-compatible Multi-Signatures
1 parent a8a0191 · commit 87394ea

13 files changed: +2699 −0 lines

README.mediawiki

Lines changed: 7 additions & 0 deletions
@@ -1009,6 +1009,13 @@ Those proposing changes should consider that ultimately consent may rest with th
 | Informational
 | Draft
 |-
+| [[bip-0327.mediawiki|327]]
+|
+| MuSig2 for BIP340-compatible Multi-Signatures
+| Jonas Nick, Tim Ruffing, Elliott Jin
+| Informational
+| Draft
+|-
 | [[bip-0329.mediawiki|329]]
 | Applications
 | Wallet Labels Export Format

bip-0327.mediawiki

Lines changed: 828 additions & 0 deletions
Large diffs are not rendered by default.
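
For orientation, a minimal two-signer MuSig2 session against the reference.py API that this commit ships (and that the generator script below exercises) might look like the following sketch. This is not part of the commit: the keys and message are reused from the test vectors below, the nonce seeds are arbitrary placeholders, and a real implementation would derive nonces with fresh randomness via the spec's NonceGen rather than nonce_gen_internal with fixed seeds.

from reference import *

sk1 = bytes.fromhex("7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671")
sk2 = bytes.fromhex("3874D22DE7A7290C49CE7F1DC17D1A8CD8918E1F799055139D57FC0988D04D10")
pk1, pk2 = individual_pk(sk1), individual_pk(sk2)
msg = bytes.fromhex("599C67EA410D005B9DA90817CF03ED3B1C868E4DA4EDF00A5880B0082C237869")

# Round 1: each signer generates a nonce pair and shares the public part.
secnonce1, pubnonce1 = nonce_gen_internal((1).to_bytes(32, 'big'), None, pk1, None, None, None)
secnonce2, pubnonce2 = nonce_gen_internal((2).to_bytes(32, 'big'), None, pk2, None, None, None)
aggnonce = nonce_agg([pubnonce1, pubnonce2])

# Round 2: both signers produce partial signatures over the same session
# (no tweaks here, hence the two empty lists).
session_ctx = SessionContext(aggnonce, [pk1, pk2], [], [], msg)
psig1 = sign(bytearray(secnonce1), sk1, session_ctx)
psig2 = sign(bytearray(secnonce2), sk2, session_ctx)

# Anyone can aggregate the partial signatures and verify the result as an
# ordinary BIP340 signature under the x-only aggregate key.
sig = partial_sig_agg([psig1, psig2], session_ctx)
aggpk = get_xonly_pk(key_agg_and_tweak([pk1, pk2], [], []))
assert schnorr_verify(msg, aggpk, sig)
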

bip-0327/gen_vectors_helper.py

Lines changed: 184 additions & 0 deletions
from reference import *
# json, os, and sys are used below when reading vector JSON; reference's
# wildcard import typically provides them, but we make them explicit here.
import json, os, sys

def gen_key_agg_vectors():
    print("key_agg_vectors.json: Intermediate tweaking result is point at infinity")
    sk = bytes.fromhex("7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671")
    pk = individual_pk(sk)
    keygen_ctx = key_agg([pk])
    aggpoint, _, _ = keygen_ctx
    aggsk = key_agg_coeff([pk], pk)*int_from_bytes(sk) % n
    t = n - aggsk
    # The tweak t cancels the aggregate secret key: t*G + aggpoint is the
    # point at infinity, which apply_tweak must reject.
    assert point_add(point_mul(G, t), aggpoint) == None
    is_xonly = False
    tweak = bytes_from_int(t)
    assert_raises(ValueError, lambda: apply_tweak(keygen_ctx, tweak, is_xonly), lambda e: True)
    print(" pubkey:", pk.hex().upper())
    print(" tweak: ", tweak.hex().upper())

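For contrast with the failure case above, a sketch (not part of the commit; the function name and the tweak value 1 are hypothetical) of the non-error path: a plain, non-x-only tweak t replaces the aggregate point Q with Q + t*G, so any tweak that does not cancel Q is accepted and yields a new key generation context.

def demo_valid_tweak():
    sk = bytes.fromhex("7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671")
    keygen_ctx = key_agg([individual_pk(sk)])
    aggpoint, _, _ = keygen_ctx
    tweaked_ctx = apply_tweak(keygen_ctx, bytes_from_int(1), False)
    # Plain tweaking by 1 adds 1*G = G to the aggregate point.
    assert tweaked_ctx[0] == point_add(aggpoint, G)
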
def check_sign_verify_vectors():
    with open(os.path.join(sys.path[0], 'vectors', 'sign_verify_vectors.json')) as f:
        test_data = json.load(f)
    X = fromhex_all(test_data["pubkeys"])
    pnonce = fromhex_all(test_data["pnonces"])
    aggnonces = fromhex_all(test_data["aggnonces"])
    msgs = fromhex_all(test_data["msgs"])

    valid_test_cases = test_data["valid_test_cases"]
    for (i, test_case) in enumerate(valid_test_cases):
        pubkeys = [X[i] for i in test_case["key_indices"]]
        pubnonces = [pnonce[i] for i in test_case["nonce_indices"]]
        aggnonce = aggnonces[test_case["aggnonce_index"]]
        assert nonce_agg(pubnonces) == aggnonce
        msg = msgs[test_case["msg_index"]]
        signer_index = test_case["signer_index"]
        expected = bytes.fromhex(test_case["expected"])

        session_ctx = SessionContext(aggnonce, pubkeys, [], [], msg)
        (Q, _, _, _, R, _) = get_session_values(session_ctx)
        # Make sure the vectors include tests for both variants of Q and R
        if i == 0:
            assert has_even_y(Q) and not has_even_y(R)
        if i == 1:
            assert not has_even_y(Q) and has_even_y(R)
        if i == 2:
            assert has_even_y(Q) and has_even_y(R)

def check_tweak_vectors():
    with open(os.path.join(sys.path[0], 'vectors', 'tweak_vectors.json')) as f:
        test_data = json.load(f)

    X = fromhex_all(test_data["pubkeys"])
    pnonce = fromhex_all(test_data["pnonces"])
    tweak = fromhex_all(test_data["tweaks"])
    valid_test_cases = test_data["valid_test_cases"]

    for (i, test_case) in enumerate(valid_test_cases):
        pubkeys = [X[i] for i in test_case["key_indices"]]
        tweaks = [tweak[i] for i in test_case["tweak_indices"]]
        is_xonly = test_case["is_xonly"]

        _, gacc, _ = key_agg_and_tweak(pubkeys, tweaks, is_xonly)
        # Make sure the vectors include tests for gacc = 1 and -1
        # (gacc = n - 1 encodes -1 mod n)
        if i == 0:
            assert gacc == n - 1
        if i == 1:
            assert gacc == 1

def sig_agg_vectors():
    print("sig_agg_vectors.json:")
    sk = fromhex_all([
        "7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671",
        "3874D22DE7A7290C49CE7F1DC17D1A8CD8918E1F799055139D57FC0988D04D10",
        "D0EA1B84481ED1BCFAA39D6775F97BDC9BF8D7C02FD0C009D6D85BAE5EC7B87A",
        "FC2BF9E056B273AF0A8AABB815E541A3552C142AC10D4FE584F01D2CAB84F577"])
    pubkeys = list(map(lambda secret: individual_pk(secret), sk))
    indices32 = [i.to_bytes(32, 'big') for i in range(6)]
    secnonces, pnonces = zip(*[nonce_gen_internal(r, None, pubkeys[0], None, None, None) for r in indices32])
    tweaks = fromhex_all([
        "B511DA492182A91B0FFB9A98020D55F260AE86D7ECBD0399C7383D59A5F2AF7C",
        "A815FE049EE3C5AAB66310477FBC8BCCCAC2F3395F59F921C364ACD78A2F48DC",
        "75448A87274B056468B977BE06EB1E9F657577B7320B0A3376EA51FD420D18A8"])
    msg = bytes.fromhex("599C67EA410D005B9DA90817CF03ED3B1C868E4DA4EDF00A5880B0082C237869")

    psigs = [None] * 9

    valid_test_cases = [
        {
            "aggnonce": None,
            "nonce_indices": [0, 1],
            "key_indices": [0, 1],
            "tweak_indices": [],
            "is_xonly": [],
            "psig_indices": [0, 1],
        }, {
            "aggnonce": None,
            "nonce_indices": [0, 2],
            "key_indices": [0, 2],
            "tweak_indices": [],
            "is_xonly": [],
            "psig_indices": [2, 3],
        }, {
            "aggnonce": None,
            "nonce_indices": [0, 3],
            "key_indices": [0, 2],
            "tweak_indices": [0],
            "is_xonly": [False],
            "psig_indices": [4, 5],
        }, {
            "aggnonce": None,
            "nonce_indices": [0, 4],
            "key_indices": [0, 3],
            "tweak_indices": [0, 1, 2],
            "is_xonly": [True, False, True],
            "psig_indices": [6, 7],
        },
    ]
    for (i, test_case) in enumerate(valid_test_cases):
        is_xonly = test_case["is_xonly"]
        nonce_indices = test_case["nonce_indices"]
        key_indices = test_case["key_indices"]
        psig_indices = test_case["psig_indices"]
        vec_pnonces = [pnonces[i] for i in nonce_indices]
        vec_pubkeys = [pubkeys[i] for i in key_indices]
        vec_tweaks = [tweaks[i] for i in test_case["tweak_indices"]]

        aggnonce = nonce_agg(vec_pnonces)
        test_case["aggnonce"] = aggnonce.hex().upper()
        session_ctx = SessionContext(aggnonce, vec_pubkeys, vec_tweaks, is_xonly, msg)

        for j in range(len(key_indices)):
            # WARNING: An actual implementation should _not_ copy the secnonce.
            # Reusing the secnonce, as we do here for testing purposes, can leak
            # the secret key. (See the sketch after this function.)
            secnonce_tmp = bytearray(secnonces[nonce_indices[j]][:64] + pubkeys[key_indices[j]])
            psigs[psig_indices[j]] = sign(secnonce_tmp, sk[key_indices[j]], session_ctx)
        sig = partial_sig_agg([psigs[i] for i in psig_indices], session_ctx)
        keygen_ctx = key_agg_and_tweak(vec_pubkeys, vec_tweaks, is_xonly)
        # To maximize coverage of the sig_agg algorithm, we want one public key
        # point with an even and one with an odd Y coordinate.
        if i == 0:
            assert(has_even_y(keygen_ctx[0]))
        if i == 1:
            assert(not has_even_y(keygen_ctx[0]))
        aggpk = get_xonly_pk(keygen_ctx)
        assert schnorr_verify(msg, aggpk, sig)
        test_case["expected"] = sig.hex().upper()

    error_test_case = {
        "aggnonce": None,
        "nonce_indices": [0, 4],
        "key_indices": [0, 3],
        "tweak_indices": [0, 1, 2],
        "is_xonly": [True, False, True],
        "psig_indices": [7, 8],
        "error": {
            "type": "invalid_contribution",
            "signer": 1
        },
        "comment": "Partial signature is invalid because it exceeds group size"
    }

    psigs[8] = bytes.fromhex("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141")

    vec_pnonces = [pnonces[i] for i in error_test_case["nonce_indices"]]
    aggnonce = nonce_agg(vec_pnonces)
    error_test_case["aggnonce"] = aggnonce.hex().upper()

    def tohex_all(l):
        return list(map(lambda e: e.hex().upper(), l))

    print(json.dumps({
        "pubkeys": tohex_all(pubkeys),
        "pnonces": tohex_all(pnonces),
        "tweaks": tohex_all(tweaks),
        "psigs": tohex_all(psigs),
        "msg": msg.hex().upper(),
        "valid_test_cases": valid_test_cases,
        "error_test_cases": [error_test_case]
    }, indent=4))

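A sketch (not part of the commit; the function name is hypothetical) of the guard behind the WARNING above, assuming reference.py's sign() zeroes the secnonce it is handed after use, as BIP327's reference code does: honestly reusing the same bytearray then fails loudly instead of leaking the secret key, and copying the secnonce, as the generator does for testing only, is exactly what bypasses this guard.

def demo_secnonce_is_consumed():
    sk = bytes.fromhex("7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671")
    pk = individual_pk(sk)
    msg = bytes.fromhex("599C67EA410D005B9DA90817CF03ED3B1C868E4DA4EDF00A5880B0082C237869")
    secnonce, pubnonce = nonce_gen_internal((1).to_bytes(32, 'big'), None, pk, None, None, None)
    secnonce = bytearray(secnonce)
    # Single-signer session, just to exercise sign() twice with one secnonce.
    session_ctx = SessionContext(nonce_agg([pubnonce]), [pk], [], [], msg)
    sign(secnonce, sk, session_ctx)  # first use succeeds and zeroes secnonce
    assert_raises(ValueError, lambda: sign(secnonce, sk, session_ctx), lambda e: True)
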
gen_key_agg_vectors()
check_sign_verify_vectors()
check_tweak_vectors()
print()
sig_agg_vectors()
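
These five calls are the script's entry point: run from the bip-0327 directory, next to reference.py and its vectors/ subdirectory, it prints the key_agg edge case and the freshly generated sig_agg_vectors.json content to stdout, while the two check_* functions assert against the existing vector files. A hypothetical consumer (not part of the commit) could re-verify the printed sig_agg JSON along these lines, mirroring the checks in check_sign_verify_vectors:

def check_sig_agg_vectors(test_data):
    # test_data is the parsed JSON that sig_agg_vectors() prints.
    pubkeys = fromhex_all(test_data["pubkeys"])
    tweaks = fromhex_all(test_data["tweaks"])
    psigs = fromhex_all(test_data["psigs"])
    msg = bytes.fromhex(test_data["msg"])
    for case in test_data["valid_test_cases"]:
        vec_pubkeys = [pubkeys[i] for i in case["key_indices"]]
        vec_tweaks = [tweaks[i] for i in case["tweak_indices"]]
        aggnonce = bytes.fromhex(case["aggnonce"])
        session_ctx = SessionContext(aggnonce, vec_pubkeys, vec_tweaks, case["is_xonly"], msg)
        # Re-aggregate the partial signatures and compare to the vector...
        sig = partial_sig_agg([psigs[i] for i in case["psig_indices"]], session_ctx)
        assert sig == bytes.fromhex(case["expected"])
        # ...then verify against the (tweaked) x-only aggregate key.
        aggpk = get_xonly_pk(key_agg_and_tweak(vec_pubkeys, vec_tweaks, case["is_xonly"]))
        assert schnorr_verify(msg, aggpk, sig)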
