7 #ifndef SECP256K1_MODULE_BULLETPROOF_CIRCUIT_IMPL
8 #define SECP256K1_MODULE_BULLETPROOF_CIRCUIT_IMPL
/* NOTE(review): fragment of the inner-product a/b/g/h callback (referenced as
 * secp256k1_bulletproof_circuit_abgh_callback below). The enclosing function
 * signature and most of its control flow are NOT visible in this excerpt; the
 * leading integers on each line appear to be line numbers from the original
 * file fused into the text by a faulty extraction, and several expressions are
 * split mid-token across lines. Do not edit without recovering the original. */
25 const int is_g = idx % 2 == 0;
/* Candidate computation #1 (visible): sc = (l3[idx/2]*x + ao[idx/2]) * x.
 * The branch that selects among the three computations below is missing from
 * this excerpt (original lines 26-30, 34, 36, 39-40 absent) — presumably it
 * dispatches on is_g / idx parity; TODO confirm against the original file. */
31 secp256k1_scalar_mul(sc, &ctx->
comp_circ->
l3[idx / 2], &ctx->
x);
32 secp256k1_scalar_add(sc, sc, &ctx->
assn->
ao[idx / 2]);
33 secp256k1_scalar_mul(sc, sc, &ctx->
x);
/* Candidate computation #2 (visible): sc = (l3[idx/2]*x^2 + l1[idx/2]) * x. */
35 secp256k1_scalar_mul(sc, &ctx->
comp_circ->
l3[idx / 2], &ctx->
x2);
37 secp256k1_scalar_add(sc, sc, &ctx->
comp_circ->
l1[idx / 2]);
38 secp256k1_scalar_mul(sc, sc, &ctx->
x);
/* Candidate computation #3 (visible): sc = (r3[idx/2]*x^2 + r1[idx/2])*x + r0[idx/2]. */
41 secp256k1_scalar_mul(sc, &ctx->
comp_circ->
r3[idx / 2], &ctx->
x2);
42 secp256k1_scalar_add(sc, sc, &ctx->
comp_circ->
r1[idx / 2]);
43 secp256k1_scalar_mul(sc, sc, &ctx->
x);
44 secp256k1_scalar_add(sc, sc, &ctx->
comp_circ->
r0[idx / 2]);
/* Prover for "relation 66" (the Bulletproofs arithmetic-circuit relation).
 * NOTE(review): this body is a non-contiguous sampling of the original
 * function — the embedded leading integers look like original line numbers,
 * and their gaps show that local declarations, error `return`s, closing
 * braces and several loops are missing from this excerpt. The comments below
 * describe only what the visible calls establish; everything else is hedged. */
55 static int secp256k1_bulletproof_relation66_prove_impl(
const secp256k1_ecmult_context *ecmult_ctx,
secp256k1_scratch *scratch,
unsigned char *proof,
size_t *plen,
const secp256k1_bulletproof_circuit_assignment *assn,
const secp256k1_ge *commitp,
const secp256k1_scalar *blinds,
size_t nc,
const secp256k1_ge *value_gen,
const secp256k1_bulletproof_circuit *circ,
const secp256k1_bulletproof_generators *gens,
const unsigned char *nonce,
const unsigned char *extra_commit,
size_t extra_commit_len) {
59 unsigned char commit[32] = {0};
/* Proof layout (from the offsets used below): 32-byte taux, 32-byte mu,
 * 256+1 bytes of 8 serialized points, then the inner-product proof. */
75 if (*plen < 64 + 256 + 1) {
/* Fiat-Shamir transcript: absorb the input commitments, the value
 * generator, and (optionally) caller-supplied extra commitment data. */
81 secp256k1_bulletproof_update_commit_n(commit, commitp, nc);
83 secp256k1_bulletproof_update_commit_n(commit, value_gen, 1);
85 if (extra_commit != NULL) {
86 secp256k1_sha256_initialize(&
sha256);
87 secp256k1_sha256_write(&
sha256, commit, 32);
88 secp256k1_sha256_write(&
sha256, extra_commit, extra_commit_len);
89 secp256k1_sha256_finalize(&
sha256, commit);
/* Deterministically derive the blinding scalars from the caller's nonce
 * (alpha/beta/rho blind A_I/A_O/S; tau1..tau6 blind the T_i commitments). */
93 secp256k1_scalar_chacha20(&alpha, &beta, nonce, 0);
94 secp256k1_scalar_chacha20(&rho, &tau1, nonce, 1);
95 secp256k1_scalar_chacha20(&tau3, &tau4, nonce, 2);
96 secp256k1_scalar_chacha20(&tau5, &tau6, nonce, 3);
/* Build A_I = alpha*H + sum over wire bits: constant-time cmov selects
 * +gens[i] when the assigned bit al[i] is one, else the negated term. */
105 secp256k1_ecmult_const(&aij, gens->
blinding_gen, &alpha, 256);
106 for (i = 0; i < circ->
n_bits; i++) {
109 secp256k1_ge_neg(&aterm, &aterm);
110 secp256k1_fe_cmov(&aterm.
x, &gens->
gens[i].
x, secp256k1_scalar_is_one(&assn->
al[i]));
111 secp256k1_fe_cmov(&aterm.
y, &gens->
gens[i].
y, secp256k1_scalar_is_one(&assn->
al[i]));
112 secp256k1_gej_add_ge(&aij, &aij, &aterm);
114 secp256k1_ge_set_gej(&tmpge, &aij);
116 secp256k1_gej_add_ge(&aij, &aij, &tmpge);
117 secp256k1_ge_set_gej(&tmpge, &aij);
119 secp256k1_gej_add_ge(&aij, &aij, &tmpge);
/* Build S = rho*H + sum l3[i]*G_i + sum r3[i]*H_i (second half of gens). */
123 secp256k1_ecmult_const(&sj, gens->
blinding_gen, &rho, 256);
124 for (i = 0; i < circ->
n_gates; i++) {
128 secp256k1_ecmult_const(&termj, &gens->
gens[i], &comp_circ->
l3[i], 256);
129 secp256k1_ge_set_gej(&term, &termj);
130 secp256k1_gej_add_ge(&sj, &sj, &term);
131 secp256k1_ecmult_const(&termj, &gens->
gens[i + gens->
n/2], &comp_circ->
r3[i], 256);
132 secp256k1_ge_set_gej(&term, &termj);
133 secp256k1_gej_add_ge(&sj, &sj, &term);
/* out_pt[0..2] = A_I, A_O, S; aoj's computation is absent from this excerpt. */
137 secp256k1_ge_set_gej(&out_pt[0], &aij);
138 secp256k1_ge_set_gej(&out_pt[1], &aoj);
139 secp256k1_ge_set_gej(&out_pt[2], &sj);
/* Challenges y and z from the transcript; reject the (negligible) overflow
 * and zero cases. The early-return after deallocate is missing here. */
141 secp256k1_bulletproof_update_commit_n(commit, &out_pt[0], 3);
142 secp256k1_scalar_set_b32(&y, commit, &overflow);
143 if (overflow || secp256k1_scalar_is_zero(&y)) {
144 secp256k1_scratch_deallocate_frame(scratch);
147 secp256k1_bulletproof_update_commit_n(commit, NULL, 0);
148 secp256k1_scalar_set_b32(&z, commit, &overflow);
149 if (overflow || secp256k1_scalar_is_zero(&z)) {
150 secp256k1_scratch_deallocate_frame(scratch);
153 secp256k1_scalar_inverse_var(&yinv, &y);
/* Accumulate the t-polynomial coefficients t[1..6] from cross terms of the
 * l/r polynomial coefficients (t[0]/t[2]'s full computation partly absent). */
172 for (i = 0; i < 6; i++) {
173 secp256k1_scalar_clear(&t[i + 1]);
175 for (i = 0; i < circ->
n_gates; i++) {
178 if (i < assn->n_gates) {
181 secp256k1_scalar_clear(&ao);
185 secp256k1_scalar_mul(&tmp, &comp_circ->
l1[i], &comp_circ->
r0[i]);
186 secp256k1_scalar_add(&t[1], &t[1], &tmp);
188 secp256k1_scalar_mul(&tmp, &comp_circ->
l1[i], &comp_circ->
r1[i]);
189 secp256k1_scalar_add(&t[2], &t[2], &tmp);
190 secp256k1_scalar_mul(&tmp, &ao, &comp_circ->
r0[i]);
191 secp256k1_scalar_add(&t[2], &t[2], &tmp);
193 secp256k1_scalar_mul(&tmp, &ao, &comp_circ->
r1[i]);
194 secp256k1_scalar_add(&t[3], &t[3], &tmp);
195 secp256k1_scalar_mul(&tmp, &comp_circ->
l3[i], &comp_circ->
r0[i]);
196 secp256k1_scalar_add(&t[3], &t[3], &tmp);
198 secp256k1_scalar_mul(&tmp, &comp_circ->
l3[i], &comp_circ->
r1[i]);
199 secp256k1_scalar_add(&t[4], &t[4], &tmp);
200 secp256k1_scalar_mul(&tmp, &comp_circ->
l1[i], &comp_circ->
r3[i]);
201 secp256k1_scalar_add(&t[4], &t[4], &tmp);
203 secp256k1_scalar_mul(&tmp, &ao, &comp_circ->
r3[i]);
204 secp256k1_scalar_add(&t[5], &t[5], &tmp);
206 secp256k1_scalar_mul(&tmp, &comp_circ->
l3[i], &comp_circ->
r3[i]);
207 secp256k1_scalar_add(&t[6], &t[6], &tmp);
/* Commit to t1, t3..t6 (T_i = t_i*value_gen + tau_i*H) as out_pt[3..7]. */
211 secp256k1_bulletproof_vector_commit(&aij, &t[1], value_gen, 1, &tau1, gens->
blinding_gen);
212 secp256k1_ge_set_gej(&out_pt[3], &aij);
214 secp256k1_bulletproof_vector_commit(&aij, &t[3], value_gen, 1, &tau3, gens->
blinding_gen);
215 secp256k1_ge_set_gej(&out_pt[4], &aij);
217 secp256k1_bulletproof_vector_commit(&aij, &t[4], value_gen, 1, &tau4, gens->
blinding_gen);
218 secp256k1_ge_set_gej(&out_pt[5], &aij);
220 secp256k1_bulletproof_vector_commit(&aij, &t[5], value_gen, 1, &tau5, gens->
blinding_gen);
221 secp256k1_ge_set_gej(&out_pt[6], &aij);
223 secp256k1_bulletproof_vector_commit(&aij, &t[6], value_gen, 1, &tau6, gens->
blinding_gen);
224 secp256k1_ge_set_gej(&out_pt[7], &aij);
/* Challenge x from the T commitments; same overflow/zero rejection. */
227 secp256k1_bulletproof_update_commit_n(commit, &out_pt[3], 5);
228 secp256k1_scalar_set_b32(&x, commit, &overflow);
229 if (overflow || secp256k1_scalar_is_zero(&x)) {
230 secp256k1_scratch_deallocate_frame(scratch);
/* Scale each blinding factor by the matching power of x (xn tracks x^n),
 * then fold them into the blinding responses taux and mu. */
234 secp256k1_scalar_mul(&alpha, &alpha, &x);
235 secp256k1_scalar_mul(&tau1, &tau1, &x);
237 secp256k1_scalar_sqr(&xn, &x);
238 secp256k1_scalar_mul(&beta, &beta, &xn);
239 secp256k1_scalar_clear(&tauv);
/* tauv accumulates the committed-value blinds weighted by the circuit's
 * wv row; the surrounding loop header is absent from this excerpt. */
242 secp256k1_scalar_mul(&zwv, &comp_circ->
wv[i], &blinds[i]);
243 secp256k1_scalar_add(&tauv, &tauv, &zwv);
245 secp256k1_scalar_mul(&tauv, &tauv, &xn);
247 secp256k1_scalar_mul(&xn, &xn, &x);
248 secp256k1_scalar_mul(&rho, &rho, &xn);
249 secp256k1_scalar_mul(&tau3, &tau3, &xn);
251 secp256k1_scalar_mul(&xn, &xn, &x);
252 secp256k1_scalar_mul(&tau4, &tau4, &xn);
254 secp256k1_scalar_mul(&xn, &xn, &x);
255 secp256k1_scalar_mul(&tau5, &tau5, &xn);
257 secp256k1_scalar_mul(&xn, &xn, &x);
258 secp256k1_scalar_mul(&tau6, &tau6, &xn);
260 secp256k1_scalar_add(&taux, &tau1, &tauv);
261 secp256k1_scalar_add(&taux, &taux, &tau3);
262 secp256k1_scalar_add(&taux, &taux, &tau4);
263 secp256k1_scalar_add(&taux, &taux, &tau5);
264 secp256k1_scalar_add(&taux, &taux, &tau6);
266 secp256k1_scalar_add(&mu, &alpha, &beta);
267 secp256k1_scalar_add(&mu, &mu, &rho);
/* Negated here — presumably so the verifier can add rather than subtract
 * these terms in its multiexponentiation; TODO confirm in verify_impl. */
270 secp256k1_scalar_negate(&mu, &mu);
271 secp256k1_scalar_negate(&taux, &taux);
/* Serialize taux, mu, and the 8 points into the fixed-size proof header. */
274 secp256k1_scalar_get_b32(&proof[0], &taux);
275 secp256k1_scalar_get_b32(&proof[32], &mu);
276 secp256k1_bulletproof_serialize_points(&proof[64], out_pt, 8);
/* Re-key the transcript over the serialized header before the IP argument. */
279 secp256k1_sha256_initialize(&
sha256);
280 secp256k1_sha256_write(&
sha256, commit, 32);
281 secp256k1_sha256_write(&
sha256, proof, 64);
282 secp256k1_sha256_finalize(&
sha256, commit);
286 secp256k1_scalar_sqr(&abgh_data.
x2, &x);
288 abgh_data.
assn = assn;
/* Delegate the remainder of the proof to the inner-product argument; the
 * callback above supplies the committed vectors element by element. */
289 *plen -= 64 + 256 + 1;
290 if (secp256k1_bulletproof_inner_product_prove_impl(ecmult_ctx, scratch, &proof[64 + 256 + 1], plen, gens, &yinv, circ->
n_gates, secp256k1_bulletproof_circuit_abgh_callback, (
void *) &abgh_data, commit) == 0) {
291 secp256k1_scratch_deallocate_frame(scratch);
294 *plen += 64 + 256 + 1;
296 secp256k1_scratch_deallocate_frame(scratch);
/* NOTE(review): fragment of the verifier-side multiexponentiation callback
 * (referenced as secp256k1_bulletproof_circuit_vfy_callback below). The
 * function signature and large parts of the branch structure are missing
 * from this excerpt; comments describe only the visible arithmetic. */
323 if (idx < ctx->n_gates) {
/* First n_gates indices: sc = wr[idx] * x, scaled by the batch randomizer. */
324 secp256k1_scalar_mul(sc, &ctx->
comp_circ->
wr[idx], &ctx->
x);
325 secp256k1_scalar_mul(sc, sc, randomizer);
326 }
else if (idx < 2 * ctx->n_gates) {
/* Second range: starts from dot = -1; the intervening computation that
 * produces sc (original lines 327-329, 332-333) is absent here. */
330 secp256k1_scalar_set_int(&dot, 1);
331 secp256k1_scalar_negate(&dot, &dot);
334 secp256k1_scalar_mul(sc, sc, randomizer);
/* A later branch contributes -t (the negated inner-product value). */
340 secp256k1_scalar_negate(sc, &ctx->
t);
/* x^2 and x^3 terms for two of the static proof points. */
353 secp256k1_scalar_sqr(sc, &ctx->
x);
358 secp256k1_scalar_sqr(sc, &ctx->
x);
359 secp256k1_scalar_mul(sc, sc, &ctx->
x);
/* Remaining T_i points: sc = x^(count-1), built by repeated multiplication;
 * ctx->count tracks how many points this callback has already emitted. */
368 if (ctx->
count < 9) {
371 for (i = 0; i < ctx->
count - 3; i++) {
372 secp256k1_scalar_mul(sc, sc, &ctx->
x);
/* More than 9 per-proof points would be a caller bug, not bad proof data. */
380 VERIFY_CHECK(!
"bulletproof: too many points added by circuit_verify_impl to inner_product_verify_impl");
383 secp256k1_scalar_mul(sc, sc, randomizer);
/* Batch verifier for "relation 66" circuit proofs: recomputes each proof's
 * Fiat-Shamir transcript, parses its header, then runs ONE batched
 * inner-product verification over all n_proofs.
 * NOTE(review): like the rest of this excerpt, the body is a non-contiguous
 * sampling — every overflow/zero check below is followed by two consecutive
 * secp256k1_scratch_deallocate_frame calls with a gap in the embedded line
 * numbers between them, which strongly suggests `return 0;` lines (and one
 * of the two deallocate calls belonging to a different branch) were dropped
 * by the extraction. Recover the original before editing. */
389 static int secp256k1_bulletproof_relation66_verify_impl(
const secp256k1_ecmult_context *ecmult_ctx,
secp256k1_scratch *scratch,
const unsigned char*
const* proof,
size_t n_proofs,
size_t plen,
const secp256k1_ge*
const* commitp,
size_t *nc,
const secp256k1_ge *value_gen,
const secp256k1_bulletproof_circuit*
const* circ,
const secp256k1_bulletproof_generators *gens,
const unsigned char **extra_commit,
size_t *extra_commit_len) {
/* Proofs shorter than the fixed header (taux + mu + 8 points) are invalid. */
396 if (plen < 64 + 256 + 1) {
/* One scratch frame holds the per-proof ecmult and inner-product contexts. */
403 if (!secp256k1_scratch_allocate_frame(scratch, n_proofs * (
sizeof(*ecmult_data) +
sizeof(*innp_ctx)), 2)) {
409 secp256k1_scratch_deallocate_frame(scratch);
413 for (i = 0; i < n_proofs; i++) {
415 unsigned char randomizer82[32] = {0};
416 unsigned char commit[32] = {0};
/* Rebuild the prover's transcript: commitments, value generator, extras. */
423 secp256k1_bulletproof_update_commit_n(commit, commitp[i], nc[i]);
425 secp256k1_bulletproof_update_commit_n(commit, value_gen, 1);
426 if (extra_commit != NULL && extra_commit[i] != NULL) {
427 secp256k1_sha256_initialize(&
sha256);
428 secp256k1_sha256_write(&
sha256, commit, 32);
429 secp256k1_sha256_write(&
sha256, extra_commit[i], extra_commit_len[i]);
430 secp256k1_sha256_finalize(&
sha256, commit);
/* Deserialize the 8 header points: age[0..2] = A_I/A_O/S, tge[0..4] = T_i. */
434 secp256k1_bulletproof_deserialize_point(&ecmult_data[i].age[0], &proof[i][64], 0, 8);
435 secp256k1_bulletproof_deserialize_point(&ecmult_data[i].age[1], &proof[i][64], 1, 8);
436 secp256k1_bulletproof_deserialize_point(&ecmult_data[i].age[2], &proof[i][64], 2, 8);
437 secp256k1_bulletproof_deserialize_point(&ecmult_data[i].tge[0], &proof[i][64], 3, 8);
438 secp256k1_bulletproof_deserialize_point(&ecmult_data[i].tge[1], &proof[i][64], 4, 8);
439 secp256k1_bulletproof_deserialize_point(&ecmult_data[i].tge[2], &proof[i][64], 5, 8);
440 secp256k1_bulletproof_deserialize_point(&ecmult_data[i].tge[3], &proof[i][64], 6, 8);
441 secp256k1_bulletproof_deserialize_point(&ecmult_data[i].tge[4], &proof[i][64], 7, 8);
/* Re-derive challenges y, z, x exactly as the prover did, rejecting
 * overflow/zero encodings. */
444 secp256k1_bulletproof_update_commit_n(commit, ecmult_data[i].age, 3);
445 secp256k1_scalar_set_b32(&y, commit, &overflow);
446 if (overflow || secp256k1_scalar_is_zero(&y)) {
447 secp256k1_scratch_deallocate_frame(scratch);
448 secp256k1_scratch_deallocate_frame(scratch);
451 ecmult_data[i].
y = y;
452 secp256k1_scalar_inverse_var(&ecmult_data[i].yinv, &y);
453 secp256k1_bulletproof_update_commit_n(commit, NULL, 0);
454 secp256k1_scalar_set_b32(&ecmult_data[i].z, commit, &overflow);
455 if (overflow || secp256k1_scalar_is_zero(&ecmult_data[i].z)) {
456 secp256k1_scratch_deallocate_frame(scratch);
457 secp256k1_scratch_deallocate_frame(scratch);
461 secp256k1_bulletproof_update_commit_n(commit, ecmult_data[i].tge, 5);
462 secp256k1_scalar_set_b32(&ecmult_data[i].x, commit, &overflow);
463 if (overflow || secp256k1_scalar_is_zero(&ecmult_data[i].x)) {
464 secp256k1_scratch_deallocate_frame(scratch);
465 secp256k1_scratch_deallocate_frame(scratch);
/* Parse the scalar responses taux (offset 0), mu (offset 32) and the
 * inner-product value t (just past the point block). */
472 secp256k1_scalar_set_b32(&taux, &proof[i][0], &overflow);
473 if (overflow || secp256k1_scalar_is_zero(&taux)) {
474 secp256k1_scratch_deallocate_frame(scratch);
475 secp256k1_scratch_deallocate_frame(scratch);
478 secp256k1_scalar_set_b32(&mu, &proof[i][32], &overflow);
479 if (overflow || secp256k1_scalar_is_zero(&mu)) {
480 secp256k1_scratch_deallocate_frame(scratch);
481 secp256k1_scratch_deallocate_frame(scratch);
486 secp256k1_scalar_set_b32(&ecmult_data[i].t, &proof[i][64 + 256 + 1], &overflow);
487 if (overflow || secp256k1_scalar_is_zero(&ecmult_data[i].t)) {
488 secp256k1_scratch_deallocate_frame(scratch);
489 secp256k1_scratch_deallocate_frame(scratch);
/* Advance the transcript over the proof header, then hash once more to get
 * the per-proof batching randomizer (so forging one proof cannot cancel
 * another's contribution in the combined multiexponentiation). */
494 secp256k1_sha256_initialize(&
sha256);
495 secp256k1_sha256_write(&
sha256, commit, 32);
496 secp256k1_sha256_write(&
sha256, proof[i], 64);
497 secp256k1_sha256_finalize(&
sha256, commit);
499 secp256k1_sha256_initialize(&
sha256);
500 secp256k1_sha256_write(&
sha256, commit, 32);
501 secp256k1_sha256_finalize(&
sha256, randomizer82);
502 secp256k1_scalar_set_b32(&ecmult_data[i].randomizer82, randomizer82, &overflow);
503 if (overflow || secp256k1_scalar_is_zero(&ecmult_data[i].randomizer82)) {
504 secp256k1_scratch_deallocate_frame(scratch);
505 secp256k1_scratch_deallocate_frame(scratch);
/* Populate the per-proof callback context consumed by
 * secp256k1_bulletproof_circuit_vfy_callback above. */
510 ecmult_data[i].
count = 0;
516 ecmult_data[i].
commits = commitp[i];
/* Fold the randomized taux into mu; presumably this forms the combined
 * blinding-generator scalar for the batch — TODO confirm where the sum
 * is consumed (the consuming line is absent from this excerpt). */
525 secp256k1_scalar_mul(&taux, &taux, &ecmult_data[i].randomizer82);
526 secp256k1_scalar_add(&mu, &mu, &taux);
528 innp_ctx[i].
proof = &proof[i][64 + 256 + 1];
530 innp_ctx[i].
yinv = ecmult_data[i].
yinv;
531 memcpy(innp_ctx[i].commit, commit, 32);
532 innp_ctx[i].
rangeproof_cb = secp256k1_bulletproof_circuit_vfy_callback;
/* Single batched inner-product verification; note it sizes every proof by
 * circ[0]->n_gates, so all proofs in a batch share one circuit size. */
536 ret = secp256k1_bulletproof_inner_product_verify_impl(ecmult_ctx, scratch, gens, circ[0]->n_gates, innp_ctx, n_proofs, plen - (64 + 256 + 1), 1);
537 secp256k1_scratch_deallocate_frame(scratch);
538 secp256k1_scratch_deallocate_frame(scratch);