7 #ifndef SECP256K1_MODULE_BULLETPROOF_RANGEPROOF_IMPL
8 #define SECP256K1_MODULE_BULLETPROOF_RANGEPROOF_IMPL
/* NOTE(review): fragment of what appears to be the rangeproof verification
 * ecmult callback (secp256k1_bulletproof_rangeproof_vfy_callback, per the
 * reference later in this file). The enclosing signature and many interior
 * lines are missing from this fragment, so structural claims are hedged.
 * Comments only; code left byte-identical. */
/* ctx->z_randomized = z * randomizer: per-proof blinding of the z challenge. */
46 secp256k1_scalar_mul(&ctx->
z_randomized, &ctx->
z, randomizer);
51 }
/* Indices in [n, 2n) select scalars for the second half of the generator set. */
else if (idx < 2 * ctx->n) {
/* Map the flat generator index onto (commitment, bit) coordinates. */
53 const size_t commit_idx = (idx - ctx->
n) / nbits;
54 const size_t bit_idx = (idx - ctx->
n) % nbits;
/* tmp = z * yinvn; yinvn is presumably y^(-nbits) -- see the squaring loop
 * in the verify function below; confirm against the full source. */
59 secp256k1_scalar_mul(&tmp, &ctx->
z, &ctx->
yinvn);
/* Start from z^2 ... */
60 secp256k1_scalar_sqr(&ctx->
zsq, &ctx->
z);
/* ... and raise by (z * y^-n) once per preceding commitment. */
61 for (i = 0; i < commit_idx; i++) {
62 secp256k1_scalar_mul(&ctx->
zsq, &ctx->
zsq, &tmp);
/* Blind the emitted scalar with the per-proof randomizer. */
64 secp256k1_scalar_mul(&ctx->
zsq, &ctx->
zsq, randomizer);
/* Step the running power of y down by one (multiply by y^-1)... */
68 secp256k1_scalar_mul(&ctx->
zsq, &ctx->
zsq, &ctx->
yinv);
/* ...and double it (x + x == 2x), advancing the power-of-two term. */
69 secp256k1_scalar_add(&ctx->
zsq, &ctx->
zsq, &ctx->
zsq);
80 secp256k1_scalar_set_int(sc, 1);
/* Accumulators for the t(x) consistency check: twosum collects correction
 * terms, yn a running sum of powers of y, tmp the constant one. */
89 secp256k1_scalar_clear(&twosum);
90 secp256k1_scalar_clear(&yn);
91 secp256k1_scalar_set_int(&tmp, 1);
/* sc = z - z^2 (the (z - z^2) factor of the delta(y,z) term). */
93 secp256k1_scalar_sqr(&ctx->
zsq, &ctx->
z);
94 secp256k1_scalar_negate(sc, &ctx->
zsq);
95 secp256k1_scalar_add(sc, sc, &ctx->
z);
/* Build sum_{j<nbits} y^j (in yn) and sum_{j<nbits} 2^j (in twon). */
103 secp256k1_scalar_clear(&twon);
104 for (j = 0; j < nbits; j++) {
105 secp256k1_scalar_mul(&yn, &yn, &ctx->
y);
106 secp256k1_scalar_add(&twon, &twon, &twon);
108 secp256k1_scalar_add(&yn, &yn, &tmp);
109 secp256k1_scalar_add(&twon, &twon, &tmp);
/* negzn = z^2 * (-z) * z^i: signed per-commitment power of z. */
112 secp256k1_scalar_mul(&negzn, &ctx->
zsq, &ctx->
negz);
113 for (j = 0; j < i; j++) {
114 secp256k1_scalar_mul(&negzn, &negzn, &ctx->
z);
/* Fold in the minimum-value offset for commitment i: mv = min_value[i] * -z * z. */
118 secp256k1_scalar_set_int(&mv, ctx->
min_value[i]);
119 secp256k1_scalar_mul(&mv, &mv, &ctx->
negz);
120 secp256k1_scalar_mul(&mv, &mv, &ctx->
z);
/* NOTE(review): this loop scales negzn again, duplicating the loop at
 * (original) lines 113-115; in comparable upstream code the second loop
 * scales mv instead -- verify against the canonical source before trusting
 * this fragment. */
121 for (j = 0; j < i; j++) {
122 secp256k1_scalar_mul(&negzn, &negzn, &ctx->
z);
124 secp256k1_scalar_add(&twosum, &twosum, &mv);
126 secp256k1_scalar_mul(&twon, &twon, &negzn);
127 secp256k1_scalar_add(&twosum, &twosum, &twon);
/* sc = sc * sum(y^j) + twosum - t: ties the claimed t to delta(y,z). */
131 secp256k1_scalar_mul(sc, sc, &yn);
132 secp256k1_scalar_add(sc, sc, &twosum);
133 secp256k1_scalar_negate(&tmp, &ctx->
t);
134 secp256k1_scalar_add(sc, sc, &tmp);
/* sc = x^2: the scalar applied to the T2 point. */
146 secp256k1_scalar_sqr(sc, &ctx->
x);
/* Advance zsq to the next power of z for the following commitment. */
155 secp256k1_scalar_mul(&ctx->
zsq, &ctx->
zsq, &ctx->
z);
/* Every emitted scalar is blinded by the per-proof randomizer. */
159 secp256k1_scalar_mul(sc, sc, randomizer);
/* Batch verification of one or more aggregated Bulletproof rangeproofs.
 * Re-derives the Fiat-Shamir challenges (y, z, x) from a running SHA256
 * transcript and defers the heavy work to the inner-product verifier.
 * NOTE(review): many interior lines of this function are missing from this
 * fragment (gaps in the embedded original numbering); comments describe
 * only the visible code. Code left byte-identical. */
165 static int secp256k1_bulletproof_rangeproof_verify_impl(
const secp256k1_ecmult_context *ecmult_ctx,
secp256k1_scratch *scratch,
const unsigned char*
const* proof,
const size_t n_proofs,
const size_t plen,
size_t nbits,
const uint64_t*
const* min_value,
const secp256k1_ge*
const* commitp,
size_t n_commits,
const secp256k1_ge *value_gen,
const secp256k1_bulletproof_generators *gens,
const unsigned char*
const* extra_commit,
size_t *extra_commit_len) {
170 int same_generators = 1;
/* nbits must be a power of two (exactly one set bit) and within range. */
173 if (secp256k1_popcountl(nbits) != 1 || nbits >
MAX_NBITS) {
/* Minimum size: 64 (taux|mu) + 128+1 (four points + parity byte) + 32 (t). */
176 if (plen < 64 + 128 + 1 + 32) {
/* One ecmult_data plus one inner-product context per proof on the frame. */
183 if (!secp256k1_scratch_allocate_frame(scratch, n_proofs * (
sizeof(*ecmult_data) +
sizeof(*innp_ctx)), 2)) {
/* Detect whether all proofs share one value generator (enables batching). */
192 for (i = 1; i < n_proofs; i++) {
194 if (!secp256k1_fe_equal_var(&value_gen[i].x, &value_gen[i - 1].x) ||
195 !secp256k1_fe_equal_var(&value_gen[i].y, &value_gen[i - 1].y)) {
200 for (i = 0; i < n_proofs; i++) {
202 unsigned char commit[32] = {0};
203 unsigned char randomizer61[32] = {0};
/* Bind the per-commitment minimum values into the transcript. */
210 if (min_value != NULL && min_value[i] != NULL) {
211 unsigned char len[4];
212 secp256k1_sha256_initialize(&
sha256);
213 secp256k1_sha256_write(&
sha256, commit, 32);
/* n_commits serialized little-endian (the len[0] line is not visible here). */
215 len[1] = n_commits >> 8;
216 len[2] = n_commits >> 16;
217 len[3] = n_commits >> 24;
218 secp256k1_sha256_write(&
sha256, len, 4);
219 for (j = 0; j < n_commits; j++) {
220 unsigned char vbuf[8];
/* Each minimum value as 8 little-endian bytes. */
221 vbuf[0] = min_value[i][j];
222 vbuf[1] = min_value[i][j] >> 8;
223 vbuf[2] = min_value[i][j] >> 16;
224 vbuf[3] = min_value[i][j] >> 24;
225 vbuf[4] = min_value[i][j] >> 32;
226 vbuf[5] = min_value[i][j] >> 40;
227 vbuf[6] = min_value[i][j] >> 48;
228 vbuf[7] = min_value[i][j] >> 56;
229 secp256k1_sha256_write(&
sha256, vbuf, 8);
231 secp256k1_sha256_finalize(&
sha256, commit);
/* Commit each Pedersen commitment point and its value generator. */
233 for (j = 0; j < n_commits; j++) {
234 secp256k1_bulletproof_update_commit(commit, &commitp[i][j], &value_gen[i]);
/* Optional caller-supplied extra data bound into the transcript. */
236 if (extra_commit != NULL && extra_commit[i] != NULL) {
237 secp256k1_sha256_initialize(&
sha256);
238 secp256k1_sha256_write(&
sha256, commit, 32);
239 secp256k1_sha256_write(&
sha256, extra_commit[i], extra_commit_len[i]);
240 secp256k1_sha256_finalize(&
sha256, commit);
/* Deserialize the A and S points from the proof blob. */
244 if (!secp256k1_bulletproof_deserialize_point(&age, &proof[i][64], 0, 4) ||
245 !secp256k1_bulletproof_deserialize_point(&sge, &proof[i][64], 1, 4)) {
/* Challenge y: hash A,S into the transcript; reject zero/overflow
 * (a zero y is not invertible and breaks soundness). */
249 secp256k1_bulletproof_update_commit(commit, &age, &sge);
250 secp256k1_scalar_set_b32(&ecmult_data[i].y, commit, &overflow);
251 if (overflow || secp256k1_scalar_is_zero(&ecmult_data[i].y)) {
252 secp256k1_scratch_deallocate_frame(scratch);
/* Challenge z: second transcript update with the same A,S points. */
255 secp256k1_bulletproof_update_commit(commit, &age, &sge);
256 secp256k1_scalar_set_b32(&ecmult_data[i].z, commit, &overflow);
257 if (overflow || secp256k1_scalar_is_zero(&ecmult_data[i].z)) {
258 secp256k1_scratch_deallocate_frame(scratch);
/* Deserialize T1 and T2, then derive challenge x from them. */
262 if (!secp256k1_bulletproof_deserialize_point(&ecmult_data[i].t1, &proof[i][64], 2, 4) ||
263 !secp256k1_bulletproof_deserialize_point(&ecmult_data[i].t2, &proof[i][64], 3, 4)) {
267 secp256k1_bulletproof_update_commit(commit, &ecmult_data[i].t1, &ecmult_data[i].t2);
268 secp256k1_scalar_set_b32(&ecmult_data[i].x, commit, &overflow);
269 if (overflow || secp256k1_scalar_is_zero(&ecmult_data[i].x)) {
270 secp256k1_scratch_deallocate_frame(scratch);
/* yinv = y^-1; yinvn = y^-nbits via floor(log2(nbits)) squarings. */
275 secp256k1_scalar_inverse_var(&ecmult_data[i].yinv, &ecmult_data[i].y);
276 ecmult_data[i].
yinvn = ecmult_data[i].
yinv;
277 for (j = 0; j < secp256k1_floor_lg(nbits); j++) {
278 secp256k1_scalar_sqr(&ecmult_data[i].yinvn, &ecmult_data[i].yinvn);
280 secp256k1_scalar_sqr(&ecmult_data[i].zsq, &ecmult_data[i].z);
281 secp256k1_scalar_negate(&ecmult_data[i].negz, &ecmult_data[i].z);
/* Bind taux|mu (first 64 proof bytes) into the transcript... */
284 secp256k1_sha256_initialize(&
sha256);
285 secp256k1_sha256_write(&
sha256, commit, 32);
286 secp256k1_sha256_write(&
sha256, &proof[i][0], 64);
287 secp256k1_sha256_finalize(&
sha256, commit);
/* ...and derive the per-proof batching randomizer from it. */
289 secp256k1_sha256_initialize(&
sha256);
290 secp256k1_sha256_write(&
sha256, commit, 32);
291 secp256k1_sha256_finalize(&
sha256, randomizer61);
292 secp256k1_scalar_set_b32(&ecmult_data[i].randomizer61, randomizer61, &overflow);
293 if (overflow || secp256k1_scalar_is_zero(&ecmult_data[i].randomizer61)) {
294 secp256k1_scratch_deallocate_frame(scratch);
/* Parse taux, mu and t; each must be canonical and nonzero. */
299 secp256k1_scalar_set_b32(&taux, &proof[i][0], &overflow);
300 if (overflow || secp256k1_scalar_is_zero(&taux)) {
301 secp256k1_scratch_deallocate_frame(scratch);
304 secp256k1_scalar_set_b32(&mu, &proof[i][32], &overflow);
305 if (overflow || secp256k1_scalar_is_zero(&mu)) {
306 secp256k1_scratch_deallocate_frame(scratch);
311 secp256k1_scalar_set_b32(&ecmult_data[i].t, &proof[i][64 + 128 + 1], &overflow);
312 if (overflow || secp256k1_scalar_is_zero(&ecmult_data[i].t)) {
313 secp256k1_scratch_deallocate_frame(scratch);
/* Populate the context consumed by the rangeproof vfy callback. */
318 ecmult_data[i].
a = age;
319 ecmult_data[i].
s = sge;
320 ecmult_data[i].
n = nbits * n_commits;
321 ecmult_data[i].
count = 0;
322 ecmult_data[i].
asset = &value_gen[i];
323 ecmult_data[i].
min_value = min_value == NULL ? NULL : min_value[i];
324 ecmult_data[i].
commit = commitp[i];
/* Blind taux by the batching randomizer and fold it into mu. */
326 secp256k1_scalar_mul(&taux, &taux, &ecmult_data[i].randomizer61);
327 secp256k1_scalar_add(&mu, &mu, &taux);
/* The inner-product proof starts right after taux|mu|points|parity. */
329 innp_ctx[i].
proof = &proof[i][64 + 128 + 1];
331 memcpy(innp_ctx[i].commit, commit, 32);
332 innp_ctx[i].
yinv = ecmult_data[i].
yinv;
333 innp_ctx[i].
rangeproof_cb = secp256k1_bulletproof_rangeproof_vfy_callback;
/* Delegate to the batched inner-product verifier; the scratch frame is
 * deallocated on every path through this point. */
338 ret = secp256k1_bulletproof_inner_product_verify_impl(ecmult_ctx, scratch, gens, nbits * n_commits, innp_ctx, n_proofs, plen - (64 + 128 + 1), same_generators);
339 secp256k1_scratch_deallocate_frame(scratch);
/* NOTE(review): fragment of what appears to be the l(x)/r(x) generator
 * initialization used by the prover; the enclosing signature is not
 * visible here. Comments only; code untouched. */
357 generator->
nonce = nonce;
/* yn starts at y^0 = 1 and is advanced once per generated entry. */
360 secp256k1_scalar_set_int(&generator->
yn, 1);
361 generator->
nbits = nbits;
362 generator->
val = val;
364 generator->
n_vals = n_vals;
/* count indexes the next (commitment, bit) pair to be generated. */
365 generator->
count = 0;
/* NOTE(review): fragment of the per-entry l(x)/r(x) evaluator used by the
 * prover; the enclosing signature and some interior lines are missing.
 * Comments only; code untouched. */
/* Decompose the running counter into (commitment, bit) coordinates. */
369 const size_t commit_idx = generator->
count / generator->
nbits;
370 const size_t bit_idx = generator->
count % generator->
nbits;
/* Subtract the per-commitment minimum before extracting the bit. */
371 const uint64_t mv = generator->
min_val == NULL ? 0 : generator->
min_val[commit_idx];
372 const int bit = ((generator->
val[commit_idx] - mv) >> bit_idx) & 1;
/* z22n = z^(2+commit_idx): base power-of-z term for this commitment. */
378 secp256k1_scalar_sqr(&generator->
z22n, &generator->
z);
379 for (i = 0; i < commit_idx; i++) {
380 secp256k1_scalar_mul(&generator->
z22n, &generator->
z22n, &generator->
z);
/* Deterministic blinding scalars sl, sr for this position; the +2 offset
 * keeps them distinct from the (alpha,rho) and (tau1,tau2) draws at
 * indices 0 and 1. */
384 secp256k1_scalar_chacha20(&sl, &sr, generator->
nonce, generator->
count + 2);
385 secp256k1_scalar_mul(&sl, &sl, x);
386 secp256k1_scalar_mul(&sr, &sr, x);
/* l = bit - z + sl*x */
388 secp256k1_scalar_set_int(lout, bit);
389 secp256k1_scalar_negate(&negz, &generator->
z);
390 secp256k1_scalar_add(lout, lout, &negz);
391 secp256k1_scalar_add(lout, lout, &sl);
/* r = (z - (1 - bit) + sr*x) * y^n + z^(2+i) * 2^n */
393 secp256k1_scalar_set_int(rout, 1 - bit);
394 secp256k1_scalar_negate(rout, rout);
395 secp256k1_scalar_add(rout, rout, &generator->
z);
396 secp256k1_scalar_add(rout, rout, &sr);
397 secp256k1_scalar_mul(rout, rout, &generator->
yn);
398 secp256k1_scalar_add(rout, rout, &generator->
z22n);
/* Advance the running powers: yn *= y; z22n doubles (next power of two). */
401 secp256k1_scalar_mul(&generator->
yn, &generator->
yn, &generator->
y);
402 secp256k1_scalar_add(&generator->
z22n, &generator->
z22n, &generator->
z22n);
/* NOTE(review): fragment of the a/b callback feeding the inner-product
 * prover; enclosing signature not visible. Even indices appear to request
 * l(x) entries, odd ones r(x) -- confirm against the full source. */
413 const int is_g = idx % 2 == 0;
/* Pull the next l/r pair from the cached generator state at challenge x. */
417 secp256k1_lr_generate(&ctx->
lr_gen, sc, &ctx->
cache, &ctx->
x);
/* NOTE(review): fragment of what appears to be the rangeproof prover
 * (the line carrying the function name is missing from this fragment, as
 * are many interior lines). Comments annotate visible code only; code is
 * left byte-identical. */
433 unsigned char *proof,
size_t *plen,
const size_t nbits,
434 const uint64_t *value,
const uint64_t *min_value,
438 const unsigned char *extra_commit,
size_t extra_commit_len) {
443 unsigned char commit[32] = {0};
/* nbits must be a power of two and no larger than MAX_NBITS. */
458 if (secp256k1_popcountl(nbits) != 1 || nbits >
MAX_NBITS) {
/* Every value minus its minimum must fit in nbits bits. */
461 for (i = 0; i < n_commits; i++) {
462 uint64_t mv = min_value == NULL ? 0 : min_value[i];
466 if (nbits < 64 && (value[i] - mv) >= (1ull << nbits)) {
/* Output must at least hold taux|mu (64) + four points (128) + parity (1). */
470 if (*plen < 128 + 64 + 1) {
474 secp256k1_scalar_clear(&zero);
/* Bind the minimum values into the Fiat-Shamir transcript, mirroring the
 * verifier: n_commits then each min_value, little-endian. */
477 if (min_value != NULL) {
478 unsigned char len[4];
479 secp256k1_sha256_initialize(&
sha256);
480 secp256k1_sha256_write(&
sha256, commit, 32);
482 len[1] = n_commits >> 8;
483 len[2] = n_commits >> 16;
484 len[3] = n_commits >> 24;
485 secp256k1_sha256_write(&
sha256, len, 4);
486 for (i = 0; i < n_commits; i++) {
487 unsigned char vbuf[8];
488 vbuf[0] = min_value[i];
489 vbuf[1] = min_value[i] >> 8;
490 vbuf[2] = min_value[i] >> 16;
491 vbuf[3] = min_value[i] >> 24;
492 vbuf[4] = min_value[i] >> 32;
493 vbuf[5] = min_value[i] >> 40;
494 vbuf[6] = min_value[i] >> 48;
495 vbuf[7] = min_value[i] >> 56;
496 secp256k1_sha256_write(&
sha256, vbuf, 8);
498 secp256k1_sha256_finalize(&
sha256, commit);
/* Commit each Pedersen commitment and the value generator. */
500 for (i = 0; i < n_commits; i++) {
501 secp256k1_bulletproof_update_commit(commit, &commitp[i], value_gen);
503 if (extra_commit != NULL) {
504 secp256k1_sha256_initialize(&
sha256);
505 secp256k1_sha256_write(&
sha256, commit, 32);
506 secp256k1_sha256_write(&
sha256, extra_commit, extra_commit_len);
507 secp256k1_sha256_finalize(&
sha256, commit);
/* Deterministic blinders from the nonce: (alpha, rho) for A/S and
 * (tau1, tau2) for T1/T2. */
510 secp256k1_scalar_chacha20(&alpha, &rho, nonce, 0);
511 secp256k1_scalar_chacha20(&tau1, &tau2, nonce, 1);
/* Single-commitment case: fold -value into alpha -- presumably so the
 * rewind path can recover the value from mu; confirm against rewind_impl. */
513 if (n_commits == 1) {
515 secp256k1_scalar_set_u64(&vals, value[0]);
516 secp256k1_scalar_negate(&vals, &vals);
517 secp256k1_scalar_add(&alpha, &alpha, &vals);
/* A starts as alpha*BlindGen and S as rho*BlindGen; per-bit terms follow. */
521 secp256k1_ecmult_const(&aj, &gens->
blinding_gen[0], &alpha, 256);
522 secp256k1_ecmult_const(&sj, &gens->
blinding_gen[0], &rho, 256);
523 for (i = 0; i < n_commits; i++) {
524 for (j = 0; j < nbits; j++) {
526 uint64_t mv = min_value == NULL ? 0 : min_value[i];
527 size_t al = !!((value[i] - mv) & (1ull << j));
532 secp256k1_scalar_chacha20(&sl, &sr, nonce, i * nbits + j + 2);
/* Constant-time select via cmov: the bit value never drives a branch,
 * keeping the secret out of timing side channels. */
534 secp256k1_ge_neg(&aterm, &aterm);
535 secp256k1_fe_cmov(&aterm.
x, &gens->
gens[i * nbits + j].
x, al);
536 secp256k1_fe_cmov(&aterm.
y, &gens->
gens[i * nbits + j].
y, al);
538 secp256k1_gej_add_ge(&aj, &aj, &aterm);
/* S accumulates sl*G_i + sr*H_i (H_i is the second half of gens). */
540 secp256k1_ecmult_const(&stermj, &gens->
gens[i * nbits + j], &sl, 256);
541 secp256k1_ge_set_gej(&sterm, &stermj);
542 secp256k1_gej_add_ge(&sj, &sj, &sterm);
543 secp256k1_ecmult_const(&stermj, &gens->
gens[i * nbits + j + gens->
n/2], &sr, 256);
544 secp256k1_ge_set_gej(&sterm, &stermj);
545 secp256k1_gej_add_ge(&sj, &sj, &sterm);
550 secp256k1_ge_set_gej(&out_pt[0], &aj);
551 secp256k1_ge_set_gej(&out_pt[1], &sj);
/* Challenges y then z from two successive transcript updates with A,S;
 * zero/overflow challenges are rejected. */
553 secp256k1_bulletproof_update_commit(commit, &out_pt[0], &out_pt[1]);
554 secp256k1_scalar_set_b32(&y, commit, &overflow);
555 if (overflow || secp256k1_scalar_is_zero(&y)) {
558 secp256k1_bulletproof_update_commit(commit, &out_pt[0], &out_pt[1]);
559 secp256k1_scalar_set_b32(&z, commit, &overflow);
560 if (overflow || secp256k1_scalar_is_zero(&z)) {
563 secp256k1_scalar_sqr(&zsq, &z);
/* t0 = <l(0), r(0)>: evaluate l/r at x=0 and sum the products. */
567 secp256k1_lr_generator_init(&lr_gen, nonce, &y, &z, nbits, value, min_value, n_commits);
568 secp256k1_scalar_clear(&t0);
569 for (i = 0; i < nbits * n_commits; i++) {
571 secp256k1_lr_generate(&lr_gen, &l, &
r, &zero);
572 secp256k1_scalar_mul(&l, &l, &
r);
573 secp256k1_scalar_add(&t0, &t0, &l);
/* t1 initially holds t(1) = <l(1), r(1)>; interpolated below. */
577 secp256k1_lr_generator_init(&lr_gen, nonce, &y, &z, nbits, value, min_value, n_commits);
578 secp256k1_scalar_clear(&t1);
579 for (i = 0; i < nbits * n_commits; i++) {
582 secp256k1_scalar_set_int(&one, 1);
583 secp256k1_lr_generate(&lr_gen, &l, &
r, &one);
584 secp256k1_scalar_mul(&l, &l, &
r);
585 secp256k1_scalar_add(&t1, &t1, &l);
/* t2 initially holds t(-1) = <l(-1), r(-1)>. */
589 secp256k1_lr_generator_init(&lr_gen, nonce, &y, &z, nbits, value, min_value, n_commits);
590 secp256k1_scalar_clear(&t2);
591 for (i = 0; i < nbits * n_commits; i++) {
594 secp256k1_scalar_set_int(&negone, 1);
595 secp256k1_scalar_negate(&negone, &negone);
596 secp256k1_lr_generate(&lr_gen, &l, &
r, &negone);
597 secp256k1_scalar_mul(&l, &l, &
r);
598 secp256k1_scalar_add(&t2, &t2, &l);
/* Interpolate the degree-2 polynomial t(x) from its values at 0, 1, -1:
 * t1 := (t(1) - t(-1)) / 2, then t2 := t(1) - t0 - t1. */
602 secp256k1_scalar_set_int(&tmps, 2);
603 secp256k1_scalar_inverse_var(&tmps, &tmps);
604 secp256k1_scalar_negate(&t2, &t2);
605 secp256k1_scalar_add(&t1, &t1, &t2);
606 secp256k1_scalar_mul(&t1, &t1, &tmps);
609 secp256k1_scalar_add(&t2, &t2, &t0);
610 secp256k1_scalar_negate(&t2, &t2);
611 secp256k1_scalar_add(&t2, &t2, &t1);
/* T1 = t1*ValueGen + tau1*BlindGen */
614 secp256k1_ecmult_const(&tmpj, value_gen, &t1, 256);
615 secp256k1_ge_set_gej(&out_pt[2], &tmpj);
616 secp256k1_ecmult_const(&tmpj, &gens->
blinding_gen[0], &tau1, 256);
617 secp256k1_gej_add_ge(&tmpj, &tmpj, &out_pt[2]);
618 secp256k1_ge_set_gej(&out_pt[2], &tmpj);
/* T2 = t2*ValueGen + tau2*BlindGen */
620 secp256k1_ecmult_const(&tmpj, value_gen, &t2, 256);
621 secp256k1_ge_set_gej(&out_pt[3], &tmpj);
622 secp256k1_ecmult_const(&tmpj, &gens->
blinding_gen[0], &tau2, 256);
623 secp256k1_gej_add_ge(&tmpj, &tmpj, &out_pt[3]);
624 secp256k1_ge_set_gej(&out_pt[3], &tmpj);
/* Challenge x from T1, T2; rejected when zero or non-canonical. */
627 secp256k1_bulletproof_update_commit(commit, &out_pt[2], &out_pt[3]);
628 secp256k1_scalar_set_b32(&x, commit, &overflow);
629 if (overflow || secp256k1_scalar_is_zero(&x)) {
632 secp256k1_scalar_sqr(&xsq, &x);
/* taux = tau1*x + tau2*x^2 + sum_i z^(2+i)*blind_i */
635 secp256k1_scalar_mul(&taux, &tau1, &x);
636 secp256k1_scalar_mul(&tmps, &tau2, &xsq);
637 secp256k1_scalar_add(&taux, &taux, &tmps);
638 for (i = 0; i < n_commits; i++) {
639 secp256k1_scalar_mul(&tmps, &zsq, &blind[i]);
640 secp256k1_scalar_add(&taux, &taux, &tmps);
641 secp256k1_scalar_mul(&zsq, &zsq, &z);
/* mu = rho*x + alpha */
644 secp256k1_scalar_mul(&mu, &rho, &x);
645 secp256k1_scalar_add(&mu, &mu, &alpha);
/* NOTE(review): taux and mu are negated before serialization -- presumably
 * to match the verifier's sign convention; confirm against the verifier. */
648 secp256k1_scalar_negate(&taux, &taux);
649 secp256k1_scalar_negate(&mu, &mu);
/* Serialize taux | mu | A,S,T1,T2 (4 points with a shared parity byte). */
652 secp256k1_scalar_get_b32(&proof[0], &taux);
653 secp256k1_scalar_get_b32(&proof[32], &mu);
654 secp256k1_bulletproof_serialize_points(&proof[64], out_pt, 4);
/* Fold taux|mu into the transcript before the inner-product proof. */
657 secp256k1_sha256_initialize(&
sha256);
658 secp256k1_sha256_write(&
sha256, commit, 32);
659 secp256k1_sha256_write(&
sha256, proof, 64);
660 secp256k1_sha256_finalize(&
sha256, commit);
/* Hand the l/r generator to the inner-product prover; the remaining
 * *plen bytes after the fixed header hold the inner-product proof. */
664 secp256k1_lr_generator_init(&abgh_data.
lr_gen, nonce, &y, &z, nbits, value, min_value, n_commits);
665 *plen -= 64 + 128 + 1;
666 secp256k1_scalar_inverse_var(&y, &y);
667 if (secp256k1_bulletproof_inner_product_prove_impl(ecmult_ctx, scratch, &proof[64 + 128 + 1], plen, gens, &y, nbits * n_commits, secp256k1_bulletproof_abgh_callback, (
void *) &abgh_data, commit) == 0) {
/* Restore the full length on success (header + inner-product proof). */
670 *plen += 64 + 128 + 1;
/* Rewinds a single-commitment rangeproof: a party knowing the nonce can
 * recover the committed value and blinding factor from the proof alone.
 * Returns nonzero iff the recomputed commitment matches pcommit.
 * NOTE(review): interior lines are missing from this fragment (gaps in the
 * embedded numbering); comments annotate visible code only. */
675 static int secp256k1_bulletproof_rangeproof_rewind_impl(uint64_t *value,
secp256k1_scalar *blind,
const unsigned char *proof,
const size_t plen, uint64_t min_value,
const secp256k1_pedersen_commitment *pcommit,
const secp256k1_generator *value_gen,
const secp256k1_ge *blind_gen,
const unsigned char *nonce,
const unsigned char *extra_commit,
size_t extra_commit_len) {
677 static const unsigned char zero24[24] = { 0 };
678 unsigned char commit[32] = { 0 };
679 unsigned char lrparity;
/* Parse taux and mu; both must be canonical and nonzero. */
692 secp256k1_scalar_set_b32(&taux, &proof[0], &overflow);
693 if (overflow || secp256k1_scalar_is_zero(&taux)) {
696 secp256k1_scalar_set_b32(&mu, &proof[32], &overflow);
697 if (overflow || secp256k1_scalar_is_zero(&mu)) {
/* Re-derive the prover's deterministic blinders from the shared nonce. */
701 secp256k1_scalar_chacha20(&alpha, &rho, nonce, 0);
702 secp256k1_scalar_chacha20(&tau1, &tau2, nonce, 1);
/* Replay the transcript: one (n_commits == 1) min_value entry,
 * little-endian, exactly as the prover hashed it. */
705 unsigned char vbuf[8];
706 const unsigned char len[4] = { 1, 0, 0, 0 };
708 vbuf[1] = min_value >> 8;
709 vbuf[2] = min_value >> 16;
710 vbuf[3] = min_value >> 24;
711 vbuf[4] = min_value >> 32;
712 vbuf[5] = min_value >> 40;
713 vbuf[6] = min_value >> 48;
714 vbuf[7] = min_value >> 56;
715 secp256k1_sha256_initialize(&
sha256);
716 secp256k1_sha256_write(&
sha256, commit, 32);
717 secp256k1_sha256_write(&
sha256, len, 4);
718 secp256k1_sha256_write(&
sha256, vbuf, 8);
719 secp256k1_sha256_finalize(&
sha256, commit);
/* Commit the Pedersen commitment and value generator, as the prover did. */
722 secp256k1_pedersen_commitment_load(&commitp, pcommit);
723 secp256k1_generator_load(&value_genp, value_gen);
724 secp256k1_bulletproof_update_commit(commit, &commitp, &value_genp);
726 if (extra_commit != NULL) {
727 secp256k1_sha256_initialize(&
sha256);
728 secp256k1_sha256_write(&
sha256, commit, 32);
729 secp256k1_sha256_write(&
sha256, extra_commit, extra_commit_len);
730 secp256k1_sha256_finalize(&
sha256, commit);
/* Rebuild the challenge transcript from the serialized A,S: the parity
 * byte proof[64] packs the y-coordinate signs (bits 0 and 1 for A,S). */
734 lrparity = 2 * !!(proof[64] & 1) + !!(proof[64] & 2);
736 secp256k1_sha256_initialize(&
sha256);
737 secp256k1_sha256_write(&
sha256, commit, 32);
738 secp256k1_sha256_write(&
sha256, &lrparity, 1);
739 secp256k1_sha256_write(&
sha256, &proof[65], 64);
740 secp256k1_sha256_finalize(&
sha256, commit);
/* Second identical update yields z, mirroring the prover's two updates
 * (the first produced y, which this path does not need to keep). */
743 secp256k1_sha256_initialize(&
sha256);
744 secp256k1_sha256_write(&
sha256, commit, 32);
745 secp256k1_sha256_write(&
sha256, &lrparity, 1);
746 secp256k1_sha256_write(&
sha256, &proof[65], 64);
747 secp256k1_sha256_finalize(&
sha256, commit);
749 secp256k1_scalar_set_b32(&z, commit, &overflow);
750 if (overflow || secp256k1_scalar_is_zero(&z)) {
/* Challenge x from the serialized T1,T2 (parity bits 2 and 3). */
755 lrparity = 2 * !!(proof[64] & 4) + !!(proof[64] & 8);
756 secp256k1_sha256_initialize(&
sha256);
757 secp256k1_sha256_write(&
sha256, commit, 32);
758 secp256k1_sha256_write(&
sha256, &lrparity, 1);
759 secp256k1_sha256_write(&
sha256, &proof[129], 64);
760 secp256k1_sha256_finalize(&
sha256, commit);
762 secp256k1_scalar_set_b32(&x, commit, &overflow);
763 if (overflow || secp256k1_scalar_is_zero(&x)) {
/* Strip the blinders from mu: mu + rho*x + alpha leaves the committed
 * value (the prover folded -value into alpha when n_commits == 1). */
768 secp256k1_scalar_mul(&rho, &rho, &x);
769 secp256k1_scalar_add(&mu, &mu, &rho);
770 secp256k1_scalar_add(&mu, &mu, &alpha);
/* The recovered scalar must have its top 24 bytes zero; the value sits in
 * the low 8 bytes, big-endian. */
772 secp256k1_scalar_get_b32(commit, &mu);
773 if (memcmp(commit, zero24, 24) != 0) {
776 *value = commit[31] + ((uint64_t) commit[30] << 8) +
777 ((uint64_t) commit[29] << 16) + ((uint64_t) commit[28] << 24) +
778 ((uint64_t) commit[27] << 32) + ((uint64_t) commit[26] << 40) +
779 ((uint64_t) commit[25] << 48) + ((uint64_t) commit[24] << 56);
/* Strip tau1*x + tau2*x^2 from taux, then divide by z^2 and negate to
 * recover the blinding factor. */
782 secp256k1_scalar_mul(&tau1, &tau1, &x);
783 secp256k1_scalar_mul(&tau2, &tau2, &x);
784 secp256k1_scalar_mul(&tau2, &tau2, &x);
786 secp256k1_scalar_add(&taux, &taux, &tau1);
787 secp256k1_scalar_add(&taux, &taux, &tau2);
789 secp256k1_scalar_sqr(&z, &z);
790 secp256k1_scalar_inverse_var(&z, &z);
791 secp256k1_scalar_mul(blind, &taux, &z);
792 secp256k1_scalar_negate(blind, blind);
/* Sanity check: the recomputed commitment must equal the supplied one
 * (their difference must be the point at infinity). */
795 secp256k1_pedersen_ecmult(&rewind_commitj, blind, *value, &value_genp, blind_gen);
796 secp256k1_gej_neg(&rewind_commitj, &rewind_commitj);
797 secp256k1_gej_add_ge_var(&rewind_commitj, &rewind_commitj, &commitp, NULL);
799 return secp256k1_gej_is_infinity(&rewind_commitj);