7 #ifndef _SECP256K1_RANGEPROOF_IMPL_H_
8 #define _SECP256K1_RANGEPROOF_IMPL_H_
/* (fragment — function header and several interior lines are elided in this
 * chunk) Tail of secp256k1_rangeproof_pub_expand: fills in the remaining
 * ring public keys by repeatedly adding a multiple of -genp, so that each
 * ring member corresponds to one candidate base-4 digit value. */
21 int exp,
size_t *rsizes,
size_t rings,
const secp256k1_ge* genp) {
/* base = -genp: negated so each addition effectively subtracts one unit of
 * the committed value from the candidate public key. */
30 secp256k1_gej_set_ge(&base, genp);
31 secp256k1_gej_neg(&base, &base);
/* Multiply base by 10 via a double/add chain: tmp = 2B, base = 4B, base = 8B,
 * base = 8B + 2B = 10B.  Presumably executed once per decimal exponent step
 * (exp) — the surrounding loop is elided here, TODO confirm. */
35 secp256k1_gej_double_var(&tmp, &base, NULL);
36 secp256k1_gej_double_var(&base, &tmp, NULL);
37 secp256k1_gej_double_var(&base, &base, NULL);
38 secp256k1_gej_add_var(&base, &base, &tmp, NULL);
/* Within each ring, successive pubkeys differ by `base`; after a ring the
 * base is quadrupled (two doublings) to move to the next base-4 digit. */
41 for (i = 0; i < rings; i++) {
42 for (j = 1; j < rsizes[i]; j++) {
43 secp256k1_gej_add_var(&pubs[npub + j], &pubs[npub + j - 1], &base, NULL);
46 secp256k1_gej_double_var(&base, &base, NULL);
47 secp256k1_gej_double_var(&base, &base, NULL);
/* (fragment) Core of secp256k1_rangeproof_serialize_point: 33-byte point
 * encoding.  data[0] is 1 iff the y coordinate is NOT a quadratic residue
 * (a compact "sign" bit), followed by the normalized 32-byte x coordinate. */
56 secp256k1_fe_normalize(&pointx);
57 data[0] = !secp256k1_fe_is_quad_var(&point->
y);
58 secp256k1_fe_get_b32(data + 1, &pointx);
/* (fragment — function name line and some interior lines elided)
 * secp256k1_rangeproof_genrand: deterministically derives all per-ring
 * blinding scalars (sec) and per-pubkey signature scalars (s) from an
 * RFC6979 HMAC-SHA256 stream seeded with nonce||commit||genp||proof-header.
 * The stream output is XORed with `message`, embedding the message bytes
 * into the s values so they can later be recovered by rewinding. */
size_t *rsizes,
size_t rings,
const unsigned char *nonce,
const secp256k1_ge *commit,
const unsigned char *proof,
size_t len,
const secp256k1_ge* genp) {
63 unsigned char tmp[32];
/* Seed layout: nonce(32) || commit(33) || genp(33) || proof prefix.
 * NOTE(review): the buffer leaves only 10 bytes for the proof prefix, so
 * `len` is presumably <= 10 (header only) — confirm at call sites. */
64 unsigned char rngseed[32 + 33 + 33 + 10];
74 memcpy(rngseed, nonce, 32);
75 secp256k1_rangeproof_serialize_point(rngseed + 32, commit);
76 secp256k1_rangeproof_serialize_point(rngseed + 32 + 33, genp);
77 memcpy(rngseed + 33 + 33 + 32, proof, len);
78 secp256k1_rfc6979_hmac_sha256_initialize(&rng, rngseed, 32 + 33 + 33 + len);
79 secp256k1_scalar_clear(&acc);
82 for (i = 0; i < rings; i++) {
84 secp256k1_rfc6979_hmac_sha256_generate(&rng, tmp, 32);
/* Rejection-sample sec[i]: regenerate until the 32 bytes parse as a
 * nonzero, non-overflowing scalar (tail of a do { ... } while loop whose
 * opening is elided). */
86 secp256k1_rfc6979_hmac_sha256_generate(&rng, tmp, 32);
87 secp256k1_scalar_set_b32(&sec[i], tmp, &overflow);
88 }
while (overflow || secp256k1_scalar_is_zero(&sec[i]));
/* acc accumulates the sec values; negated so the final ring's secret can
 * cancel the earlier rings' blinding (sum of all sec values is controlled). */
89 secp256k1_scalar_add(&acc, &acc, &sec[i]);
91 secp256k1_scalar_negate(&acc, &acc);
94 for (j = 0; j < rsizes[i]; j++) {
95 secp256k1_rfc6979_hmac_sha256_generate(&rng, tmp, 32);
/* XOR the RNG output with the message block for this pubkey slot, and
 * write the ciphertext back into `message` (stream-cipher style). */
97 for (
b = 0;
b < 32;
b++) {
98 tmp[
b] ^= message[(i * 4 + j) * 32 +
b];
99 message[(i * 4 + j) * 32 +
b] = tmp[
b];
/* ret becomes 0 if any derived s scalar is zero or overflows — the
 * caller must treat that as failure. */
102 secp256k1_scalar_set_b32(&s[npub], tmp, &overflow);
103 ret &= !(overflow || secp256k1_scalar_is_zero(&s[npub]));
/* Wipe RNG state and the accumulator before returning. */
107 secp256k1_rfc6979_hmac_sha256_finalize(&rng);
108 secp256k1_scalar_clear(&acc);
/* (fragment — interior lines elided) Computes the derived proof parameters
 * from the requested range: the shifted value *v, the number of rings and
 * their sizes, the total pubkey count, the secret index within each ring,
 * the mantissa bit-length, and the decimal scale implied by *exp.
 * Returns nonzero on success (return statements elided in this chunk). */
SECP256K1_INLINE static int secp256k1_range_proveparams(uint64_t *v,
size_t *rings,
size_t *rsizes,
size_t *npub,
size_t *secidx, uint64_t *min_value,
114 int *mantissa, uint64_t *scale,
int *exp,
int *min_bits, uint64_t value) {
/* min_value == UINT64_MAX is treated specially (branch body elided). */
122 if (*min_value == UINT64_MAX) {
/* Reject ranges whose endpoints cannot be represented in the signed-64
 * domain the proof format supports. */
129 if ((*min_value && value > INT64_MAX) || (value && *min_value >= INT64_MAX)) {
/* NOTE(review): max_bits is taken from clz64(*min_value) here — i.e. the
 * leading-zero count bounds how many bits the proven range may use. */
133 max_bits = *min_value ? secp256k1_clz64_var(*min_value) : 64;
134 if (*min_bits > max_bits) {
135 *min_bits = max_bits;
137 if (*min_bits > 61 || value > INT64_MAX) {
/* *v is the value relative to the (public) minimum. */
146 *v = value - *min_value;
/* v2 = all-ones mask of *min_bits bits, then scaled by 10^exp (capped by
 * the overflow guard in the loop condition). */
148 v2 = *min_bits ? (UINT64_MAX>>(64-*min_bits)) : 0;
149 for (i = 0; (int) i < *exp && (v2 <= UINT64_MAX / 10); i++) {
155 for (i = 0; (int) i < *exp; i++) {
160 *min_value = value - v2;
/* mantissa = bit-length of *v (at least 1), widened to min_bits if the
 * caller asked for more bits than the value needs. */
162 *mantissa = *v ? 64 - secp256k1_clz64_var(*v) : 1;
163 if (*min_bits > *mantissa) {
165 *mantissa = *min_bits;
/* One ring per base-4 digit: rings = ceil(mantissa / 2).  All rings have
 * 4 members except the last, which has 2 when mantissa is odd. */
168 *rings = (*mantissa + 1) >> 1;
169 for (i = 0; i < *rings; i++) {
170 rsizes[i] = ((i < *rings - 1) | (!(*mantissa&1))) ? 4 : 2;
/* secidx[i] is ring i's base-4 digit of the shifted value. */
172 secidx[i] = (*v >> (i*2)) & 3;
/* Sanity: *v must fit in `mantissa` bits. */
175 VERIFY_CHECK((*v & ~(UINT64_MAX>>(64-*mantissa))) == 0);
/* (fragment — function name line and many interior lines elided) Rangeproof
 * signing: writes the proof header, commits all ring pubkeys into a SHA256
 * transcript, derives deterministic nonces, and produces a Borromean ring
 * signature over the rings.  *plen is in/out: capacity on entry, presumably
 * final proof length on exit (the final assignment is elided — confirm). */
193 unsigned char *proof,
size_t *plen, uint64_t min_value,
194 const secp256k1_ge *commit,
const unsigned char *blind,
const unsigned char *nonce,
int exp,
int min_bits, uint64_t value,
195 const unsigned char *message,
size_t msg_len,
const unsigned char *extra_commit,
size_t extra_commit_len,
const secp256k1_ge* genp){
202 unsigned char prep[4096];
203 unsigned char tmp[33];
204 unsigned char *signs;
/* Argument validation: 65 bytes is the minimum proof size; exp in [-1,18]. */
216 if (*plen < 65 || min_value > value || min_bits > 64 || min_bits < 0 || exp < -1 || exp > 18) {
219 if (!secp256k1_range_proveparams(&v, &rings, rsizes, &npub, secidx, &min_value, &mantissa, &scale, &exp, &min_bits, value)) {
/* Header byte 0: bit6 = has-nonzero-range (with exp in the low bits),
 * bit5 = has-explicit-min; header byte 1 (when present): mantissa - 1. */
222 proof[len] = (rsizes[0] > 1 ? (64 | exp) : 0) | (min_value ? 32 : 0);
226 proof[len] = mantissa - 1;
/* min_value serialized big-endian into 8 bytes. */
230 for (i = 0; i < 8; i++) {
231 proof[len + i] = (min_value >> ((7-i) * 8)) & 255;
/* Message capacity is 128 bytes per ring except the last. */
239 if (msg_len > 0 && msg_len > 128 * (rings - 1)) {
/* Capacity check: 32 bytes per s value and per serialized ring pubkey
 * (first ring pubkey is implied), plus 32 for e0 and the sign bitmap. */
243 if (*plen - len < 32 * (npub + rings - 1) + 32 + ((rings+6) >> 3)) {
/* Transcript: H(commit || genp || header...). */
246 secp256k1_sha256_initialize(&sha256_m);
247 secp256k1_rangeproof_serialize_point(tmp, commit);
248 secp256k1_sha256_write(&sha256_m, tmp, 33);
249 secp256k1_rangeproof_serialize_point(tmp, genp);
250 secp256k1_sha256_write(&sha256_m, tmp, 33);
251 secp256k1_sha256_write(&sha256_m, proof, len);
253 memset(prep, 0, 4096);
254 if (message != NULL) {
255 memcpy(prep, message, msg_len);
/* Embed the value (big-endian, replicated at offsets 8/16/24) into the
 * prep block of a non-secret slot of the last ring, so that rewinding
 * with the nonce can recover the committed value later. */
258 if (rsizes[rings - 1] > 1) {
261 idx = rsizes[rings - 1] - 1;
262 idx -= secidx[rings - 1] == idx;
263 idx = ((rings - 1) * 4 + idx) * 32;
264 for (i = 0; i < 8; i++) {
265 prep[8 + i + idx] = prep[16 + i + idx] = prep[24 + i + idx] = (v >> (56 - i * 8)) & 255;
270 if (!secp256k1_rangeproof_genrand(sec, s, prep, rsizes, rings, nonce, commit, proof, len, genp)) {
273 memset(prep, 0, 4096);
/* The s value at each ring's secret index becomes that ring's signing
 * nonce k; the slot is cleared so borromean_sign recomputes it. */
274 for (i = 0; i < rings; i++) {
276 k[i] = s[i * 4 + secidx[i]];
277 secp256k1_scalar_clear(&s[i * 4 + secidx[i]]);
/* Fold the caller's blinding factor into the last ring's secret so the
 * ring secrets sum to the commitment's blinding factor. */
285 secp256k1_scalar_set_b32(&stmp, blind, &overflow);
286 secp256k1_scalar_add(&sec[rings - 1], &sec[rings - 1], &stmp);
287 if (overflow || secp256k1_scalar_is_zero(&sec[rings - 1])) {
292 for (i = 0; i < (rings + 6) >> 3; i++) {
/* Compute each ring's first pubkey: sec[i]*G + (digit * scale * 4^i)*genp. */
297 for (i = 0; i < rings; i++) {
299 secp256k1_pedersen_ecmult(&pubs[npub], &sec[i], ((uint64_t)secidx[i] * scale) << (i*2), genp, &secp256k1_ge_const_g);
300 if (secp256k1_gej_is_infinity(&pubs[npub])) {
304 unsigned char tmpc[33];
306 unsigned char quadness;
/* Serialize each ring pubkey: y-quadness goes into the packed sign
 * bitmap, the x coordinate into the proof body and the transcript. */
309 secp256k1_ge_set_gej_var(&c, &pubs[npub]);
310 secp256k1_rangeproof_serialize_point(tmpc, &c);
312 secp256k1_sha256_write(&sha256_m, tmpc, 33);
313 signs[i>>3] |= quadness << (i&7);
314 memcpy(&proof[len], tmpc + 1, 32);
/* Derive the remaining ring members from the first of each ring. */
319 secp256k1_rangeproof_pub_expand(pubs, exp, rsizes, rings, genp);
320 if (extra_commit != NULL) {
321 secp256k1_sha256_write(&sha256_m, extra_commit, extra_commit_len);
/* The transcript hash is the Borromean signature message. */
323 secp256k1_sha256_finalize(&sha256_m, tmp);
324 if (!
secp256k1_borromean_sign(ecmult_ctx, ecmult_gen_ctx, &proof[len], s, pubs, k, sec, rsizes, secidx, rings, tmp, 32)) {
/* Serialize all s scalars into the proof.
 * NOTE(review): the `len` increment between iterations is elided here. */
328 for (i = 0; i < npub; i++) {
329 secp256k1_scalar_get_b32(&proof[len],&s[i]);
/* Wipe the message/prep buffer before returning.  NOTE(review): plain
 * memset of a dead buffer may be elided by the optimizer; a
 * memset_s/explicit_bzero-style wipe would be more robust — flagging only,
 * as the project may handle this elsewhere. */
334 memset(prep, 0, 4096);
/* (fragment) Core of secp256k1_rangeproof_recover_x: solves the Borromean
 * relation s = k - x*e for the secret x, i.e. x = (k - s) * e^{-1}. */
342 secp256k1_scalar_negate(x, s);
343 secp256k1_scalar_add(x, x, k);
344 secp256k1_scalar_inverse(&stmp, e);
345 secp256k1_scalar_mul(x, x, &stmp);
/* (fragment) Core of secp256k1_rangeproof_recover_k: inverse of recover_x —
 * recomputes the nonce k = s + x*e from a known secret x. */
352 secp256k1_scalar_mul(&stmp, x, e);
353 secp256k1_scalar_add(k, s, &stmp);
/* (fragment — loop body elided) XORs a 32-byte chunk of y into x in place;
 * presumably x[i] ^= y[i] for all 32 bytes — the body line is not visible
 * in this chunk. */
356 SECP256K1_INLINE static void secp256k1_rangeproof_ch32xor(
unsigned char *x,
const unsigned char *y) {
358 for (i = 0; i < 32; i++) {
/* (fragment — function name line and interior lines elided)
 * secp256k1_rangeproof_rewind_inner: re-derives the prover's deterministic
 * randomness from the nonce and "rewinds" the proof, recovering the blinding
 * factor, the committed value, and the embedded message. */
size_t *rsizes,
size_t rings,
const unsigned char *nonce,
const secp256k1_ge *commit,
const unsigned char *proof,
size_t len,
const secp256k1_ge *genp) {
369 unsigned char prep[4096];
370 unsigned char tmp[32];
379 npub = ((rings - 1) << 2) + rsizes[rings-1];
382 memset(prep, 0, 4096);
/* Regenerate the prover's sec/s_orig stream (prep is zero, so s_orig holds
 * the raw keystream scalars). */
384 secp256k1_rangeproof_genrand(sec, s_orig, prep, rsizes, rings, nonce, commit, proof, len, genp);
386 secp256k1_scalar_clear(blind);
/* Degenerate single-member case: the blind is recovered directly. */
387 if (rings == 1 && rsizes[0] == 1) {
389 secp256k1_rangeproof_recover_x(blind, &s_orig[0], &ev[0], &s[0]);
/* Probe the last ring's top two slots for the embedded value marker:
 * XOR off the keystream, then check the signature pattern (top bit set,
 * bytes 8..15 == 16..23 == 24..31) left by the signer. */
398 npub = (rings - 1) << 2;
399 for (j = 0; j < 2; j++) {
402 idx = npub + rsizes[rings - 1] - 1 - j;
403 secp256k1_scalar_get_b32(tmp, &s[idx]);
404 secp256k1_rangeproof_ch32xor(tmp, &prep[idx * 32]);
405 if ((tmp[0] & 128) && (memcmp(&tmp[16], &tmp[24], 8) == 0) && (memcmp(&tmp[8], &tmp[16], 8) == 0)) {
/* Decode the big-endian value from the last 8 bytes of the marker. */
407 for (i = 0; i < 8; i++) {
408 value = (value << 8) + tmp[24 + i];
413 memcpy(&prep[idx * 32], tmp, 32);
/* skip1 = the slot carrying the marker; skip2 = the last ring's true
 * secret index implied by the recovered value.  They must differ. */
424 skip1 = rsizes[rings - 1] - 1 - j;
425 skip2 = ((value >> ((rings - 1) << 1)) & 3);
426 if (skip1 == skip2) {
433 skip1 += (rings - 1) << 2;
434 skip2 += (rings - 1) << 2;
/* Recover the last ring's secret from its true slot, then back out the
 * deterministic component to obtain the caller's blinding factor. */
436 secp256k1_rangeproof_recover_x(&stmp, &s_orig[skip2], &ev[skip2], &s[skip2]);
437 secp256k1_scalar_negate(&sec[rings - 1], &sec[rings - 1]);
438 secp256k1_scalar_add(blind, &stmp, &sec[rings - 1]);
/* No message output requested — presumably an early-out path (body elided). */
439 if (!m || !mlen || *mlen == 0) {
/* Message extraction: for every non-skipped slot, recompute the prover's
 * nonce and XOR off the keystream to reveal the embedded message bytes. */
448 for (i = 0; i < rings; i++) {
450 idx = (value >> (i << 1)) & 3;
451 for (j = 0; j < rsizes[i]; j++) {
452 if (npub == skip1 || npub == skip2) {
461 secp256k1_rangeproof_recover_k(&stmp, &sec[i], &ev[npub], &s[npub]);
465 secp256k1_scalar_get_b32(tmp, &stmp);
466 secp256k1_rangeproof_ch32xor(tmp, &prep[npub * 32]);
467 for (
b = 0;
b < 32 && offset < *mlen;
b++) {
/* Wipe all recovered secret material before returning. */
475 memset(prep, 0, 4096);
476 for (i = 0; i < 128; i++) {
477 secp256k1_scalar_clear(&s_orig[i]);
479 for (i = 0; i < 32; i++) {
480 secp256k1_scalar_clear(&sec[i]);
482 secp256k1_scalar_clear(&stmp);
/* (fragment — interior lines elided) Parses the rangeproof header: control
 * byte (bit7 must be clear, bit6 = nonzero range, bit5 = explicit min,
 * low 5 bits = decimal exponent), optional mantissa byte, optional 8-byte
 * big-endian min_value.  Produces exp/mantissa/scale and the [min,max]
 * range; advances *offset past the header.  Returns nonzero on success
 * (return statements elided in this chunk). */
SECP256K1_INLINE static int secp256k1_rangeproof_getheader_impl(
size_t *offset,
int *exp,
int *mantissa, uint64_t *scale,
487 uint64_t *min_value, uint64_t *max_value,
const unsigned char *proof,
size_t plen) {
/* Minimum proof size is 65 bytes; the top header bit is reserved/invalid. */
491 if (plen < 65 || ((proof[*offset] & 128) != 0)) {
494 has_nz_range = proof[*offset] & 64;
495 has_min = proof[*offset] & 32;
499 *exp = proof[*offset] & 31;
/* Mantissa byte stores (mantissa - 1), so the encodable range is 1..64. */
504 *mantissa = proof[*offset] + 1;
505 if (*mantissa > 64) {
508 *max_value = UINT64_MAX>>(64-*mantissa);
/* Scale max_value by 10^exp, rejecting overflow before each multiply. */
514 for (i = 0; i < *exp; i++) {
515 if (*max_value > UINT64_MAX / 10) {
/* Explicit minimum: 8 bytes big-endian must remain in the buffer. */
523 if(plen - *offset < 8) {
527 for (i = 0; i < 8; i++) {
528 *min_value = (*min_value << 8) | proof[*offset + i];
/* max_value is shifted up by min_value; reject if that would overflow. */
532 if (*max_value > UINT64_MAX - *min_value) {
535 *max_value += *min_value;
/* (fragment — function name line elided; body runs past the end of this
 * chunk) Rangeproof verification (and, when a nonce is supplied, rewind):
 * parses the header, reconstructs the ring public keys from the proof,
 * verifies the Borromean signature transcript, and optionally rewinds to
 * recover blind/value/message and cross-checks them against the commitment. */
542 unsigned char *blindout, uint64_t *value_out,
unsigned char *message_out,
size_t *outlen,
const unsigned char *nonce,
543 uint64_t *min_value, uint64_t *max_value,
const secp256k1_ge *commit,
const unsigned char *proof,
size_t plen,
const unsigned char *extra_commit,
size_t extra_commit_len,
const secp256k1_ge* genp) {
559 int offset_post_header;
561 unsigned char signs[31];
563 const unsigned char *e0;
565 if (!secp256k1_rangeproof_getheader_impl(&offset, &exp, &mantissa, &scale, min_value, max_value, proof, plen)) {
/* Remember where the header ends: the rewind step re-derives randomness
 * over the header-prefixed proof. */
568 offset_post_header = offset;
/* rings = floor(mantissa/2) here; the odd-mantissa last ring is accounted
 * for separately (elided lines adjust rings/rsizes/npub). */
573 rings = (mantissa >> 1);
574 for (i = 0; i < rings; i++) {
577 npub = (mantissa >> 1) << 2;
580 npub += rsizes[rings];
/* Size check mirrors the signer's capacity check. */
585 if (plen - offset < 32 * (npub + rings - 1) + 32 + ((rings+6) >> 3)) {
/* Rebuild the signer's transcript: H(commit || genp || header...). */
588 secp256k1_sha256_initialize(&sha256_m);
589 secp256k1_rangeproof_serialize_point(m, commit);
590 secp256k1_sha256_write(&sha256_m, m, 33);
591 secp256k1_rangeproof_serialize_point(m, genp);
592 secp256k1_sha256_write(&sha256_m, m, 33);
593 secp256k1_sha256_write(&sha256_m, proof, offset);
/* Unpack the per-ring y-quadness bits. */
594 for(i = 0; i < rings - 1; i++) {
595 signs[i] = (proof[offset + ( i>> 3)] & (1 << (i & 7))) != 0;
597 offset += (rings + 6) >> 3;
/* Reject nonzero padding bits in the final sign byte (malleability guard). */
598 if ((rings - 1) & 7) {
600 if ((proof[offset - 1] >> ((rings - 1) & 7)) != 0) {
605 secp256k1_gej_set_infinity(&accj);
/* Start the accumulator at min_value * genp so the final ring absorbs the
 * public minimum. */
608 secp256k1_scalar_set_u64(&mvs, *min_value);
609 secp256k1_ecmult_const(&accj, genp, &mvs, 64);
610 secp256k1_scalar_clear(&mvs);
/* Deserialize each explicit ring pubkey from its x coordinate, choosing
 * the y with the recorded quadness, feed it to the transcript, and sum
 * them all into accj. */
612 for(i = 0; i < rings - 1; i++) {
614 if (!secp256k1_fe_set_b32(&fe, &proof[offset]) ||
615 !secp256k1_ge_set_xquad(&c, &fe)) {
619 secp256k1_ge_neg(&c, &c);
623 secp256k1_sha256_write(&sha256_m, &signs[i], 1);
624 secp256k1_sha256_write(&sha256_m, &proof[offset], 32);
625 secp256k1_gej_set_ge(&pubs[npub], &c);
626 secp256k1_gej_add_ge_var(&accj, &accj, &c, NULL);
/* The last ring's implied pubkey is commit minus the sum of the others —
 * this is what forces the ring digits to encode the committed value. */
630 secp256k1_gej_neg(&accj, &accj);
631 secp256k1_gej_add_ge_var(&pubs[npub], &accj, commit, NULL);
632 if (secp256k1_gej_is_infinity(&pubs[npub])) {
635 secp256k1_rangeproof_pub_expand(pubs, exp, rsizes, rings, genp);
636 npub += rsizes[rings - 1];
/* Deserialize all s scalars from the proof tail. */
639 for (i = 0; i < npub; i++) {
640 secp256k1_scalar_set_b32(&s[i], &proof[offset], &overflow);
/* The proof must be exactly consumed — trailing bytes are rejected. */
646 if (offset != plen) {
650 if (extra_commit != NULL) {
651 secp256k1_sha256_write(&sha256_m, extra_commit, extra_commit_len);
653 secp256k1_sha256_finalize(&sha256_m, m);
659 if (!ecmult_gen_ctx) {
/* Rewind path: recover blind/value/message from the nonce, ... */
662 if (!secp256k1_rangeproof_rewind_inner(&blind, &vv, message_out, outlen, evalues, s, rsizes, rings, nonce, commit, proof, offset_post_header, genp)) {
/* ... then check blind*G + (value*scale + min)*genp == commit exactly. */
667 vv = (vv * scale) + *min_value;
668 secp256k1_pedersen_ecmult(&accj, &blind, vv, genp, &secp256k1_ge_const_g);
669 if (secp256k1_gej_is_infinity(&accj)) {
672 secp256k1_gej_neg(&accj, &accj);
673 secp256k1_gej_add_ge_var(&accj, &accj, commit, NULL);
674 if (!secp256k1_gej_is_infinity(&accj)) {
678 secp256k1_scalar_get_b32(blindout, &blind);