diff --git a/.github/actions/install-homebrew-valgrind/action.yml b/.github/actions/install-homebrew-valgrind/action.yml index ce10eb268..e9aa61508 100644 --- a/.github/actions/install-homebrew-valgrind/action.yml +++ b/.github/actions/install-homebrew-valgrind/action.yml @@ -16,7 +16,7 @@ runs: cat valgrind_fingerprint shell: bash - - uses: actions/cache@v4 + - uses: actions/cache@v5 id: cache with: path: ${{ env.CI_HOMEBREW_CELLAR_VALGRIND }} diff --git a/.github/actions/run-in-docker-action/action.yml b/.github/actions/run-in-docker-action/action.yml index bbbcf324c..f0eb9810c 100644 --- a/.github/actions/run-in-docker-action/action.yml +++ b/.github/actions/run-in-docker-action/action.yml @@ -13,9 +13,9 @@ inputs: runs: using: "composite" steps: - - uses: docker/setup-buildx-action@v3 + - uses: docker/setup-buildx-action@v4 - - uses: docker/build-push-action@v6 + - uses: docker/build-push-action@v7 id: main_builder continue-on-error: true with: @@ -24,7 +24,7 @@ runs: load: true cache-from: type=gha,scope=${{ inputs.scope }} - - uses: docker/build-push-action@v6 + - uses: docker/build-push-action@v7 id: retry_builder if: steps.main_builder.outcome == 'failure' with: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d6ff8142b..040d3f19b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -81,14 +81,14 @@ jobs: run: echo "period=$((10#$(date +%V) / 4))" >> "$GITHUB_OUTPUT" - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v4 with: # See: https://github.com/moby/buildkit/issues/3969. 
driver-opts: | network=host - name: Build container - uses: docker/build-push-action@v6 + uses: docker/build-push-action@v7 with: file: ./ci/linux-debian.Dockerfile cache-from: type=gha,scope=${{ runner.arch }}-${{ steps.cache_timestamp.outputs.period }} diff --git a/include/secp256k1.h b/include/secp256k1.h index 9de45f1f7..b7ec6a228 100644 --- a/include/secp256k1.h +++ b/include/secp256k1.h @@ -6,6 +6,7 @@ extern "C" { #endif #include +#include /** Unless explicitly stated all pointer arguments must not be NULL. * @@ -404,6 +405,46 @@ SECP256K1_API void secp256k1_context_set_error_callback( const void *data ) SECP256K1_ARG_NONNULL(1); +/** A pointer to a function implementing SHA256's internal compression function. + * + * This function processes one or more contiguous 64-byte message blocks and + * updates the internal SHA256 state accordingly. The function is not responsible + * for counting consumed blocks or bytes, nor for performing padding. + * + * In/Out: state: pointer to eight 32-bit words representing the current internal state; + * the state is updated in place. + * In: blocks64: pointer to concatenation of n_blocks blocks, of 64 bytes each. + * no alignment guarantees are made for this pointer. + * n_blocks: number of contiguous 64-byte blocks to process. + */ +typedef void (*secp256k1_sha256_compression_function)( + uint32_t *state, + const unsigned char *blocks64, + size_t n_blocks +); + +/** + * Set a callback function to override the internal SHA256 compression function. + * + * This installs a function to replace the built-in block-compression + * step used by the library's internal SHA256 implementation. + * The provided callback must exactly implement the effect of n_blocks + * repeated applications of the SHA256 compression function. + * + * This API exists to support environments that wish to route the + * SHA256 compression step through a hardware-accelerated or otherwise + * specialized implementation. 
It is NOT meant for replacing SHA256 + * with a different hash function. + * + * Args: ctx: pointer to a context object. + * In: fn_compression: pointer to a function implementing the compression function; + * passing NULL restores the default implementation. + */ +SECP256K1_API void secp256k1_context_set_sha256_compression( + secp256k1_context *ctx, + secp256k1_sha256_compression_function fn_compression +) SECP256K1_ARG_NONNULL(1); + /** Parse a variable-length public key into the pubkey object. * * Returns: 1 if the public key was fully valid. diff --git a/src/bench.c b/src/bench.c index de7fef94c..f561ad1c9 100644 --- a/src/bench.c +++ b/src/bench.c @@ -183,8 +183,7 @@ int main(int argc, char** argv) { "ecdsa_recover", "schnorrsig", "schnorrsig_verify", "schnorrsig_sign", "ec", "keygen", "ec_keygen", "ellswift", "encode", "ellswift_encode", "decode", "ellswift_decode", "ellswift_keygen", "ellswift_ecdh"}; - size_t valid_args_size = sizeof(valid_args)/sizeof(valid_args[0]); - int invalid_args = have_invalid_args(argc, argv, valid_args, valid_args_size); + int invalid_args = have_invalid_args(argc, argv, valid_args, ARRAY_SIZE(valid_args)); int default_iters = 20000; int iters = get_iters(default_iters); diff --git a/src/bench_ecmult.c b/src/bench_ecmult.c index 7393730d9..eb546db41 100644 --- a/src/bench_ecmult.c +++ b/src/bench_ecmult.c @@ -259,7 +259,7 @@ static void bench_ecmult_multi_teardown(void* arg, int iters) { } } -static void generate_scalar(uint32_t num, secp256k1_scalar* scalar) { +static void generate_scalar(const secp256k1_context *ctx, uint32_t num, secp256k1_scalar* scalar) { secp256k1_sha256 sha256; unsigned char c[10] = {'e', 'c', 'm', 'u', 'l', 't', 0, 0, 0, 0}; unsigned char buf[32]; @@ -269,8 +269,8 @@ static void generate_scalar(uint32_t num, secp256k1_scalar* scalar) { c[8] = num >> 16; c[9] = num >> 24; secp256k1_sha256_initialize(&sha256); - secp256k1_sha256_write(&sha256, c, sizeof(c)); - secp256k1_sha256_finalize(&sha256, buf); + 
secp256k1_sha256_write(secp256k1_get_hash_context(ctx), &sha256, c, sizeof(c)); + secp256k1_sha256_finalize(secp256k1_get_hash_context(ctx), &sha256, buf); secp256k1_scalar_set_b32(scalar, buf, &overflow); CHECK(!overflow); } @@ -362,7 +362,7 @@ int main(int argc, char **argv) { secp256k1_gej_set_ge(&data.pubkeys_gej[0], &secp256k1_ge_const_g); secp256k1_scalar_set_int(&data.seckeys[0], 1); for (i = 0; i < POINTS; ++i) { - generate_scalar(i, &data.scalars[i]); + generate_scalar(data.ctx, i, &data.scalars[i]); if (i) { secp256k1_gej_double_var(&data.pubkeys_gej[i], &data.pubkeys_gej[i - 1], NULL); secp256k1_scalar_add(&data.seckeys[i], &data.seckeys[i - 1], &data.seckeys[i - 1]); diff --git a/src/bench_internal.c b/src/bench_internal.c index 7539ccbbb..bdfc33804 100644 --- a/src/bench_internal.c +++ b/src/bench_internal.c @@ -38,6 +38,7 @@ static void help(const char *executable_path, int default_iters) { } typedef struct { + const secp256k1_context* ctx; secp256k1_scalar scalar[2]; secp256k1_fe fe[4]; secp256k1_ge ge[2]; @@ -82,6 +83,9 @@ static void bench_setup(void* arg) { } }; + /* Customize context if needed */ + data->ctx = secp256k1_context_static; + secp256k1_scalar_set_b32(&data->scalar[0], init[0], NULL); secp256k1_scalar_set_b32(&data->scalar[1], init[1], NULL); secp256k1_fe_set_b32_limit(&data->fe[0], init[0]); @@ -371,11 +375,12 @@ static void bench_sha256(void* arg, int iters) { int i; bench_inv *data = (bench_inv*)arg; secp256k1_sha256 sha; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(data->ctx); for (i = 0; i < iters; i++) { secp256k1_sha256_initialize(&sha); - secp256k1_sha256_write(&sha, data->data, 32); - secp256k1_sha256_finalize(&sha, data->data); + secp256k1_sha256_write(hash_ctx, &sha, data->data, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, data->data); } } @@ -383,11 +388,12 @@ static void bench_hmac_sha256(void* arg, int iters) { int i; bench_inv *data = (bench_inv*)arg; secp256k1_hmac_sha256 hmac; + const 
secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(data->ctx); for (i = 0; i < iters; i++) { - secp256k1_hmac_sha256_initialize(&hmac, data->data, 32); - secp256k1_hmac_sha256_write(&hmac, data->data, 32); - secp256k1_hmac_sha256_finalize(&hmac, data->data); + secp256k1_hmac_sha256_initialize(hash_ctx, &hmac, data->data, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, data->data, 32); + secp256k1_hmac_sha256_finalize(hash_ctx, &hmac, data->data); } } @@ -395,10 +401,11 @@ static void bench_rfc6979_hmac_sha256(void* arg, int iters) { int i; bench_inv *data = (bench_inv*)arg; secp256k1_rfc6979_hmac_sha256 rng; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(data->ctx); for (i = 0; i < iters; i++) { - secp256k1_rfc6979_hmac_sha256_initialize(&rng, data->data, 64); - secp256k1_rfc6979_hmac_sha256_generate(&rng, data->data, 32); + secp256k1_rfc6979_hmac_sha256_initialize(hash_ctx, &rng, data->data, 64); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, data->data, 32); } } diff --git a/src/bench_whitelist.c b/src/bench_whitelist.c index 46de7d5ac..ecaa77b3b 100644 --- a/src/bench_whitelist.c +++ b/src/bench_whitelist.c @@ -51,15 +51,17 @@ static void run_test(bench_data* data, int iters) { static void generate_scalar(secp256k1_scalar *scalar, unsigned char *seckey, uint32_t num) { secp256k1_sha256 sha256; + secp256k1_hash_ctx hash_ctx; unsigned char c[13] = {'w','h','i','t','e','l','i','s','t', 0, 0, 0, 0}; int is_valid; + secp256k1_hash_ctx_init(&hash_ctx); c[9] = num; c[10] = num >> 8; c[11] = num >> 16; c[12] = num >> 24; secp256k1_sha256_initialize(&sha256); - secp256k1_sha256_write(&sha256, c, sizeof(c)); - secp256k1_sha256_finalize(&sha256, seckey); + secp256k1_sha256_write(&hash_ctx, &sha256, c, sizeof(c)); + secp256k1_sha256_finalize(&hash_ctx, &sha256, seckey); is_valid = secp256k1_scalar_set_b32_seckey(scalar, seckey); CHECK(is_valid); } diff --git a/src/eccommit.h b/src/eccommit.h index 6bb110399..4932ad1cc 100644 --- 
a/src/eccommit.h +++ b/src/eccommit.h @@ -17,7 +17,7 @@ static int secp256k1_ec_pubkey_tweak_add_helper(const secp256k1_ecmult_context* * pubp->infinity before calling this function. */ static int secp256k1_ec_commit_pubkey_serialize_const(secp256k1_ge *pubp, unsigned char *buf33); /** Compute an ec commitment tweak as hash(pubkey, data). */ -static int secp256k1_ec_commit_tweak(unsigned char *tweak32, secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size); +static int secp256k1_ec_commit_tweak(const secp256k1_hash_ctx *hash_ctx, unsigned char *tweak32, secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size); /** Compute an ec commitment as pubkey + hash(pubkey, data)*G. */ static int secp256k1_ec_commit(const secp256k1_ecmult_context* ecmult_ctx, secp256k1_ge* commitp, const secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size); /** Compute a secret key commitment as seckey + hash(pubkey, data). */ diff --git a/src/eccommit_impl.h b/src/eccommit_impl.h index 2dc24257b..8f273bc5b 100644 --- a/src/eccommit_impl.h +++ b/src/eccommit_impl.h @@ -25,42 +25,42 @@ static int secp256k1_ec_commit_pubkey_serialize_const(secp256k1_ge *pubp, unsign } /* Compute an ec commitment tweak as hash(pubp, data). 
*/ -static int secp256k1_ec_commit_tweak(unsigned char *tweak32, secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size) +static int secp256k1_ec_commit_tweak(const secp256k1_hash_ctx *hash_ctx, unsigned char *tweak32, secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size) { unsigned char rbuf[33]; if (!secp256k1_ec_commit_pubkey_serialize_const(pubp, rbuf)) { return 0; } - secp256k1_sha256_write(sha, rbuf, sizeof(rbuf)); - secp256k1_sha256_write(sha, data, data_size); - secp256k1_sha256_finalize(sha, tweak32); + secp256k1_sha256_write(hash_ctx, sha, rbuf, sizeof(rbuf)); + secp256k1_sha256_write(hash_ctx, sha, data, data_size); + secp256k1_sha256_finalize(hash_ctx, sha, tweak32); return 1; } /* Compute an ec commitment as pubp + hash(pubp, data)*G. */ -static int secp256k1_ec_commit(secp256k1_ge* commitp, const secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size) { +static int secp256k1_ec_commit(const secp256k1_hash_ctx *hash_ctx, secp256k1_ge* commitp, const secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size) { unsigned char tweak[32]; *commitp = *pubp; - return secp256k1_ec_commit_tweak(tweak, commitp, sha, data, data_size) + return secp256k1_ec_commit_tweak(hash_ctx, tweak, commitp, sha, data, data_size) && secp256k1_ec_pubkey_tweak_add_helper(commitp, tweak); } /* Compute the seckey of an ec commitment from the original secret key of the pubkey as seckey + * hash(pubp, data). 
*/ -static int secp256k1_ec_commit_seckey(secp256k1_scalar* seckey, secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size) { +static int secp256k1_ec_commit_seckey(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar* seckey, secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size) { unsigned char tweak[32]; - return secp256k1_ec_commit_tweak(tweak, pubp, sha, data, data_size) + return secp256k1_ec_commit_tweak(hash_ctx, tweak, pubp, sha, data, data_size) && secp256k1_ec_seckey_tweak_add_helper(seckey, tweak); } /* Verify an ec commitment as pubp + hash(pubp, data)*G ?= commitment. */ -static int secp256k1_ec_commit_verify(const secp256k1_ge* commitp, const secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size) { +static int secp256k1_ec_commit_verify(const secp256k1_hash_ctx *hash_ctx, const secp256k1_ge* commitp, const secp256k1_ge* pubp, secp256k1_sha256* sha, const unsigned char *data, size_t data_size) { secp256k1_gej pj; secp256k1_ge p; - if (!secp256k1_ec_commit(&p, pubp, sha, data, data_size)) { + if (!secp256k1_ec_commit(hash_ctx, &p, pubp, sha, data, data_size)) { return 0; } diff --git a/src/ecdsa_impl.h b/src/ecdsa_impl.h index ce36e85e6..163539ebc 100644 --- a/src/ecdsa_impl.h +++ b/src/ecdsa_impl.h @@ -196,6 +196,7 @@ static int secp256k1_ecdsa_sig_verify(const secp256k1_scalar *sigr, const secp25 unsigned char c[32]; secp256k1_scalar sn, u1, u2; #if !defined(EXHAUSTIVE_TEST_ORDER) + int range; secp256k1_fe xr; #endif secp256k1_gej pubkeyj; @@ -226,9 +227,16 @@ static int secp256k1_ecdsa_sig_verify(const secp256k1_scalar *sigr, const secp25 return secp256k1_scalar_eq(sigr, &computed_r); } #else + + /* Interpret sigr as a field element xr */ secp256k1_scalar_get_b32(c, sigr); - /* we can ignore the fe_set_b32_limit return value, because we know the input is in range */ - (void)secp256k1_fe_set_b32_limit(&xr, c); + range = secp256k1_fe_set_b32_limit(&xr, c); 
+#ifdef VERIFY + /* We know that c is in range; it comes from a scalar. */ + VERIFY_CHECK(range); +#else + (void)range; +#endif /** We now have the recomputed R point in pr, and its claimed x coordinate (modulo n) * in xr. Naively, we would extract the x coordinate from pr (requiring a inversion modulo p), diff --git a/src/eckey_impl.h b/src/eckey_impl.h index 48745e8fe..57024e409 100644 --- a/src/eckey_impl.h +++ b/src/eckey_impl.h @@ -86,7 +86,7 @@ static int secp256k1_eckey_pubkey_tweak_mul(secp256k1_ge *key, const secp256k1_s } secp256k1_gej_set_ge(&pt, key); - secp256k1_ecmult(&pt, &pt, tweak, &secp256k1_scalar_zero); + secp256k1_ecmult(&pt, &pt, tweak, NULL); secp256k1_ge_set_gej(key, &pt); return 1; } diff --git a/src/ecmult.h b/src/ecmult.h index 8d0a9f490..342195d92 100644 --- a/src/ecmult.h +++ b/src/ecmult.h @@ -40,7 +40,10 @@ /** The number of entries a table with precomputed multiples needs to have. */ #define ECMULT_TABLE_SIZE(w) ((size_t)1 << ((w)-2)) -/** Double multiply: R = na*A + ng*G */ +/** Double multiply: R = na*A + ng*G + * + * Passing NULL as ng is equivalent to the zero scalar but a tiny bit faster. 
+ */ static void secp256k1_ecmult(secp256k1_gej *r, const secp256k1_gej *a, const secp256k1_scalar *na, const secp256k1_scalar *ng); typedef int (secp256k1_ecmult_multi_callback)(secp256k1_scalar *sc, secp256k1_ge *pt, size_t idx, void *data); diff --git a/src/ecmult_gen.h b/src/ecmult_gen.h index 43dd10c38..8bc4f14c3 100644 --- a/src/ecmult_gen.h +++ b/src/ecmult_gen.h @@ -7,6 +7,7 @@ #ifndef SECP256K1_ECMULT_GEN_H #define SECP256K1_ECMULT_GEN_H +#include "hash.h" #include "scalar.h" #include "group.h" @@ -132,12 +133,12 @@ typedef struct { secp256k1_fe proj_blind; } secp256k1_ecmult_gen_context; -static void secp256k1_ecmult_gen_context_build(secp256k1_ecmult_gen_context* ctx); +static void secp256k1_ecmult_gen_context_build(secp256k1_ecmult_gen_context* ctx, const secp256k1_hash_ctx *hash_ctx); static void secp256k1_ecmult_gen_context_clear(secp256k1_ecmult_gen_context* ctx); /** Multiply with the generator: R = a*G */ static void secp256k1_ecmult_gen(const secp256k1_ecmult_gen_context* ctx, secp256k1_gej *r, const secp256k1_scalar *a); -static void secp256k1_ecmult_gen_blind(secp256k1_ecmult_gen_context *ctx, const unsigned char *seed32); +static void secp256k1_ecmult_gen_blind(secp256k1_ecmult_gen_context *ctx, const secp256k1_hash_ctx *hash_ctx, const unsigned char *seed32); #endif /* SECP256K1_ECMULT_GEN_H */ diff --git a/src/ecmult_gen_impl.h b/src/ecmult_gen_impl.h index 2159eed5e..5a954977e 100644 --- a/src/ecmult_gen_impl.h +++ b/src/ecmult_gen_impl.h @@ -14,8 +14,8 @@ #include "hash_impl.h" #include "precomputed_ecmult_gen.h" -static void secp256k1_ecmult_gen_context_build(secp256k1_ecmult_gen_context *ctx) { - secp256k1_ecmult_gen_blind(ctx, NULL); +static void secp256k1_ecmult_gen_context_build(secp256k1_ecmult_gen_context *ctx, const secp256k1_hash_ctx *hash_ctx) { + secp256k1_ecmult_gen_blind(ctx, hash_ctx, NULL); ctx->built = 1; } @@ -282,7 +282,7 @@ static void secp256k1_ecmult_gen(const secp256k1_ecmult_gen_context *ctx, secp25 } /* Setup 
blinding values for secp256k1_ecmult_gen. */ -static void secp256k1_ecmult_gen_blind(secp256k1_ecmult_gen_context *ctx, const unsigned char *seed32) { +static void secp256k1_ecmult_gen_blind(secp256k1_ecmult_gen_context *ctx, const secp256k1_hash_ctx *hash_ctx, const unsigned char *seed32) { secp256k1_scalar b; secp256k1_scalar diff; secp256k1_gej gb; @@ -309,17 +309,17 @@ static void secp256k1_ecmult_gen_blind(secp256k1_ecmult_gen_context *ctx, const */ VERIFY_CHECK(seed32 != NULL); memcpy(keydata + 32, seed32, 32); - secp256k1_rfc6979_hmac_sha256_initialize(&rng, keydata, 64); + secp256k1_rfc6979_hmac_sha256_initialize(hash_ctx, &rng, keydata, 64); secp256k1_memclear_explicit(keydata, sizeof(keydata)); /* Compute projective blinding factor (cannot be 0). */ - secp256k1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, nonce32, 32); secp256k1_fe_set_b32_mod(&f, nonce32); secp256k1_fe_cmov(&f, &secp256k1_fe_one, secp256k1_fe_normalizes_to_zero(&f)); ctx->proj_blind = f; /* For a random blinding value b, set scalar_offset=diff-b, ge_offset=bG */ - secp256k1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, nonce32, 32); secp256k1_scalar_set_b32(&b, nonce32, NULL); /* The blinding value cannot be zero, as that would mean ge_offset = infinity, * which secp256k1_gej_add_ge cannot handle. */ diff --git a/src/hash.h b/src/hash.h index 43cdd60c3..79d97671e 100644 --- a/src/hash.h +++ b/src/hash.h @@ -10,6 +10,12 @@ #include #include +typedef struct { + secp256k1_sha256_compression_function fn_sha256_compression; +} secp256k1_hash_ctx; + +static void secp256k1_hash_ctx_init(secp256k1_hash_ctx *hash_ctx); + typedef struct { uint32_t s[8]; unsigned char buf[64]; @@ -21,17 +27,17 @@ static void secp256k1_sha256_initialize(secp256k1_sha256 *hash); * The byte counter must be a multiple of 64, i.e., there must be no unwritten * bytes in the buffer. 
*/ static void secp256k1_sha256_initialize_midstate(secp256k1_sha256 *hash, uint64_t bytes, const uint32_t state[8]); -static void secp256k1_sha256_write(secp256k1_sha256 *hash, const unsigned char *data, size_t size); -static void secp256k1_sha256_finalize(secp256k1_sha256 *hash, unsigned char *out32); +static void secp256k1_sha256_write(const secp256k1_hash_ctx *hash_ctx, secp256k1_sha256 *hash, const unsigned char *data, size_t size); +static void secp256k1_sha256_finalize(const secp256k1_hash_ctx *hash_ctx, secp256k1_sha256 *hash, unsigned char *out32); static void secp256k1_sha256_clear(secp256k1_sha256 *hash); typedef struct { secp256k1_sha256 inner, outer; } secp256k1_hmac_sha256; -static void secp256k1_hmac_sha256_initialize(secp256k1_hmac_sha256 *hash, const unsigned char *key, size_t size); -static void secp256k1_hmac_sha256_write(secp256k1_hmac_sha256 *hash, const unsigned char *data, size_t size); -static void secp256k1_hmac_sha256_finalize(secp256k1_hmac_sha256 *hash, unsigned char *out32); +static void secp256k1_hmac_sha256_initialize(const secp256k1_hash_ctx *hash_ctx, secp256k1_hmac_sha256 *hash, const unsigned char *key, size_t size); +static void secp256k1_hmac_sha256_write(const secp256k1_hash_ctx *hash_ctx, secp256k1_hmac_sha256 *hash, const unsigned char *data, size_t size); +static void secp256k1_hmac_sha256_finalize(const secp256k1_hash_ctx *hash_ctx, secp256k1_hmac_sha256 *hash, unsigned char *out32); static void secp256k1_hmac_sha256_clear(secp256k1_hmac_sha256 *hash); typedef struct { @@ -40,8 +46,8 @@ typedef struct { int retry; } secp256k1_rfc6979_hmac_sha256; -static void secp256k1_rfc6979_hmac_sha256_initialize(secp256k1_rfc6979_hmac_sha256 *rng, const unsigned char *key, size_t keylen); -static void secp256k1_rfc6979_hmac_sha256_generate(secp256k1_rfc6979_hmac_sha256 *rng, unsigned char *out, size_t outlen); +static void secp256k1_rfc6979_hmac_sha256_initialize(const secp256k1_hash_ctx *hash_ctx, secp256k1_rfc6979_hmac_sha256 *rng, 
const unsigned char *key, size_t keylen); +static void secp256k1_rfc6979_hmac_sha256_generate(const secp256k1_hash_ctx *hash_ctx, secp256k1_rfc6979_hmac_sha256 *rng, unsigned char *out, size_t outlen); static void secp256k1_rfc6979_hmac_sha256_finalize(secp256k1_rfc6979_hmac_sha256 *rng); static void secp256k1_rfc6979_hmac_sha256_clear(secp256k1_rfc6979_hmac_sha256 *rng); diff --git a/src/hash_impl.h b/src/hash_impl.h index da3b46609..7c40f82e7 100644 --- a/src/hash_impl.h +++ b/src/hash_impl.h @@ -48,7 +48,7 @@ static void secp256k1_sha256_initialize_midstate(secp256k1_sha256 *hash, uint64_ } /** Perform one SHA-256 transformation, processing 16 big endian 32-bit words. */ -static void secp256k1_sha256_transform(uint32_t* s, const unsigned char* buf) { +static void secp256k1_sha256_transform_impl(uint32_t* s, const unsigned char* buf) { uint32_t a = s[0], b = s[1], c = s[2], d = s[3], e = s[4], f = s[5], g = s[6], h = s[7]; uint32_t w0, w1, w2, w3, w4, w5, w6, w7, w8, w9, w10, w11, w12, w13, w14, w15; @@ -130,26 +130,52 @@ static void secp256k1_sha256_transform(uint32_t* s, const unsigned char* buf) { s[7] += h; } -static void secp256k1_sha256_write(secp256k1_sha256 *hash, const unsigned char *data, size_t len) { +static void secp256k1_sha256_transform(uint32_t *state, const unsigned char *blocks64, size_t n_blocks) { + while (n_blocks--) { + secp256k1_sha256_transform_impl(state, blocks64); + blocks64 += 64; + } +} + +static void secp256k1_hash_ctx_init(secp256k1_hash_ctx *hash_ctx) { + VERIFY_CHECK(hash_ctx != NULL); + hash_ctx->fn_sha256_compression = secp256k1_sha256_transform; +} + +static void secp256k1_sha256_write(const secp256k1_hash_ctx *hash_ctx, secp256k1_sha256 *hash, const unsigned char *data, size_t len) { + size_t chunk_len; size_t bufsize = hash->bytes & 0x3F; hash->bytes += len; VERIFY_CHECK(hash->bytes >= len); - while (len >= 64 - bufsize) { - /* Fill the buffer, and process it. 
*/ - size_t chunk_len = 64 - bufsize; + VERIFY_CHECK(hash_ctx != NULL); + VERIFY_CHECK(hash_ctx->fn_sha256_compression != NULL); + + /* If we exceed the 64-byte block size with this input, process it and wipe the buffer */ + chunk_len = 64 - bufsize; + if (bufsize && len >= chunk_len) { memcpy(hash->buf + bufsize, data, chunk_len); data += chunk_len; len -= chunk_len; - secp256k1_sha256_transform(hash->s, hash->buf); + hash_ctx->fn_sha256_compression(hash->s, hash->buf, 1); bufsize = 0; } + + /* If we still have data to process, invoke compression directly on the input */ + if (len >= 64) { + const size_t n_blocks = len / 64; + const size_t advance = n_blocks * 64; + hash_ctx->fn_sha256_compression(hash->s, data, n_blocks); + data += advance; + len -= advance; + } + + /* Fill the buffer with what remains */ if (len) { - /* Fill the buffer with what remains. */ memcpy(hash->buf + bufsize, data, len); } } -static void secp256k1_sha256_finalize(secp256k1_sha256 *hash, unsigned char *out32) { +static void secp256k1_sha256_finalize(const secp256k1_hash_ctx *hash_ctx, secp256k1_sha256 *hash, unsigned char *out32) { static const unsigned char pad[64] = {0x80}; unsigned char sizedesc[8]; int i; @@ -157,8 +183,8 @@ static void secp256k1_sha256_finalize(secp256k1_sha256 *hash, unsigned char *out VERIFY_CHECK(hash->bytes < ((uint64_t)1 << 61)); secp256k1_write_be32(&sizedesc[0], hash->bytes >> 29); secp256k1_write_be32(&sizedesc[4], hash->bytes << 3); - secp256k1_sha256_write(hash, pad, 1 + ((119 - (hash->bytes % 64)) % 64)); - secp256k1_sha256_write(hash, sizedesc, 8); + secp256k1_sha256_write(hash_ctx, hash, pad, 1 + ((119 - (hash->bytes % 64)) % 64)); + secp256k1_sha256_write(hash_ctx, hash, sizedesc, 8); for (i = 0; i < 8; i++) { secp256k1_write_be32(&out32[4*i], hash->s[i]); hash->s[i] = 0; @@ -167,22 +193,22 @@ static void secp256k1_sha256_finalize(secp256k1_sha256 *hash, unsigned char *out /* Initializes a sha256 struct and writes the 64 byte string * 
SHA256(tag)||SHA256(tag) into it. */ -static void secp256k1_sha256_initialize_tagged(secp256k1_sha256 *hash, const unsigned char *tag, size_t taglen) { +static void secp256k1_sha256_initialize_tagged(const secp256k1_hash_ctx *hash_ctx, secp256k1_sha256 *hash, const unsigned char *tag, size_t taglen) { unsigned char buf[32]; secp256k1_sha256_initialize(hash); - secp256k1_sha256_write(hash, tag, taglen); - secp256k1_sha256_finalize(hash, buf); + secp256k1_sha256_write(hash_ctx, hash, tag, taglen); + secp256k1_sha256_finalize(hash_ctx, hash, buf); secp256k1_sha256_initialize(hash); - secp256k1_sha256_write(hash, buf, 32); - secp256k1_sha256_write(hash, buf, 32); + secp256k1_sha256_write(hash_ctx, hash, buf, 32); + secp256k1_sha256_write(hash_ctx, hash, buf, 32); } static void secp256k1_sha256_clear(secp256k1_sha256 *hash) { secp256k1_memclear_explicit(hash, sizeof(*hash)); } -static void secp256k1_hmac_sha256_initialize(secp256k1_hmac_sha256 *hash, const unsigned char *key, size_t keylen) { +static void secp256k1_hmac_sha256_initialize(const secp256k1_hash_ctx *hash_ctx, secp256k1_hmac_sha256 *hash, const unsigned char *key, size_t keylen) { size_t n; unsigned char rkey[64]; if (keylen <= sizeof(rkey)) { @@ -191,8 +217,8 @@ static void secp256k1_hmac_sha256_initialize(secp256k1_hmac_sha256 *hash, const } else { secp256k1_sha256 sha256; secp256k1_sha256_initialize(&sha256); - secp256k1_sha256_write(&sha256, key, keylen); - secp256k1_sha256_finalize(&sha256, rkey); + secp256k1_sha256_write(hash_ctx, &sha256, key, keylen); + secp256k1_sha256_finalize(hash_ctx, &sha256, rkey); memset(rkey + 32, 0, 32); } @@ -200,33 +226,33 @@ static void secp256k1_hmac_sha256_initialize(secp256k1_hmac_sha256 *hash, const for (n = 0; n < sizeof(rkey); n++) { rkey[n] ^= 0x5c; } - secp256k1_sha256_write(&hash->outer, rkey, sizeof(rkey)); + secp256k1_sha256_write(hash_ctx, &hash->outer, rkey, sizeof(rkey)); secp256k1_sha256_initialize(&hash->inner); for (n = 0; n < sizeof(rkey); n++) { 
rkey[n] ^= 0x5c ^ 0x36; } - secp256k1_sha256_write(&hash->inner, rkey, sizeof(rkey)); + secp256k1_sha256_write(hash_ctx, &hash->inner, rkey, sizeof(rkey)); secp256k1_memclear_explicit(rkey, sizeof(rkey)); } -static void secp256k1_hmac_sha256_write(secp256k1_hmac_sha256 *hash, const unsigned char *data, size_t size) { - secp256k1_sha256_write(&hash->inner, data, size); +static void secp256k1_hmac_sha256_write(const secp256k1_hash_ctx *hash_ctx, secp256k1_hmac_sha256 *hash, const unsigned char *data, size_t size) { + secp256k1_sha256_write(hash_ctx, &hash->inner, data, size); } -static void secp256k1_hmac_sha256_finalize(secp256k1_hmac_sha256 *hash, unsigned char *out32) { +static void secp256k1_hmac_sha256_finalize(const secp256k1_hash_ctx *hash_ctx, secp256k1_hmac_sha256 *hash, unsigned char *out32) { unsigned char temp[32]; - secp256k1_sha256_finalize(&hash->inner, temp); - secp256k1_sha256_write(&hash->outer, temp, 32); + secp256k1_sha256_finalize(hash_ctx, &hash->inner, temp); + secp256k1_sha256_write(hash_ctx, &hash->outer, temp, 32); secp256k1_memclear_explicit(temp, sizeof(temp)); - secp256k1_sha256_finalize(&hash->outer, out32); + secp256k1_sha256_finalize(hash_ctx, &hash->outer, out32); } static void secp256k1_hmac_sha256_clear(secp256k1_hmac_sha256 *hash) { secp256k1_memclear_explicit(hash, sizeof(*hash)); } -static void secp256k1_rfc6979_hmac_sha256_initialize(secp256k1_rfc6979_hmac_sha256 *rng, const unsigned char *key, size_t keylen) { +static void secp256k1_rfc6979_hmac_sha256_initialize(const secp256k1_hash_ctx *hash_ctx, secp256k1_rfc6979_hmac_sha256 *rng, const unsigned char *key, size_t keylen) { secp256k1_hmac_sha256 hmac; static const unsigned char zero[1] = {0x00}; static const unsigned char one[1] = {0x01}; @@ -235,47 +261,47 @@ static void secp256k1_rfc6979_hmac_sha256_initialize(secp256k1_rfc6979_hmac_sha2 memset(rng->k, 0x00, 32); /* RFC6979 3.2.c. */ /* RFC6979 3.2.d. 
*/ - secp256k1_hmac_sha256_initialize(&hmac, rng->k, 32); - secp256k1_hmac_sha256_write(&hmac, rng->v, 32); - secp256k1_hmac_sha256_write(&hmac, zero, 1); - secp256k1_hmac_sha256_write(&hmac, key, keylen); - secp256k1_hmac_sha256_finalize(&hmac, rng->k); - secp256k1_hmac_sha256_initialize(&hmac, rng->k, 32); - secp256k1_hmac_sha256_write(&hmac, rng->v, 32); - secp256k1_hmac_sha256_finalize(&hmac, rng->v); + secp256k1_hmac_sha256_initialize(hash_ctx, &hmac, rng->k, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, rng->v, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, zero, 1); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, key, keylen); + secp256k1_hmac_sha256_finalize(hash_ctx, &hmac, rng->k); + secp256k1_hmac_sha256_initialize(hash_ctx, &hmac, rng->k, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, rng->v, 32); + secp256k1_hmac_sha256_finalize(hash_ctx, &hmac, rng->v); /* RFC6979 3.2.f. */ - secp256k1_hmac_sha256_initialize(&hmac, rng->k, 32); - secp256k1_hmac_sha256_write(&hmac, rng->v, 32); - secp256k1_hmac_sha256_write(&hmac, one, 1); - secp256k1_hmac_sha256_write(&hmac, key, keylen); - secp256k1_hmac_sha256_finalize(&hmac, rng->k); - secp256k1_hmac_sha256_initialize(&hmac, rng->k, 32); - secp256k1_hmac_sha256_write(&hmac, rng->v, 32); - secp256k1_hmac_sha256_finalize(&hmac, rng->v); + secp256k1_hmac_sha256_initialize(hash_ctx, &hmac, rng->k, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, rng->v, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, one, 1); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, key, keylen); + secp256k1_hmac_sha256_finalize(hash_ctx, &hmac, rng->k); + secp256k1_hmac_sha256_initialize(hash_ctx, &hmac, rng->k, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, rng->v, 32); + secp256k1_hmac_sha256_finalize(hash_ctx, &hmac, rng->v); rng->retry = 0; } -static void secp256k1_rfc6979_hmac_sha256_generate(secp256k1_rfc6979_hmac_sha256 *rng, unsigned char *out, size_t outlen) { +static void 
secp256k1_rfc6979_hmac_sha256_generate(const secp256k1_hash_ctx *hash_ctx, secp256k1_rfc6979_hmac_sha256 *rng, unsigned char *out, size_t outlen) { /* RFC6979 3.2.h. */ static const unsigned char zero[1] = {0x00}; if (rng->retry) { secp256k1_hmac_sha256 hmac; - secp256k1_hmac_sha256_initialize(&hmac, rng->k, 32); - secp256k1_hmac_sha256_write(&hmac, rng->v, 32); - secp256k1_hmac_sha256_write(&hmac, zero, 1); - secp256k1_hmac_sha256_finalize(&hmac, rng->k); - secp256k1_hmac_sha256_initialize(&hmac, rng->k, 32); - secp256k1_hmac_sha256_write(&hmac, rng->v, 32); - secp256k1_hmac_sha256_finalize(&hmac, rng->v); + secp256k1_hmac_sha256_initialize(hash_ctx, &hmac, rng->k, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, rng->v, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, zero, 1); + secp256k1_hmac_sha256_finalize(hash_ctx, &hmac, rng->k); + secp256k1_hmac_sha256_initialize(hash_ctx, &hmac, rng->k, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, rng->v, 32); + secp256k1_hmac_sha256_finalize(hash_ctx, &hmac, rng->v); } while (outlen > 0) { secp256k1_hmac_sha256 hmac; size_t now = outlen; - secp256k1_hmac_sha256_initialize(&hmac, rng->k, 32); - secp256k1_hmac_sha256_write(&hmac, rng->v, 32); - secp256k1_hmac_sha256_finalize(&hmac, rng->v); + secp256k1_hmac_sha256_initialize(hash_ctx, &hmac, rng->k, 32); + secp256k1_hmac_sha256_write(hash_ctx, &hmac, rng->v, 32); + secp256k1_hmac_sha256_finalize(hash_ctx, &hmac, rng->v); if (now > 32) { now = 32; } diff --git a/src/modules/bppp/bppp_norm_product_impl.h b/src/modules/bppp/bppp_norm_product_impl.h index daf7b0da7..0abd3cd47 100644 --- a/src/modules/bppp/bppp_norm_product_impl.h +++ b/src/modules/bppp/bppp_norm_product_impl.h @@ -236,6 +236,7 @@ static int secp256k1_bppp_rangeproof_norm_product_prove( secp256k1_scalar* c_vec, size_t c_vec_len ) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_scalar mu_f, rho_f = *rho; size_t proof_idx = 0; ecmult_x_cb_data x_cb_data; @@ -316,8 
+317,8 @@ static int secp256k1_bppp_rangeproof_norm_product_prove( proof_idx += 65; /* Obtain challenge gamma for the the next round */ - secp256k1_sha256_write(transcript, &proof[proof_idx - 65], 65); - secp256k1_bppp_challenge_scalar(&gamma, transcript, 0); + secp256k1_sha256_write(hash_ctx, transcript, &proof[proof_idx - 65], 65); + secp256k1_bppp_challenge_scalar(hash_ctx, &gamma, transcript, 0); if (g_len > 1) { for (i = 0; i < g_len; i = i + 2) { @@ -434,6 +435,7 @@ static int secp256k1_bppp_rangeproof_norm_product_verify( size_t c_vec_len, const secp256k1_ge* commit ) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_scalar rho_f, mu_f, v, n, l, rho_inv, h_c; secp256k1_scalar *gammas, *s_g, *s_h, *rho_inv_pows; secp256k1_gej res1, res2; @@ -487,8 +489,8 @@ static int secp256k1_bppp_rangeproof_norm_product_verify( for (i = 0; i < n_rounds; i++) { secp256k1_scalar gamma; - secp256k1_sha256_write(transcript, &proof[i * 65], 65); - secp256k1_bppp_challenge_scalar(&gamma, transcript, 0); + secp256k1_sha256_write(hash_ctx, transcript, &proof[i * 65], 65); + secp256k1_bppp_challenge_scalar(hash_ctx, &gamma, transcript, 0); gammas[i] = gamma; } /* s_g[0] = n * \prod_{j=0}^{log_g_len - 1} rho^(2^j) diff --git a/src/modules/bppp/bppp_transcript_impl.h b/src/modules/bppp/bppp_transcript_impl.h index 5e2122313..53b86c5d2 100644 --- a/src/modules/bppp/bppp_transcript_impl.h +++ b/src/modules/bppp/bppp_transcript_impl.h @@ -22,12 +22,12 @@ static void secp256k1_bppp_sha256_tagged_commitment_init(secp256k1_sha256 *sha) } /* Obtain a challenge scalar from the current transcript.*/ -static void secp256k1_bppp_challenge_scalar(secp256k1_scalar* ch, const secp256k1_sha256 *transcript, uint64_t idx) { +static void secp256k1_bppp_challenge_scalar(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar* ch, const secp256k1_sha256 *transcript, uint64_t idx) { unsigned char buf[32]; secp256k1_sha256 sha = *transcript; secp256k1_bppp_le64(buf, idx); - 
secp256k1_sha256_write(&sha, buf, 8); - secp256k1_sha256_finalize(&sha, buf); + secp256k1_sha256_write(hash_ctx, &sha, buf, 8); + secp256k1_sha256_finalize(hash_ctx, &sha, buf); secp256k1_sha256_clear(&sha); secp256k1_scalar_set_b32(ch, buf, NULL); } diff --git a/src/modules/bppp/main_impl.h b/src/modules/bppp/main_impl.h index e8e9e30a0..8c4d1eabe 100644 --- a/src/modules/bppp/main_impl.h +++ b/src/modules/bppp/main_impl.h @@ -18,6 +18,7 @@ secp256k1_bppp_generators *secp256k1_bppp_generators_create(const secp256k1_context *ctx, size_t n) { secp256k1_bppp_generators *ret; secp256k1_rfc6979_hmac_sha256 rng; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); unsigned char seed[64]; size_t i; @@ -37,11 +38,11 @@ secp256k1_bppp_generators *secp256k1_bppp_generators_create(const secp256k1_cont secp256k1_fe_get_b32(&seed[0], &secp256k1_ge_const_g.x); secp256k1_fe_get_b32(&seed[32], &secp256k1_ge_const_g.y); - secp256k1_rfc6979_hmac_sha256_initialize(&rng, seed, 64); + secp256k1_rfc6979_hmac_sha256_initialize(hash_ctx, &rng, seed, 64); for (i = 0; i < n; i++) { secp256k1_generator gen; unsigned char tmp[32] = { 0 }; - secp256k1_rfc6979_hmac_sha256_generate(&rng, tmp, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, tmp, 32); CHECK(secp256k1_generator_generate(ctx, &gen, tmp)); secp256k1_generator_load(&ret->gens[i], &gen); } diff --git a/src/modules/bppp/tests_impl.h b/src/modules/bppp/tests_impl.h index dda9a8c0a..47a00c6d4 100644 --- a/src/modules/bppp/tests_impl.h +++ b/src/modules/bppp/tests_impl.h @@ -104,6 +104,7 @@ static void test_bppp_generators_fixed(void) { } static void test_bppp_tagged_hash(void) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); unsigned char tag_data[] = {'B', 'u', 'l', 'l', 'e', 't', 'p', 'r', 'o', 'o', 'f', 's', '_', 'p', 'p', '/', 'v', '0', '/', 'c', 'o', 'm', 'm', 'i', 't', 'm', 'e', 'n', 't'}; secp256k1_sha256 sha; secp256k1_sha256 sha_cached; @@ -111,10 +112,10 @@ static void 
test_bppp_tagged_hash(void) { unsigned char output_cached[32]; secp256k1_scalar s; - secp256k1_sha256_initialize_tagged(&sha, tag_data, sizeof(tag_data)); + secp256k1_sha256_initialize_tagged(hash_ctx, &sha, tag_data, sizeof(tag_data)); secp256k1_bppp_sha256_tagged_commitment_init(&sha_cached); - secp256k1_sha256_finalize(&sha, output); - secp256k1_sha256_finalize(&sha_cached, output_cached); + secp256k1_sha256_finalize(hash_ctx, &sha, output); + secp256k1_sha256_finalize(hash_ctx, &sha_cached, output_cached); CHECK(secp256k1_memcmp_var(output, output_cached, 32) == 0); { @@ -123,7 +124,7 @@ static void test_bppp_tagged_hash(void) { 0x8A, 0x41, 0xC6, 0x85, 0x1A, 0x79, 0x14, 0xFC, 0x48, 0x15, 0xC7, 0x2D, 0xF8, 0x63, 0x8F, 0x1B }; secp256k1_bppp_sha256_tagged_commitment_init(&sha); - secp256k1_bppp_challenge_scalar(&s, &sha, 0); + secp256k1_bppp_challenge_scalar(hash_ctx, &s, &sha, 0); secp256k1_scalar_get_b32(output, &s); CHECK(secp256k1_memcmp_var(output, expected, sizeof(output)) == 0); } @@ -134,8 +135,8 @@ static void test_bppp_tagged_hash(void) { 0x72, 0x7E, 0x3E, 0xB7, 0x10, 0x03, 0xF0, 0xE9, 0x69, 0x4D, 0xAA, 0x96, 0xCE, 0x98, 0xBB, 0x39, 0x1C, 0x2F, 0x7C, 0x2E, 0x1C, 0x17, 0x78, 0x6D }; - secp256k1_sha256_write(&sha, tmp, sizeof(tmp)); - secp256k1_bppp_challenge_scalar(&s, &sha, 0); + secp256k1_sha256_write(hash_ctx, &sha, tmp, sizeof(tmp)); + secp256k1_bppp_challenge_scalar(hash_ctx, &s, &sha, 0); secp256k1_scalar_get_b32(output, &s); CHECK(secp256k1_memcmp_var(output, expected, sizeof(output)) == 0); } @@ -279,6 +280,7 @@ static void secp256k1_norm_arg_commit_initial_data( const secp256k1_ge* commit ) { /* Commit to the initial public values */ + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); unsigned char ser_commit[33], ser_scalar[32], ser_le64[8]; size_t i; secp256k1_ge comm = *commit; @@ -287,24 +289,24 @@ static void secp256k1_norm_arg_commit_initial_data( secp256k1_fe_normalize(&comm.y); CHECK(secp256k1_ge_is_infinity(&comm) == 
0); CHECK(secp256k1_bppp_serialize_pt(&ser_commit[0], &comm)); - secp256k1_sha256_write(transcript, ser_commit, 33); + secp256k1_sha256_write(hash_ctx, transcript, ser_commit, 33); secp256k1_scalar_get_b32(ser_scalar, rho); - secp256k1_sha256_write(transcript, ser_scalar, 32); + secp256k1_sha256_write(hash_ctx, transcript, ser_scalar, 32); secp256k1_bppp_le64(ser_le64, g_len); - secp256k1_sha256_write(transcript, ser_le64, 8); + secp256k1_sha256_write(hash_ctx, transcript, ser_le64, 8); secp256k1_bppp_le64(ser_le64, gens_vec->n); - secp256k1_sha256_write(transcript, ser_le64, 8); + secp256k1_sha256_write(hash_ctx, transcript, ser_le64, 8); for (i = 0; i < gens_vec->n; i++) { secp256k1_fe_normalize(&gens_vec->gens[i].x); secp256k1_fe_normalize(&gens_vec->gens[i].y); CHECK(secp256k1_bppp_serialize_pt(&ser_commit[0], &gens_vec->gens[i])); - secp256k1_sha256_write(transcript, ser_commit, 33); + secp256k1_sha256_write(hash_ctx, transcript, ser_commit, 33); } secp256k1_bppp_le64(ser_le64, c_vec_len); - secp256k1_sha256_write(transcript, ser_le64, 8); + secp256k1_sha256_write(hash_ctx, transcript, ser_le64, 8); for (i = 0; i < c_vec_len; i++) { secp256k1_scalar_get_b32(ser_scalar, &c_vec[i]); - secp256k1_sha256_write(transcript, ser_scalar, 32); + secp256k1_sha256_write(hash_ctx, transcript, ser_scalar, 32); } } @@ -561,7 +563,7 @@ int norm_arg_verify_vectors_helper(secp256k1_scratch *scratch, const unsigned ch return ret; } -#define IDX_TO_TEST(i) (norm_arg_verify_vectors_helper(scratch, verify_vector_gens, verify_vector_##i##_proof, sizeof(verify_vector_##i##_proof), verify_vector_##i##_r32, verify_vector_##i##_n_vec_len, verify_vector_##i##_c_vec32, verify_vector_##i##_c_vec, sizeof(verify_vector_##i##_c_vec)/sizeof(secp256k1_scalar), verify_vector_##i##_commit33) == verify_vector_##i##_result) +#define IDX_TO_TEST(i) (norm_arg_verify_vectors_helper(scratch, verify_vector_gens, verify_vector_##i##_proof, sizeof(verify_vector_##i##_proof), verify_vector_##i##_r32, 
verify_vector_##i##_n_vec_len, verify_vector_##i##_c_vec32, verify_vector_##i##_c_vec, ARRAY_SIZE(verify_vector_##i##_c_vec), verify_vector_##i##_commit33) == verify_vector_##i##_result) static void norm_arg_verify_vectors(void) { secp256k1_scratch *scratch = secp256k1_scratch_space_create(CTX, 1000*1000); /* shouldn't need much */ @@ -629,9 +631,9 @@ static void norm_arg_prove_vectors_helper(secp256k1_scratch *scratch, const unsi #define IDX_TO_TEST(i) (norm_arg_prove_vectors_helper(scratch, prove_vector_gens, prove_vector_##i##_proof, sizeof(prove_vector_##i##_proof), prove_vector_##i##_r32,\ - prove_vector_##i##_n_vec32, prove_vector_##i##_n_vec, sizeof(prove_vector_##i##_n_vec)/sizeof(secp256k1_scalar),\ + prove_vector_##i##_n_vec32, prove_vector_##i##_n_vec, ARRAY_SIZE(prove_vector_##i##_n_vec),\ prove_vector_##i##_l_vec32, prove_vector_##i##_l_vec,\ - prove_vector_##i##_c_vec32, prove_vector_##i##_c_vec, sizeof(prove_vector_##i##_c_vec)/sizeof(secp256k1_scalar), \ + prove_vector_##i##_c_vec32, prove_vector_##i##_c_vec, ARRAY_SIZE(prove_vector_##i##_c_vec), \ prove_vector_##i##_result)) static void norm_arg_prove_vectors(void) { diff --git a/src/modules/ecdh/main_impl.h b/src/modules/ecdh/main_impl.h index 9f2dfdd56..b0359b2c4 100644 --- a/src/modules/ecdh/main_impl.h +++ b/src/modules/ecdh/main_impl.h @@ -10,20 +10,24 @@ #include "../../../include/secp256k1_ecdh.h" #include "../../ecmult_const_impl.h" -static int ecdh_hash_function_sha256(unsigned char *output, const unsigned char *x32, const unsigned char *y32, void *data) { +static int ecdh_hash_function_sha256_impl(const secp256k1_hash_ctx *hash_ctx, unsigned char *output, const unsigned char *x32, const unsigned char *y32, void *data) { unsigned char version = (y32[31] & 0x01) | 0x02; secp256k1_sha256 sha; (void)data; secp256k1_sha256_initialize(&sha); - secp256k1_sha256_write(&sha, &version, 1); - secp256k1_sha256_write(&sha, x32, 32); - secp256k1_sha256_finalize(&sha, output); + 
secp256k1_sha256_write(hash_ctx, &sha, &version, 1); + secp256k1_sha256_write(hash_ctx, &sha, x32, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, output); secp256k1_sha256_clear(&sha); return 1; } +static int ecdh_hash_function_sha256(unsigned char *output, const unsigned char *x32, const unsigned char *y32, void *data) { + return ecdh_hash_function_sha256_impl(secp256k1_get_hash_context(secp256k1_context_static), output, x32, y32, data); +} + const secp256k1_ecdh_hash_function secp256k1_ecdh_hash_function_sha256 = ecdh_hash_function_sha256; const secp256k1_ecdh_hash_function secp256k1_ecdh_hash_function_default = ecdh_hash_function_sha256; @@ -41,10 +45,6 @@ int secp256k1_ecdh(const secp256k1_context* ctx, unsigned char *output, const se ARG_CHECK(point != NULL); ARG_CHECK(scalar != NULL); - if (hashfp == NULL) { - hashfp = secp256k1_ecdh_hash_function_default; - } - secp256k1_pubkey_load(ctx, &pt, point); secp256k1_scalar_set_b32(&s, scalar, &overflow); @@ -60,7 +60,12 @@ int secp256k1_ecdh(const secp256k1_context* ctx, unsigned char *output, const se secp256k1_fe_get_b32(x, &pt.x); secp256k1_fe_get_b32(y, &pt.y); - ret = hashfp(output, x, y, data); + if (hashfp == NULL) { + /* Use ctx-aware function by default */ + ret = ecdh_hash_function_sha256_impl(secp256k1_get_hash_context(ctx), output, x, y, data); + } else { + ret = hashfp(output, x, y, data); + } secp256k1_memclear_explicit(x, sizeof(x)); secp256k1_memclear_explicit(y, sizeof(y)); diff --git a/src/modules/ecdh/tests_impl.h b/src/modules/ecdh/tests_impl.h index cb1d953d2..c75ce9ff6 100644 --- a/src/modules/ecdh/tests_impl.h +++ b/src/modules/ecdh/tests_impl.h @@ -8,6 +8,7 @@ #define SECP256K1_MODULE_ECDH_TESTS_H #include "../../unit_test.h" +#include "../../testutil.h" static int ecdh_hash_function_test_xpassthru(unsigned char *output, const unsigned char *x, const unsigned char *y, void *data) { (void)y; @@ -83,13 +84,37 @@ static void test_ecdh_generator_basepoint(void) { /* compute "explicitly" */ 
CHECK(secp256k1_ec_pubkey_serialize(CTX, point_ser, &point_ser_len, &point[1], SECP256K1_EC_COMPRESSED) == 1); secp256k1_sha256_initialize(&sha); - secp256k1_sha256_write(&sha, point_ser, point_ser_len); - secp256k1_sha256_finalize(&sha, output_ser); + secp256k1_sha256_write(secp256k1_get_hash_context(CTX), &sha, point_ser, point_ser_len); + secp256k1_sha256_finalize(secp256k1_get_hash_context(CTX), &sha, output_ser); /* compare */ CHECK(secp256k1_memcmp_var(output_ecdh, output_ser, 32) == 0); } } +DEFINE_SHA256_TRANSFORM_PROBE(sha256_ecdh) +static void test_ecdh_ctx_sha256(void) { + /* Check ctx-provided SHA256 compression override takes effect */ + secp256k1_context *ctx = secp256k1_context_clone(CTX); + unsigned char out_default[65], out_custom[65]; + const unsigned char sk[32] = {1}; + secp256k1_pubkey pubkey; + CHECK(secp256k1_ec_pubkey_create(ctx, &pubkey, sk) == 1); + + /* Default behavior */ + CHECK(secp256k1_ecdh(ctx, out_default, &pubkey, sk, NULL, NULL) == 1); + CHECK(!sha256_ecdh_called); + + /* Override SHA256 compression directly, bypassing the ctx setter sanity checks */ + ctx->hash_ctx.fn_sha256_compression = sha256_ecdh; + CHECK(secp256k1_ecdh(ctx, out_custom, &pubkey, sk, NULL, NULL) == 1); + + /* Outputs must differ if custom compression was used */ + CHECK(secp256k1_memcmp_var(out_default, out_custom, 32) != 0); + CHECK(sha256_ecdh_called); + + secp256k1_context_destroy(ctx); +} + static void test_bad_scalar(void) { unsigned char s_zero[32] = { 0 }; unsigned char s_overflow[32] = { 0 }; @@ -187,6 +212,7 @@ static const struct tf_test_entry tests_ecdh[] = { CASE1(test_bad_scalar), CASE1(test_result_basepoint), CASE1(test_ecdh_wycheproof), + CASE1(test_ecdh_ctx_sha256), }; #endif /* SECP256K1_MODULE_ECDH_TESTS_H */ diff --git a/src/modules/ecdsa_adaptor/dleq_impl.h b/src/modules/ecdsa_adaptor/dleq_impl.h index a117d69d7..b05c0b188 100644 --- a/src/modules/ecdsa_adaptor/dleq_impl.h +++ b/src/modules/ecdsa_adaptor/dleq_impl.h @@ -24,14 +24,14 @@ 
static void secp256k1_nonce_function_dleq_sha256_tagged(secp256k1_sha256 *sha) { /* algo argument for nonce_function_ecdsa_adaptor to derive the nonce using a tagged hash function. */ static const unsigned char dleq_algo[] = {'D','L','E','Q'}; -static void secp256k1_dleq_hash_point(secp256k1_sha256 *sha, secp256k1_ge *p) { +static void secp256k1_dleq_hash_point(const secp256k1_hash_ctx *hash_ctx, secp256k1_sha256 *sha, secp256k1_ge *p) { unsigned char buf[33]; secp256k1_eckey_pubkey_serialize33(p, buf); - secp256k1_sha256_write(sha, buf, 33); + secp256k1_sha256_write(hash_ctx, sha, buf, 33); } -static int secp256k1_dleq_nonce(secp256k1_scalar *k, const unsigned char *sk32, const unsigned char *gen2_33, const unsigned char *p1_33, const unsigned char *p2_33, secp256k1_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { +static int secp256k1_dleq_nonce(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *k, const unsigned char *sk32, const unsigned char *gen2_33, const unsigned char *p1_33, const unsigned char *p2_33, secp256k1_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { secp256k1_sha256 sha; unsigned char buf[32]; unsigned char nonce[32]; @@ -41,9 +41,9 @@ static int secp256k1_dleq_nonce(secp256k1_scalar *k, const unsigned char *sk32, } secp256k1_sha256_initialize(&sha); - secp256k1_sha256_write(&sha, p1_33, 33); - secp256k1_sha256_write(&sha, p2_33, 33); - secp256k1_sha256_finalize(&sha, buf); + secp256k1_sha256_write(hash_ctx, &sha, p1_33, 33); + secp256k1_sha256_write(hash_ctx, &sha, p2_33, 33); + secp256k1_sha256_finalize(hash_ctx, &sha, buf); secp256k1_sha256_clear(&sha); if (!noncefp(nonce, buf, sk32, gen2_33, dleq_algo, sizeof(dleq_algo), ndata)) { @@ -59,17 +59,17 @@ static int secp256k1_dleq_nonce(secp256k1_scalar *k, const unsigned char *sk32, /* Generates a challenge as defined in the DLC Specification at * https://github.com/discreetlogcontracts/dlcspecs */ -static void secp256k1_dleq_challenge(secp256k1_scalar *e, 
secp256k1_ge *gen2, secp256k1_ge *r1, secp256k1_ge *r2, secp256k1_ge *p1, secp256k1_ge *p2) { +static void secp256k1_dleq_challenge(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *e, secp256k1_ge *gen2, secp256k1_ge *r1, secp256k1_ge *r2, secp256k1_ge *p1, secp256k1_ge *p2) { unsigned char buf[32]; secp256k1_sha256 sha; secp256k1_nonce_function_dleq_sha256_tagged(&sha); - secp256k1_dleq_hash_point(&sha, p1); - secp256k1_dleq_hash_point(&sha, gen2); - secp256k1_dleq_hash_point(&sha, p2); - secp256k1_dleq_hash_point(&sha, r1); - secp256k1_dleq_hash_point(&sha, r2); - secp256k1_sha256_finalize(&sha, buf); + secp256k1_dleq_hash_point(hash_ctx, &sha, p1); + secp256k1_dleq_hash_point(hash_ctx, &sha, gen2); + secp256k1_dleq_hash_point(hash_ctx, &sha, p2); + secp256k1_dleq_hash_point(hash_ctx, &sha, r1); + secp256k1_dleq_hash_point(hash_ctx, &sha, r2); + secp256k1_sha256_finalize(hash_ctx, &sha, buf); secp256k1_sha256_clear(&sha); secp256k1_scalar_set_b32(e, buf, NULL); @@ -89,6 +89,7 @@ static void secp256k1_dleq_pair(const secp256k1_ecmult_gen_context *ecmult_gen_c static int secp256k1_dleq_prove(const secp256k1_context* ctx, secp256k1_scalar *s, secp256k1_scalar *e, const secp256k1_scalar *sk, secp256k1_ge *p1, secp256k1_ge *gen2, secp256k1_ge *p2, secp256k1_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { /* Note: r[2] and k are local to the DLEQ proof, and they differ from the * values with the same identifiers in main_impl.h. 
*/ + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_ge r[2]; secp256k1_scalar k = { 0 }; unsigned char sk32[32]; @@ -103,7 +104,7 @@ static int secp256k1_dleq_prove(const secp256k1_context* ctx, secp256k1_scalar * secp256k1_scalar_get_b32(sk32, sk); - ret = secp256k1_dleq_nonce(&k, sk32, gen2_33, p1_33, p2_33, noncefp, ndata); + ret = secp256k1_dleq_nonce(hash_ctx, &k, sk32, gen2_33, p1_33, p2_33, noncefp, ndata); secp256k1_declassify(ctx, &ret, sizeof(ret)); if (!ret) { secp256k1_memclear_explicit(sk32, sizeof(sk32)); @@ -118,7 +119,7 @@ static int secp256k1_dleq_prove(const secp256k1_context* ctx, secp256k1_scalar * /* e = tagged hash(p1, gen2, p2, r[0], r[1]) */ /* s = k + e * sk */ - secp256k1_dleq_challenge(e, gen2, &r[0], &r[1], p1, p2); + secp256k1_dleq_challenge(hash_ctx, e, gen2, &r[0], &r[1], p1, p2); secp256k1_scalar_mul(s, e, sk); secp256k1_scalar_add(s, s, &k); @@ -127,7 +128,7 @@ static int secp256k1_dleq_prove(const secp256k1_context* ctx, secp256k1_scalar * return 1; } -static int secp256k1_dleq_verify(const secp256k1_scalar *s, const secp256k1_scalar *e, secp256k1_ge *p1, secp256k1_ge *gen2, secp256k1_ge *p2) { +static int secp256k1_dleq_verify(const secp256k1_hash_ctx *hash_ctx, const secp256k1_scalar *s, const secp256k1_scalar *e, secp256k1_ge *p1, secp256k1_ge *gen2, secp256k1_ge *p2) { secp256k1_scalar e_neg; secp256k1_scalar e_expected; secp256k1_gej gen2j; @@ -154,7 +155,7 @@ static int secp256k1_dleq_verify(const secp256k1_scalar *s, const secp256k1_scal secp256k1_ge_set_all_gej_var(r, rj, 2); - secp256k1_dleq_challenge(&e_expected, gen2, &r[0], &r[1], p1, p2); + secp256k1_dleq_challenge(hash_ctx, &e_expected, gen2, &r[0], &r[1], p1, p2); secp256k1_scalar_add(&e_expected, &e_expected, &e_neg); return secp256k1_scalar_is_zero(&e_expected); diff --git a/src/modules/ecdsa_adaptor/main_impl.h b/src/modules/ecdsa_adaptor/main_impl.h index 0d5909079..19fe1b093 100644 --- a/src/modules/ecdsa_adaptor/main_impl.h +++ 
b/src/modules/ecdsa_adaptor/main_impl.h @@ -90,7 +90,7 @@ static void secp256k1_nonce_function_ecdsa_adaptor_sha256_tagged_aux(secp256k1_s static const unsigned char ecdsa_adaptor_algo[] = {'E', 'C', 'D', 'S', 'A', 'a', 'd', 'a', 'p', 't', 'o', 'r', '/', 'n', 'o', 'n'}; /* Modified BIP-340 nonce function */ -static int nonce_function_ecdsa_adaptor(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *pk33, const unsigned char *algo, size_t algolen, void *data) { +static int nonce_function_ecdsa_adaptor_impl(const secp256k1_hash_ctx *hash_ctx, unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *pk33, const unsigned char *algo, size_t algolen, void *data) { secp256k1_sha256 sha; unsigned char masked_key[32]; int i; @@ -101,8 +101,8 @@ static int nonce_function_ecdsa_adaptor(unsigned char *nonce32, const unsigned c if (data != NULL) { secp256k1_nonce_function_ecdsa_adaptor_sha256_tagged_aux(&sha); - secp256k1_sha256_write(&sha, data, 32); - secp256k1_sha256_finalize(&sha, masked_key); + secp256k1_sha256_write(hash_ctx, &sha, data, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, masked_key); secp256k1_sha256_clear(&sha); for (i = 0; i < 32; i++) { masked_key[i] ^= key32[i]; @@ -119,22 +119,34 @@ static int nonce_function_ecdsa_adaptor(unsigned char *nonce32, const unsigned c && secp256k1_memcmp_var(algo, dleq_algo, algolen) == 0) { secp256k1_nonce_function_dleq_sha256_tagged(&sha); } else { - secp256k1_sha256_initialize_tagged(&sha, algo, algolen); + secp256k1_sha256_initialize_tagged(hash_ctx, &sha, algo, algolen); } /* Hash (masked-)key||pk||msg using the tagged hash as per BIP-340 */ if (data != NULL) { - secp256k1_sha256_write(&sha, masked_key, 32); + secp256k1_sha256_write(hash_ctx, &sha, masked_key, 32); } else { - secp256k1_sha256_write(&sha, key32, 32); + secp256k1_sha256_write(hash_ctx, &sha, key32, 32); } - secp256k1_sha256_write(&sha, pk33, 33); - 
secp256k1_sha256_write(&sha, msg32, 32); - secp256k1_sha256_finalize(&sha, nonce32); + secp256k1_sha256_write(hash_ctx, &sha, pk33, 33); + secp256k1_sha256_write(hash_ctx, &sha, msg32, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, nonce32); secp256k1_sha256_clear(&sha); return 1; } +static int nonce_function_ecdsa_adaptor( + unsigned char *nonce32, + const unsigned char *msg32, + const unsigned char *key32, + const unsigned char *pk33, + const unsigned char *algo, + size_t algolen, + void *data) +{ + return nonce_function_ecdsa_adaptor_impl(secp256k1_get_hash_context(secp256k1_context_static), nonce32, msg32, key32, pk33, algo, algolen, data); +} + const secp256k1_nonce_function_hardened_ecdsa_adaptor secp256k1_nonce_function_ecdsa_adaptor = nonce_function_ecdsa_adaptor; int secp256k1_ecdsa_adaptor_encrypt(const secp256k1_context* ctx, unsigned char *adaptor_sig162, unsigned char *seckey32, const secp256k1_pubkey *enckey, const unsigned char *msg32, secp256k1_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { @@ -232,6 +244,7 @@ int secp256k1_ecdsa_adaptor_verify(const secp256k1_context* ctx, const unsigned secp256k1_gej derived_rp; secp256k1_scalar sn, u1, u2; secp256k1_gej pubkeyj; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); VERIFY_CHECK(ctx != NULL); ARG_CHECK(adaptor_sig162 != NULL); @@ -246,7 +259,7 @@ int secp256k1_ecdsa_adaptor_verify(const secp256k1_context* ctx, const unsigned return 0; } /* DLEQ_verify((R', Y, R), dleq_proof) */ - if(!secp256k1_dleq_verify(&dleq_proof_s, &dleq_proof_e, &rp, &enckey_ge, &r)) { + if(!secp256k1_dleq_verify(hash_ctx, &dleq_proof_s, &dleq_proof_e, &rp, &enckey_ge, &r)) { return 0; } secp256k1_scalar_set_b32(&msg, msg32, NULL); diff --git a/src/modules/ecdsa_adaptor/tests_impl.h b/src/modules/ecdsa_adaptor/tests_impl.h index 090589973..693cc7dde 100644 --- a/src/modules/ecdsa_adaptor/tests_impl.h +++ b/src/modules/ecdsa_adaptor/tests_impl.h @@ -20,15 +20,17 @@ static void
rand_point(secp256k1_ge *point) { } static void dleq_nonce_bitflip(unsigned char **args, size_t n_flip, size_t n_bytes) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); secp256k1_scalar k1, k2; - CHECK(secp256k1_dleq_nonce(&k1, args[0], args[1], args[2], args[3], NULL, args[4]) == 1); + CHECK(secp256k1_dleq_nonce(hash_ctx, &k1, args[0], args[1], args[2], args[3], NULL, args[4]) == 1); testrand_flip(args[n_flip], n_bytes); - CHECK(secp256k1_dleq_nonce(&k2, args[0], args[1], args[2], args[3], NULL, args[4]) == 1); + CHECK(secp256k1_dleq_nonce(hash_ctx, &k2, args[0], args[1], args[2], args[3], NULL, args[4]) == 1); CHECK(secp256k1_scalar_eq(&k1, &k2) == 0); } static void dleq_tests_internal(void) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); secp256k1_scalar s, e, sk, k; secp256k1_ge gen2, p1, p2; secp256k1_ge p[2]; @@ -46,20 +48,20 @@ static void dleq_tests_internal(void) { p1 = p[0]; p2 = p[1]; CHECK(secp256k1_dleq_prove(CTX, &s, &e, &sk, &p1, &gen2, &p2, NULL, NULL) == 1); - CHECK(secp256k1_dleq_verify(&s, &e, &p1, &gen2, &p2) == 1); + CHECK(secp256k1_dleq_verify(hash_ctx, &s, &e, &p1, &gen2, &p2) == 1); { secp256k1_scalar tmp; secp256k1_scalar_set_int(&tmp, 1); - CHECK(secp256k1_dleq_verify(&tmp, &e, &p1, &gen2, &p2) == 0); - CHECK(secp256k1_dleq_verify(&s, &tmp, &p1, &gen2, &p2) == 0); + CHECK(secp256k1_dleq_verify(hash_ctx, &tmp, &e, &p1, &gen2, &p2) == 0); + CHECK(secp256k1_dleq_verify(hash_ctx, &s, &tmp, &p1, &gen2, &p2) == 0); } { secp256k1_ge p_tmp; rand_point(&p_tmp); - CHECK(secp256k1_dleq_verify(&s, &e, &p_tmp, &gen2, &p2) == 0); - CHECK(secp256k1_dleq_verify(&s, &e, &p1, &p_tmp, &p2) == 0); - CHECK(secp256k1_dleq_verify(&s, &e, &p1, &gen2, &p_tmp) == 0); + CHECK(secp256k1_dleq_verify(hash_ctx, &s, &e, &p_tmp, &gen2, &p2) == 0); + CHECK(secp256k1_dleq_verify(hash_ctx, &s, &e, &p1, &p_tmp, &p2) == 0); + CHECK(secp256k1_dleq_verify(hash_ctx, &s, &e, &p1, &gen2, &p_tmp) == 0); } /* Nonce tests */ @@ -67,7 
+69,7 @@ static void dleq_tests_internal(void) { secp256k1_eckey_pubkey_serialize33(&gen2, gen2_33); secp256k1_eckey_pubkey_serialize33(&p1, p1_33); secp256k1_eckey_pubkey_serialize33(&p2, p2_33); - CHECK(secp256k1_dleq_nonce(&k, sk32, gen2_33, p1_33, p2_33, NULL, NULL) == 1); + CHECK(secp256k1_dleq_nonce(hash_ctx, &k, sk32, gen2_33, p1_33, p2_33, NULL, NULL) == 1); testrand_bytes_test(sk32, sizeof(sk32)); testrand_bytes_test(gen2_33, sizeof(gen2_33)); @@ -93,7 +95,7 @@ static void dleq_tests_internal(void) { } /* NULL aux_rand argument is allowed. */ - CHECK(secp256k1_dleq_nonce(&k, sk32, gen2_33, p1_33, p2_33, NULL, NULL) == 1); + CHECK(secp256k1_dleq_nonce(hash_ctx, &k, sk32, gen2_33, p1_33, p2_33, NULL, NULL) == 1); } static void rand_flip_bit(unsigned char *array, size_t n) { @@ -715,6 +717,7 @@ static void nonce_function_ecdsa_adaptor_bitflip(unsigned char **args, size_t n_ } static void run_nonce_function_ecdsa_adaptor_tests(void) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); static const unsigned char tag[] = {'E', 'C', 'D', 'S', 'A', 'a', 'd', 'a', 'p', 't', 'o', 'r', '/', 'n', 'o', 'n'}; static const unsigned char aux_tag[] = {'E', 'C', 'D', 'S', 'A', 'a', 'd', 'a', 'p', 't', 'o', 'r', '/', 'a', 'u', 'x'}; unsigned char algo[] = {'E', 'C', 'D', 'S', 'A', 'a', 'd', 'a', 'p', 't', 'o', 'r', '/', 'n', 'o', 'n'}; @@ -733,19 +736,19 @@ static void run_nonce_function_ecdsa_adaptor_tests(void) { * secp256k1_nonce_function_ecdsa_adaptor_sha256_tagged has the expected * state. */ secp256k1_nonce_function_ecdsa_adaptor_sha256_tagged(&sha_optimized); - test_sha256_tag_midstate(&sha_optimized, tag, sizeof(tag)); + test_sha256_tag_midstate(hash_ctx, &sha_optimized, tag, sizeof(tag)); /* Check that hash initialized by * secp256k1_nonce_function_ecdsa_adaptor_sha256_tagged_aux has the expected * state. 
*/ secp256k1_nonce_function_ecdsa_adaptor_sha256_tagged_aux(&sha_optimized); - test_sha256_tag_midstate(&sha_optimized, aux_tag, sizeof(aux_tag)); + test_sha256_tag_midstate(hash_ctx, &sha_optimized, aux_tag, sizeof(aux_tag)); /* Check that hash initialized by * secp256k1_nonce_function_dleq_sha256_tagged_aux has the expected * state. */ secp256k1_nonce_function_dleq_sha256_tagged(&sha_optimized); - test_sha256_tag_midstate(&sha_optimized, dleq_tag, sizeof(dleq_tag)); + test_sha256_tag_midstate(hash_ctx, &sha_optimized, dleq_tag, sizeof(dleq_tag)); testrand_bytes_test(msg, sizeof(msg)); testrand_bytes_test(key, sizeof(key)); diff --git a/src/modules/ecdsa_s2c/main_impl.h b/src/modules/ecdsa_s2c/main_impl.h index cdc54737b..d8d853845 100644 --- a/src/modules/ecdsa_s2c/main_impl.h +++ b/src/modules/ecdsa_s2c/main_impl.h @@ -55,6 +55,7 @@ static void secp256k1_s2c_ecdsa_data_sha256_tagged(secp256k1_sha256 *sha) { int secp256k1_ecdsa_s2c_sign(const secp256k1_context* ctx, secp256k1_ecdsa_signature* signature, secp256k1_ecdsa_s2c_opening* s2c_opening, const unsigned char *msg32, const unsigned char *seckey, const unsigned char* s2c_data32) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_scalar r, s; int ret; unsigned char ndata[32]; @@ -72,8 +73,8 @@ int secp256k1_ecdsa_s2c_sign(const secp256k1_context* ctx, secp256k1_ecdsa_signa * to derive nonces even if only a SHA256 commitment to the data is * known. This is important in the ECDSA anti-exfil protocol. 
*/ secp256k1_s2c_ecdsa_data_sha256_tagged(&s2c_sha); - secp256k1_sha256_write(&s2c_sha, s2c_data32, 32); - secp256k1_sha256_finalize(&s2c_sha, ndata); + secp256k1_sha256_write(hash_ctx, &s2c_sha, s2c_data32, 32); + secp256k1_sha256_finalize(hash_ctx, &s2c_sha, ndata); secp256k1_sha256_clear(&s2c_sha); secp256k1_s2c_ecdsa_point_sha256_tagged(&s2c_sha); @@ -85,6 +86,7 @@ int secp256k1_ecdsa_s2c_sign(const secp256k1_context* ctx, secp256k1_ecdsa_signa } int secp256k1_ecdsa_s2c_verify_commit(const secp256k1_context* ctx, const secp256k1_ecdsa_signature* sig, const unsigned char* data32, const secp256k1_ecdsa_s2c_opening* opening) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_ge commitment_ge; secp256k1_ge original_pubnonce_ge; unsigned char x_bytes[32]; @@ -100,7 +102,7 @@ int secp256k1_ecdsa_s2c_verify_commit(const secp256k1_context* ctx, const secp25 return 0; } secp256k1_s2c_ecdsa_point_sha256_tagged(&s2c_sha); - if (!secp256k1_ec_commit(&commitment_ge, &original_pubnonce_ge, &s2c_sha, data32, 32)) { + if (!secp256k1_ec_commit(hash_ctx, &commitment_ge, &original_pubnonce_ge, &s2c_sha, data32, 32)) { return 0; } @@ -128,14 +130,15 @@ int secp256k1_ecdsa_s2c_verify_commit(const secp256k1_context* ctx, const secp25 /*** anti-exfil ***/ int secp256k1_ecdsa_anti_exfil_host_commit(const secp256k1_context* ctx, unsigned char* rand_commitment32, const unsigned char* rand32) { secp256k1_sha256 sha; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); VERIFY_CHECK(ctx != NULL); ARG_CHECK(rand_commitment32 != NULL); ARG_CHECK(rand32 != NULL); secp256k1_s2c_ecdsa_data_sha256_tagged(&sha); - secp256k1_sha256_write(&sha, rand32, 32); - secp256k1_sha256_finalize(&sha, rand_commitment32); + secp256k1_sha256_write(hash_ctx, &sha, rand32, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, rand_commitment32); secp256k1_sha256_clear(&sha); return 1; } diff --git a/src/modules/ecdsa_s2c/tests_impl.h 
b/src/modules/ecdsa_s2c/tests_impl.h index d13e51647..ba4f158c5 100644 --- a/src/modules/ecdsa_s2c/tests_impl.h +++ b/src/modules/ecdsa_s2c/tests_impl.h @@ -11,6 +11,7 @@ #include "../../unit_test.h" static void test_ecdsa_s2c_tagged_hash(void) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); unsigned char tag_data[] = {'s', '2', 'c', '/', 'e', 'c', 'd', 's', 'a', '/', 'd', 'a', 't', 'a'}; unsigned char tag_point[] = {'s', '2', 'c', '/', 'e', 'c', 'd', 's', 'a', '/', 'p', 'o', 'i', 'n', 't'}; secp256k1_sha256 sha; @@ -18,16 +19,16 @@ static void test_ecdsa_s2c_tagged_hash(void) { unsigned char output[32]; unsigned char output_optimized[32]; - secp256k1_sha256_initialize_tagged(&sha, tag_data, sizeof(tag_data)); + secp256k1_sha256_initialize_tagged(hash_ctx, &sha, tag_data, sizeof(tag_data)); secp256k1_s2c_ecdsa_data_sha256_tagged(&sha_optimized); - secp256k1_sha256_finalize(&sha, output); - secp256k1_sha256_finalize(&sha_optimized, output_optimized); + secp256k1_sha256_finalize(hash_ctx, &sha, output); + secp256k1_sha256_finalize(hash_ctx, &sha_optimized, output_optimized); CHECK(secp256k1_memcmp_var(output, output_optimized, 32) == 0); - secp256k1_sha256_initialize_tagged(&sha, tag_point, sizeof(tag_point)); + secp256k1_sha256_initialize_tagged(hash_ctx, &sha, tag_point, sizeof(tag_point)); secp256k1_s2c_ecdsa_point_sha256_tagged(&sha_optimized); - secp256k1_sha256_finalize(&sha, output); - secp256k1_sha256_finalize(&sha_optimized, output_optimized); + secp256k1_sha256_finalize(hash_ctx, &sha, output); + secp256k1_sha256_finalize(hash_ctx, &sha_optimized, output_optimized); CHECK(secp256k1_memcmp_var(output, output_optimized, 32) == 0); } @@ -171,7 +172,7 @@ static void test_ecdsa_s2c_fixed_vectors(void) { }; size_t i; - for (i = 0; i < sizeof(ecdsa_s2c_tests) / sizeof(ecdsa_s2c_tests[0]); i++) { + for (i = 0; i < ARRAY_SIZE(ecdsa_s2c_tests); i++) { secp256k1_ecdsa_s2c_opening s2c_opening; unsigned char opening_ser[33]; const 
ecdsa_s2c_test *test = &ecdsa_s2c_tests[i]; @@ -248,7 +249,7 @@ static void test_ecdsa_anti_exfil_signer_commit(void) { 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, }; /* Check that original pubnonce is derived from s2c_data */ - for (i = 0; i < sizeof(ecdsa_s2c_tests) / sizeof(ecdsa_s2c_tests[0]); i++) { + for (i = 0; i < ARRAY_SIZE(ecdsa_s2c_tests); i++) { secp256k1_ecdsa_s2c_opening s2c_opening; unsigned char buf[33]; const ecdsa_s2c_test *test = &ecdsa_s2c_tests[i]; diff --git a/src/modules/ellswift/main_impl.h b/src/modules/ellswift/main_impl.h index f9ef0ac86..27cb3db65 100644 --- a/src/modules/ellswift/main_impl.h +++ b/src/modules/ellswift/main_impl.h @@ -307,7 +307,7 @@ static int secp256k1_ellswift_xswiftec_inv_var(secp256k1_fe *t, const secp256k1_ * hasher is a SHA256 object to which an incrementing 4-byte counter is written to generate randomness. * Writing 13 bytes (4 bytes for counter, plus 9 bytes for the SHA256 padding) cannot cross a * 64-byte block size boundary (to make sure it only triggers a single SHA256 compression). */ -static void secp256k1_ellswift_prng(unsigned char* out32, const secp256k1_sha256 *hasher, uint32_t cnt) { +static void secp256k1_ellswift_prng(const secp256k1_hash_ctx *hash_ctx, unsigned char* out32, const secp256k1_sha256 *hasher, uint32_t cnt) { secp256k1_sha256 hash = *hasher; unsigned char buf4[4]; #ifdef VERIFY @@ -317,8 +317,8 @@ static void secp256k1_ellswift_prng(unsigned char* out32, const secp256k1_sha256 buf4[1] = cnt >> 8; buf4[2] = cnt >> 16; buf4[3] = cnt >> 24; - secp256k1_sha256_write(&hash, buf4, 4); - secp256k1_sha256_finalize(&hash, out32); + secp256k1_sha256_write(hash_ctx, &hash, buf4, 4); + secp256k1_sha256_finalize(hash_ctx, &hash, out32); /* Writing and finalizing together should trigger exactly one SHA256 compression. 
*/ VERIFY_CHECK(((hash.bytes) >> 6) == (blocks + 1)); @@ -330,7 +330,7 @@ static void secp256k1_ellswift_prng(unsigned char* out32, const secp256k1_sha256 * needs encoding. * * hasher is a hasher in the secp256k1_ellswift_prng sense, with the same restrictions. */ -static void secp256k1_ellswift_xelligatorswift_var(unsigned char *u32, secp256k1_fe *t, const secp256k1_fe *x, const secp256k1_sha256 *hasher) { +static void secp256k1_ellswift_xelligatorswift_var(const secp256k1_context *ctx, unsigned char *u32, secp256k1_fe *t, const secp256k1_fe *x, const secp256k1_sha256 *hasher) { /* Pool of 3-bit branch values. */ unsigned char branch_hash[32]; /* Number of 3-bit values in branch_hash left. */ @@ -346,14 +346,14 @@ static void secp256k1_ellswift_xelligatorswift_var(unsigned char *u32, secp256k1 secp256k1_fe u; /* If the pool of branch values is empty, populate it. */ if (branches_left == 0) { - secp256k1_ellswift_prng(branch_hash, hasher, cnt++); + secp256k1_ellswift_prng(secp256k1_get_hash_context(ctx), branch_hash, hasher, cnt++); branches_left = 64; } /* Take a 3-bit branch value from the branch pool (top bit is discarded). */ --branches_left; branch = (branch_hash[branches_left >> 1] >> ((branches_left & 1) << 2)) & 7; /* Compute a new u value by hashing. */ - secp256k1_ellswift_prng(u32, hasher, cnt++); + secp256k1_ellswift_prng(secp256k1_get_hash_context(ctx), u32, hasher, cnt++); /* overflow is not a problem (we prefer uniform u32 over uniform u). */ secp256k1_fe_set_b32_mod(&u, u32); /* Since u is the output of a hash, it should practically never be 0. We could apply the @@ -372,8 +372,8 @@ static void secp256k1_ellswift_xelligatorswift_var(unsigned char *u32, secp256k1 * as input, and returns an encoding that matches the provided Y coordinate rather than a random * one. 
*/ -static void secp256k1_ellswift_elligatorswift_var(unsigned char *u32, secp256k1_fe *t, const secp256k1_ge *p, const secp256k1_sha256 *hasher) { - secp256k1_ellswift_xelligatorswift_var(u32, t, &p->x, hasher); +static void secp256k1_ellswift_elligatorswift_var(const secp256k1_context *ctx, unsigned char *u32, secp256k1_fe *t, const secp256k1_ge *p, const secp256k1_sha256 *hasher) { + secp256k1_ellswift_xelligatorswift_var(ctx, u32, t, &p->x, hasher); secp256k1_fe_normalize_var(t); if (secp256k1_fe_is_odd(t) != secp256k1_fe_is_odd(&p->y)) { secp256k1_fe_negate(t, t, 1); @@ -406,11 +406,11 @@ int secp256k1_ellswift_encode(const secp256k1_context *ctx, unsigned char *ell64 * BIP340 tagged hash with tag "secp256k1_ellswift_encode". */ secp256k1_ellswift_sha256_init_encode(&hash); secp256k1_eckey_pubkey_serialize33(&p, p64); - secp256k1_sha256_write(&hash, p64, sizeof(p64)); - secp256k1_sha256_write(&hash, rnd32, 32); + secp256k1_sha256_write(secp256k1_get_hash_context(ctx), &hash, p64, sizeof(p64)); + secp256k1_sha256_write(secp256k1_get_hash_context(ctx), &hash, rnd32, 32); /* Compute ElligatorSwift encoding and construct output. */ - secp256k1_ellswift_elligatorswift_var(ell64, &t, &p, &hash); /* puts u in ell64[0..32] */ + secp256k1_ellswift_elligatorswift_var(ctx, ell64, &t, &p, &hash); /* puts u in ell64[0..32] */ secp256k1_fe_get_b32(ell64 + 32, &t); /* puts t in ell64[32..64] */ return 1; } @@ -452,13 +452,13 @@ int secp256k1_ellswift_create(const secp256k1_context *ctx, unsigned char *ell64 /* Set up hasher state. The used RNG is H(privkey || "\x00"*32 [|| auxrnd32] || cnt++), * using BIP340 tagged hash with tag "secp256k1_ellswift_create". 
*/ secp256k1_ellswift_sha256_init_create(&hash); - secp256k1_sha256_write(&hash, seckey32, 32); - secp256k1_sha256_write(&hash, zero32, sizeof(zero32)); + secp256k1_sha256_write(secp256k1_get_hash_context(ctx), &hash, seckey32, 32); + secp256k1_sha256_write(secp256k1_get_hash_context(ctx), &hash, zero32, sizeof(zero32)); secp256k1_declassify(ctx, &hash, sizeof(hash)); /* private key is hashed now */ - if (auxrnd32) secp256k1_sha256_write(&hash, auxrnd32, 32); + if (auxrnd32) secp256k1_sha256_write(secp256k1_get_hash_context(ctx), &hash, auxrnd32, 32); /* Compute ElligatorSwift encoding and construct output. */ - secp256k1_ellswift_elligatorswift_var(ell64, &t, &p, &hash); /* puts u in ell64[0..32] */ + secp256k1_ellswift_elligatorswift_var(ctx, ell64, &t, &p, &hash); /* puts u in ell64[0..32] */ secp256k1_fe_get_b32(ell64 + 32, &t); /* puts t in ell64[32..64] */ secp256k1_memczero(ell64, 64, !ret); @@ -482,20 +482,24 @@ int secp256k1_ellswift_decode(const secp256k1_context *ctx, secp256k1_pubkey *pu return 1; } -static int ellswift_xdh_hash_function_prefix(unsigned char *output, const unsigned char *x32, const unsigned char *ell_a64, const unsigned char *ell_b64, void *data) { +static int ellswift_xdh_hash_function_prefix_impl(const secp256k1_hash_ctx *hash_ctx, unsigned char *output, const unsigned char *x32, const unsigned char *ell_a64, const unsigned char *ell_b64, void *data) { secp256k1_sha256 sha; secp256k1_sha256_initialize(&sha); - secp256k1_sha256_write(&sha, data, 64); - secp256k1_sha256_write(&sha, ell_a64, 64); - secp256k1_sha256_write(&sha, ell_b64, 64); - secp256k1_sha256_write(&sha, x32, 32); - secp256k1_sha256_finalize(&sha, output); + secp256k1_sha256_write(hash_ctx, &sha, data, 64); + secp256k1_sha256_write(hash_ctx, &sha, ell_a64, 64); + secp256k1_sha256_write(hash_ctx, &sha, ell_b64, 64); + secp256k1_sha256_write(hash_ctx, &sha, x32, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, output); secp256k1_sha256_clear(&sha); return 1; } +static int 
ellswift_xdh_hash_function_prefix(unsigned char *output, const unsigned char *x32, const unsigned char *ell_a64, const unsigned char *ell_b64, void *data) { + return ellswift_xdh_hash_function_prefix_impl(secp256k1_get_hash_context(secp256k1_context_static), output, x32, ell_a64, ell_b64, data); +} + /** Set hash state to the BIP340 tagged hash midstate for "bip324_ellswift_xonly_ecdh". */ static void secp256k1_ellswift_sha256_init_bip324(secp256k1_sha256* hash) { static const uint32_t midstate[8] = { @@ -505,21 +509,25 @@ static void secp256k1_ellswift_sha256_init_bip324(secp256k1_sha256* hash) { secp256k1_sha256_initialize_midstate(hash, 64, midstate); } -static int ellswift_xdh_hash_function_bip324(unsigned char* output, const unsigned char *x32, const unsigned char *ell_a64, const unsigned char *ell_b64, void *data) { +static int ellswift_xdh_hash_function_bip324_impl(const secp256k1_hash_ctx *hash_ctx, unsigned char* output, const unsigned char *x32, const unsigned char *ell_a64, const unsigned char *ell_b64, void *data) { secp256k1_sha256 sha; (void)data; secp256k1_ellswift_sha256_init_bip324(&sha); - secp256k1_sha256_write(&sha, ell_a64, 64); - secp256k1_sha256_write(&sha, ell_b64, 64); - secp256k1_sha256_write(&sha, x32, 32); - secp256k1_sha256_finalize(&sha, output); + secp256k1_sha256_write(hash_ctx, &sha, ell_a64, 64); + secp256k1_sha256_write(hash_ctx, &sha, ell_b64, 64); + secp256k1_sha256_write(hash_ctx, &sha, x32, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, output); secp256k1_sha256_clear(&sha); return 1; } +static int ellswift_xdh_hash_function_bip324(unsigned char* output, const unsigned char *x32, const unsigned char *ell_a64, const unsigned char *ell_b64, void *data) { + return ellswift_xdh_hash_function_bip324_impl(secp256k1_get_hash_context(secp256k1_context_static), output, x32, ell_a64, ell_b64, data); +} + const secp256k1_ellswift_xdh_hash_function secp256k1_ellswift_xdh_hash_function_prefix = ellswift_xdh_hash_function_prefix; const 
secp256k1_ellswift_xdh_hash_function secp256k1_ellswift_xdh_hash_function_bip324 = ellswift_xdh_hash_function_bip324; @@ -554,8 +562,14 @@ int secp256k1_ellswift_xdh(const secp256k1_context *ctx, unsigned char *output, secp256k1_fe_normalize(&px); secp256k1_fe_get_b32(sx, &px); - /* Invoke hasher */ - ret = hashfp(output, sx, ell_a64, ell_b64, data); + /* Invoke hasher. Use ctx-aware function by default */ + if (hashfp == secp256k1_ellswift_xdh_hash_function_bip324) { + ret = ellswift_xdh_hash_function_bip324_impl(secp256k1_get_hash_context(ctx), output, sx, ell_a64, ell_b64, data); + } else if (hashfp == secp256k1_ellswift_xdh_hash_function_prefix) { + ret = ellswift_xdh_hash_function_prefix_impl(secp256k1_get_hash_context(ctx), output, sx, ell_a64, ell_b64, data); + } else { + ret = hashfp(output, sx, ell_a64, ell_b64, data); + } secp256k1_memclear_explicit(sx, sizeof(sx)); secp256k1_fe_clear(&px); diff --git a/src/modules/ellswift/tests_impl.h b/src/modules/ellswift/tests_impl.h index e30904112..7da08d50d 100644 --- a/src/modules/ellswift/tests_impl.h +++ b/src/modules/ellswift/tests_impl.h @@ -8,6 +8,7 @@ #include "../../../include/secp256k1_ellswift.h" #include "../../unit_test.h" +#include "../../util.h" struct ellswift_xswiftec_inv_test { int enc_bitmap; @@ -180,7 +181,7 @@ static int ellswift_xdh_hash_x32(unsigned char *output, const unsigned char *x32 /* Run the test vectors for ellswift encoding */ void ellswift_encoding_test_vectors_tests(void) { int i; - for (i = 0; (unsigned)i < sizeof(ellswift_xswiftec_inv_tests) / sizeof(ellswift_xswiftec_inv_tests[0]); ++i) { + for (i = 0; (unsigned)i < ARRAY_SIZE(ellswift_xswiftec_inv_tests); ++i) { const struct ellswift_xswiftec_inv_test *testcase = &ellswift_xswiftec_inv_tests[i]; int c; for (c = 0; c < 8; ++c) { @@ -200,7 +201,7 @@ void ellswift_encoding_test_vectors_tests(void) { /* Run the test vectors for ellswift decoding */ void ellswift_decoding_test_vectors_tests(void) { int i; - for (i = 0; (unsigned)i < 
sizeof(ellswift_decode_tests) / sizeof(ellswift_decode_tests[0]); ++i) { + for (i = 0; (unsigned)i < ARRAY_SIZE(ellswift_decode_tests); ++i) { const struct ellswift_decode_test *testcase = &ellswift_decode_tests[i]; secp256k1_pubkey pubkey; secp256k1_ge ge; @@ -217,7 +218,7 @@ void ellswift_decoding_test_vectors_tests(void) { /* Run the test vectors for ellswift expected xdh BIP324 shared secrets */ void ellswift_xdh_test_vectors_tests(void) { int i; - for (i = 0; (unsigned)i < sizeof(ellswift_xdh_tests_bip324) / sizeof(ellswift_xdh_tests_bip324[0]); ++i) { + for (i = 0; (unsigned)i < ARRAY_SIZE(ellswift_xdh_tests_bip324); ++i) { const struct ellswift_xdh_test *test = &ellswift_xdh_tests_bip324[i]; unsigned char shared_secret[32]; int ret; @@ -431,9 +432,48 @@ void ellswift_xdh_correctness_tests(void) { } } +DEFINE_SHA256_TRANSFORM_PROBE(sha256_ellswift_xdh) +void ellswift_xdh_ctx_sha256_tests(void) { + /* Check ctx-provided SHA256 compression override takes effect */ + secp256k1_context *ctx = secp256k1_context_clone(CTX); + unsigned char out_default[65], out_custom[65]; + const unsigned char skA[32] = {1}, skB[32] = {2}; + unsigned char keyA[64], keyB[64], data[64] = {0}; + secp256k1_ellswift_xdh_hash_function hash_fn; + int i; + + CHECK(secp256k1_ellswift_create(ctx, keyA, skA, NULL)); + CHECK(secp256k1_ellswift_create(ctx, keyB, skB, NULL)); + + for (i = 0; i < 2; i++) { + if (i == 0) { + hash_fn = secp256k1_ellswift_xdh_hash_function_bip324; + } else { + hash_fn = secp256k1_ellswift_xdh_hash_function_prefix; + } + /* Default behavior. 
No ctx-provided SHA256 compression */ + CHECK(secp256k1_ellswift_xdh(ctx, out_default, keyA, keyB, skA, 0, hash_fn, data)); + CHECK(!sha256_ellswift_xdh_called); + + /* Override SHA256 compression directly, bypassing the ctx setter sanity checks */ + ctx->hash_ctx.fn_sha256_compression = sha256_ellswift_xdh; + CHECK(secp256k1_ellswift_xdh(ctx, out_custom, keyA, keyB, skA, 0, hash_fn, data)); + CHECK(sha256_ellswift_xdh_called); + /* Outputs must differ if custom compression was used */ + CHECK(secp256k1_memcmp_var(out_default, out_custom, 32) != 0); + + /* Restore defaults */ + sha256_ellswift_xdh_called = 0; + secp256k1_context_set_sha256_compression(ctx, NULL); + } + + secp256k1_context_destroy(ctx); +} + /* Test hash initializers */ void ellswift_hash_init_tests(void) { secp256k1_sha256 sha_optimized; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); /* "secp256k1_ellswift_encode" */ static const unsigned char encode_tag[] = {'s', 'e', 'c', 'p', '2', '5', '6', 'k', '1', '_', 'e', 'l', 'l', 's', 'w', 'i', 'f', 't', '_', 'e', 'n', 'c', 'o', 'd', 'e'}; /* "secp256k1_ellswift_create" */ @@ -445,19 +485,19 @@ void ellswift_hash_init_tests(void) { * secp256k1_ellswift_sha256_init_encode has the expected * state. */ secp256k1_ellswift_sha256_init_encode(&sha_optimized); - test_sha256_tag_midstate(&sha_optimized, encode_tag, sizeof(encode_tag)); + test_sha256_tag_midstate(hash_ctx, &sha_optimized, encode_tag, sizeof(encode_tag)); /* Check that hash initialized by * secp256k1_ellswift_sha256_init_create has the expected * state. */ secp256k1_ellswift_sha256_init_create(&sha_optimized); - test_sha256_tag_midstate(&sha_optimized, create_tag, sizeof(create_tag)); + test_sha256_tag_midstate(hash_ctx, &sha_optimized, create_tag, sizeof(create_tag)); /* Check that hash initialized by * secp256k1_ellswift_sha256_init_bip324 has the expected * state. 
*/ secp256k1_ellswift_sha256_init_bip324(&sha_optimized); - test_sha256_tag_midstate(&sha_optimized, bip324_tag, sizeof(bip324_tag)); + test_sha256_tag_midstate(hash_ctx, &sha_optimized, bip324_tag, sizeof(bip324_tag)); } void ellswift_xdh_bad_scalar_tests(void) { @@ -498,6 +538,7 @@ static const struct tf_test_entry tests_ellswift[] = { CASE1(ellswift_xdh_correctness_tests), CASE1(ellswift_hash_init_tests), CASE1(ellswift_xdh_bad_scalar_tests), + CASE1(ellswift_xdh_ctx_sha256_tests), }; #endif diff --git a/src/modules/generator/main_impl.h b/src/modules/generator/main_impl.h index c58d66fe5..c2a1adff6 100644 --- a/src/modules/generator/main_impl.h +++ b/src/modules/generator/main_impl.h @@ -211,6 +211,7 @@ static int secp256k1_generator_generate_internal(const secp256k1_context* ctx, s secp256k1_sha256 sha256; unsigned char b32[32]; int ret = 1; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); if (blind32) { secp256k1_scalar blind; @@ -220,9 +221,9 @@ static int secp256k1_generator_generate_internal(const secp256k1_context* ctx, s } secp256k1_sha256_initialize(&sha256); - secp256k1_sha256_write(&sha256, prefix1, 16); - secp256k1_sha256_write(&sha256, key32, 32); - secp256k1_sha256_finalize(&sha256, b32); + secp256k1_sha256_write(hash_ctx, &sha256, prefix1, 16); + secp256k1_sha256_write(hash_ctx, &sha256, key32, 32); + secp256k1_sha256_finalize(hash_ctx, &sha256, b32); secp256k1_sha256_clear(&sha256); ret &= secp256k1_fe_set_b32_limit(&t, b32); shallue_van_de_woestijne(&add, &t); @@ -233,9 +234,9 @@ static int secp256k1_generator_generate_internal(const secp256k1_context* ctx, s } secp256k1_sha256_initialize(&sha256); - secp256k1_sha256_write(&sha256, prefix2, 16); - secp256k1_sha256_write(&sha256, key32, 32); - secp256k1_sha256_finalize(&sha256, b32); + secp256k1_sha256_write(hash_ctx, &sha256, prefix2, 16); + secp256k1_sha256_write(hash_ctx, &sha256, key32, 32); + secp256k1_sha256_finalize(hash_ctx, &sha256, b32); 
secp256k1_sha256_clear(&sha256); ret &= secp256k1_fe_set_b32_limit(&t, b32); shallue_van_de_woestijne(&add, &t); diff --git a/src/modules/musig/keyagg.h b/src/modules/musig/keyagg.h index a0b37252f..30e77aa95 100644 --- a/src/modules/musig/keyagg.h +++ b/src/modules/musig/keyagg.h @@ -27,6 +27,6 @@ typedef struct { static int secp256k1_keyagg_cache_load(const secp256k1_context* ctx, secp256k1_keyagg_cache_internal *cache_i, const secp256k1_musig_keyagg_cache *cache); -static void secp256k1_musig_keyaggcoef(secp256k1_scalar *r, const secp256k1_keyagg_cache_internal *cache_i, secp256k1_ge *pk); +static void secp256k1_musig_keyaggcoef(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *r, const secp256k1_keyagg_cache_internal *cache_i, secp256k1_ge *pk); #endif diff --git a/src/modules/musig/keyagg_impl.h b/src/modules/musig/keyagg_impl.h index 4eb48ddc8..f67245d56 100644 --- a/src/modules/musig/keyagg_impl.h +++ b/src/modules/musig/keyagg_impl.h @@ -82,9 +82,9 @@ static int secp256k1_musig_compute_pks_hash(const secp256k1_context *ctx, unsign return 0; } VERIFY_CHECK(ser_len == sizeof(ser)); - secp256k1_sha256_write(&sha, ser, sizeof(ser)); + secp256k1_sha256_write(secp256k1_get_hash_context(ctx), &sha, ser, sizeof(ser)); } - secp256k1_sha256_finalize(&sha, pks_hash); + secp256k1_sha256_finalize(secp256k1_get_hash_context(ctx), &sha, pks_hash); return 1; } @@ -103,7 +103,7 @@ static void secp256k1_musig_keyaggcoef_sha256(secp256k1_sha256 *sha) { * second_pk is the point at infinity in case there is no second_pk. Assumes * that pk is not the point at infinity and that the Y-coordinates of pk and * second_pk are normalized. 
*/ -static void secp256k1_musig_keyaggcoef_internal(secp256k1_scalar *r, const unsigned char *pks_hash, secp256k1_ge *pk, const secp256k1_ge *second_pk) { +static void secp256k1_musig_keyaggcoef_internal(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *r, const unsigned char *pks_hash, secp256k1_ge *pk, const secp256k1_ge *second_pk) { VERIFY_CHECK(!secp256k1_ge_is_infinity(pk)); if (!secp256k1_ge_is_infinity(second_pk) @@ -113,20 +113,20 @@ static void secp256k1_musig_keyaggcoef_internal(secp256k1_scalar *r, const unsig secp256k1_sha256 sha; unsigned char buf[33]; secp256k1_musig_keyaggcoef_sha256(&sha); - secp256k1_sha256_write(&sha, pks_hash, 32); + secp256k1_sha256_write(hash_ctx, &sha, pks_hash, 32); /* Serialization does not fail since the pk is not the point at infinity * (according to this function's precondition). */ secp256k1_eckey_pubkey_serialize33(pk, buf); - secp256k1_sha256_write(&sha, buf, sizeof(buf)); - secp256k1_sha256_finalize(&sha, buf); + secp256k1_sha256_write(hash_ctx, &sha, buf, sizeof(buf)); + secp256k1_sha256_finalize(hash_ctx, &sha, buf); secp256k1_scalar_set_b32(r, buf, NULL); } } /* Assumes that pk is not the point at infinity and that the Y-coordinates of pk * and cache_i->second_pk are normalized. 
*/ -static void secp256k1_musig_keyaggcoef(secp256k1_scalar *r, const secp256k1_keyagg_cache_internal *cache_i, secp256k1_ge *pk) { - secp256k1_musig_keyaggcoef_internal(r, cache_i->pks_hash, pk, &cache_i->second_pk); +static void secp256k1_musig_keyaggcoef(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *r, const secp256k1_keyagg_cache_internal *cache_i, secp256k1_ge *pk) { + secp256k1_musig_keyaggcoef_internal(hash_ctx, r, cache_i->pks_hash, pk, &cache_i->second_pk); } typedef struct { @@ -149,7 +149,7 @@ static int secp256k1_musig_pubkey_agg_callback(secp256k1_scalar *sc, secp256k1_g #else (void) ret; #endif - secp256k1_musig_keyaggcoef_internal(sc, ctx->pks_hash, pt, &ctx->second_pk); + secp256k1_musig_keyaggcoef_internal(secp256k1_get_hash_context(ctx->ctx), sc, ctx->pks_hash, pt, &ctx->second_pk); return 1; } diff --git a/src/modules/musig/session_impl.h b/src/modules/musig/session_impl.h index c3ef3c9f7..7920fbfdb 100644 --- a/src/modules/musig/session_impl.h +++ b/src/modules/musig/session_impl.h @@ -289,20 +289,20 @@ int secp256k1_musig_partial_sig_serialize(const secp256k1_context* ctx, unsigned } /* Write optional inputs into the hash */ -static void secp256k1_nonce_function_musig_helper(secp256k1_sha256 *sha, unsigned int prefix_size, const unsigned char *data, unsigned char len) { +static void secp256k1_nonce_function_musig_helper(const secp256k1_hash_ctx *hash_ctx, secp256k1_sha256 *sha, unsigned int prefix_size, const unsigned char *data, unsigned char len) { unsigned char zero[7] = { 0 }; /* The spec requires length prefixes to be between 1 and 8 bytes * (inclusive) */ VERIFY_CHECK(prefix_size >= 1 && prefix_size <= 8); /* Since the length of all input data fits in a byte, we can always pad the * length prefix with prefix_size - 1 zero bytes. 
*/ - secp256k1_sha256_write(sha, zero, prefix_size - 1); + secp256k1_sha256_write(hash_ctx, sha, zero, prefix_size - 1); if (data != NULL) { - secp256k1_sha256_write(sha, &len, 1); - secp256k1_sha256_write(sha, data, len); + secp256k1_sha256_write(hash_ctx, sha, &len, 1); + secp256k1_sha256_write(hash_ctx, sha, data, len); } else { len = 0; - secp256k1_sha256_write(sha, &len, 1); + secp256k1_sha256_write(hash_ctx, sha, &len, 1); } } @@ -326,7 +326,7 @@ static void secp256k1_nonce_function_musig_sha256_tagged(secp256k1_sha256 *sha) secp256k1_sha256_initialize_midstate(sha, 64, midstate); } -static void secp256k1_nonce_function_musig(secp256k1_scalar *k, const unsigned char *session_secrand, const unsigned char *msg32, const unsigned char *seckey32, const unsigned char *pk33, const unsigned char *agg_pk32, const unsigned char *extra_input32) { +static void secp256k1_nonce_function_musig(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *k, const unsigned char *session_secrand, const unsigned char *msg32, const unsigned char *seckey32, const unsigned char *pk33, const unsigned char *agg_pk32, const unsigned char *extra_input32) { secp256k1_sha256 sha; unsigned char rand[32]; unsigned char i; @@ -334,8 +334,8 @@ static void secp256k1_nonce_function_musig(secp256k1_scalar *k, const unsigned c if (seckey32 != NULL) { secp256k1_nonce_function_musig_sha256_tagged_aux(&sha); - secp256k1_sha256_write(&sha, session_secrand, 32); - secp256k1_sha256_finalize(&sha, rand); + secp256k1_sha256_write(hash_ctx, &sha, session_secrand, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, rand); for (i = 0; i < 32; i++) { rand[i] ^= seckey32[i]; } @@ -344,21 +344,21 @@ static void secp256k1_nonce_function_musig(secp256k1_scalar *k, const unsigned c } secp256k1_nonce_function_musig_sha256_tagged(&sha); - secp256k1_sha256_write(&sha, rand, sizeof(rand)); - secp256k1_nonce_function_musig_helper(&sha, 1, pk33, 33); - secp256k1_nonce_function_musig_helper(&sha, 1, agg_pk32, 32); + 
secp256k1_sha256_write(hash_ctx, &sha, rand, sizeof(rand)); + secp256k1_nonce_function_musig_helper(hash_ctx, &sha, 1, pk33, 33); + secp256k1_nonce_function_musig_helper(hash_ctx, &sha, 1, agg_pk32, 32); msg_present = msg32 != NULL; - secp256k1_sha256_write(&sha, &msg_present, 1); + secp256k1_sha256_write(hash_ctx, &sha, &msg_present, 1); if (msg_present) { - secp256k1_nonce_function_musig_helper(&sha, 8, msg32, 32); + secp256k1_nonce_function_musig_helper(hash_ctx, &sha, 8, msg32, 32); } - secp256k1_nonce_function_musig_helper(&sha, 4, extra_input32, 32); + secp256k1_nonce_function_musig_helper(hash_ctx, &sha, 4, extra_input32, 32); for (i = 0; i < 2; i++) { unsigned char buf[32]; secp256k1_sha256 sha_tmp = sha; - secp256k1_sha256_write(&sha_tmp, &i, 1); - secp256k1_sha256_finalize(&sha_tmp, buf); + secp256k1_sha256_write(hash_ctx, &sha_tmp, &i, 1); + secp256k1_sha256_finalize(hash_ctx, &sha_tmp, buf); secp256k1_scalar_set_b32(&k[i], buf, NULL); /* Attempt to erase secret data */ @@ -407,7 +407,7 @@ static int secp256k1_musig_nonce_gen_internal(const secp256k1_context* ctx, secp /* A pubkey cannot be the point at infinity */ secp256k1_eckey_pubkey_serialize33(&pk, pk_ser); - secp256k1_nonce_function_musig(k, input_nonce, msg32, seckey, pk_ser, aggpk_ser_ptr, extra_input32); + secp256k1_nonce_function_musig(secp256k1_get_hash_context(ctx), k, input_nonce, msg32, seckey, pk_ser, aggpk_ser_ptr, extra_input32); VERIFY_CHECK(!secp256k1_scalar_is_zero(&k[0])); VERIFY_CHECK(!secp256k1_scalar_is_zero(&k[1])); secp256k1_musig_secnonce_save(secnonce, k, &pk); @@ -541,7 +541,7 @@ static void secp256k1_musig_compute_noncehash_sha256_tagged(secp256k1_sha256 *sh } /* tagged_hash(aggnonce[0], aggnonce[1], agg_pk, msg) */ -static void secp256k1_musig_compute_noncehash(unsigned char *noncehash, secp256k1_ge *aggnonce, const unsigned char *agg_pk32, const unsigned char *msg) { +static void secp256k1_musig_compute_noncehash(const secp256k1_hash_ctx *hash_ctx, unsigned char 
*noncehash, secp256k1_ge *aggnonce, const unsigned char *agg_pk32, const unsigned char *msg) { unsigned char buf[33]; secp256k1_sha256 sha; int i; @@ -549,11 +549,11 @@ static void secp256k1_musig_compute_noncehash(unsigned char *noncehash, secp256k secp256k1_musig_compute_noncehash_sha256_tagged(&sha); for (i = 0; i < 2; i++) { secp256k1_musig_ge_serialize_ext(buf, &aggnonce[i]); - secp256k1_sha256_write(&sha, buf, sizeof(buf)); + secp256k1_sha256_write(hash_ctx, &sha, buf, sizeof(buf)); } - secp256k1_sha256_write(&sha, agg_pk32, 32); - secp256k1_sha256_write(&sha, msg, 32); - secp256k1_sha256_finalize(&sha, noncehash); + secp256k1_sha256_write(hash_ctx, &sha, agg_pk32, 32); + secp256k1_sha256_write(hash_ctx, &sha, msg, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, noncehash); } /* out_nonce = nonce_pts[0] + b*nonce_pts[1] */ @@ -565,12 +565,12 @@ static void secp256k1_effective_nonce(secp256k1_gej *out_nonce, const secp256k1_ secp256k1_gej_add_ge_var(out_nonce, out_nonce, &nonce_pts[0], NULL); } -static void secp256k1_musig_nonce_process_internal(int *fin_nonce_parity, unsigned char *fin_nonce, secp256k1_scalar *b, secp256k1_ge *aggnonce_pts, const unsigned char *agg_pk32, const unsigned char *msg) { +static void secp256k1_musig_nonce_process_internal(const secp256k1_context *ctx, int *fin_nonce_parity, unsigned char *fin_nonce, secp256k1_scalar *b, secp256k1_ge *aggnonce_pts, const unsigned char *agg_pk32, const unsigned char *msg) { unsigned char noncehash[32]; secp256k1_ge fin_nonce_pt; secp256k1_gej fin_nonce_ptj; - secp256k1_musig_compute_noncehash(noncehash, aggnonce_pts, agg_pk32, msg); + secp256k1_musig_compute_noncehash(secp256k1_get_hash_context(ctx), noncehash, aggnonce_pts, agg_pk32, msg); secp256k1_scalar_set_b32(b, noncehash, NULL); /* fin_nonce = aggnonce_pts[0] + b*aggnonce_pts[1] */ secp256k1_effective_nonce(&fin_nonce_ptj, aggnonce_pts, b); @@ -619,8 +619,8 @@ int secp256k1_musig_nonce_process(const secp256k1_context* ctx, secp256k1_musig_ 
secp256k1_ge_set_gej(&aggnonce_pts[0], &tmp); } - secp256k1_musig_nonce_process_internal(&session_i.fin_nonce_parity, fin_nonce, &session_i.noncecoef, aggnonce_pts, agg_pk32, msg32); - secp256k1_schnorrsig_challenge(&session_i.challenge, fin_nonce, msg32, 32, agg_pk32); + secp256k1_musig_nonce_process_internal(ctx, &session_i.fin_nonce_parity, fin_nonce, &session_i.noncecoef, aggnonce_pts, agg_pk32, msg32); + secp256k1_schnorrsig_challenge(secp256k1_get_hash_context(ctx), &session_i.challenge, fin_nonce, msg32, 32, agg_pk32); /* If there is a tweak then set `challenge` times `tweak` to the `s`-part.*/ secp256k1_scalar_set_int(&session_i.s_part, 0); @@ -690,7 +690,7 @@ int secp256k1_musig_partial_sign(const secp256k1_context* ctx, secp256k1_musig_p } /* Multiply KeyAgg coefficient */ - secp256k1_musig_keyaggcoef(&mu, &cache_i, &pk); + secp256k1_musig_keyaggcoef(secp256k1_get_hash_context(ctx), &mu, &cache_i, &pk); secp256k1_scalar_mul(&sk, &sk, &mu); if (!secp256k1_musig_session_load(ctx, &session_i, session)) { @@ -750,7 +750,7 @@ int secp256k1_musig_partial_sig_verify(const secp256k1_context* ctx, const secp2 /* Multiplying the challenge by the KeyAgg coefficient is equivalent * to multiplying the signer's public key by the coefficient, except * much easier to do. */ - secp256k1_musig_keyaggcoef(&mu, &cache_i, &pkp); + secp256k1_musig_keyaggcoef(secp256k1_get_hash_context(ctx), &mu, &cache_i, &pkp); secp256k1_scalar_mul(&e, &session_i.challenge, &mu); /* Negate e if secp256k1_fe_is_odd(&cache_i.pk.y)) XOR cache_i.parity_acc. 
diff --git a/src/modules/musig/tests_impl.h b/src/modules/musig/tests_impl.h index b6d459a2c..51b480393 100644 --- a/src/modules/musig/tests_impl.h +++ b/src/modules/musig/tests_impl.h @@ -548,12 +548,12 @@ static void musig_api_tests(void) { CHECK_ILLEGAL(CTX, secp256k1_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, pre_sig, 2)); } -static void musig_nonce_bitflip(unsigned char **args, size_t n_flip, size_t n_bytes) { +static void musig_nonce_bitflip(const secp256k1_hash_ctx *hash_ctx, unsigned char **args, size_t n_flip, size_t n_bytes) { secp256k1_scalar k1[2], k2[2]; - secp256k1_nonce_function_musig(k1, args[0], args[1], args[2], args[3], args[4], args[5]); + secp256k1_nonce_function_musig(hash_ctx, k1, args[0], args[1], args[2], args[3], args[4], args[5]); testrand_flip(args[n_flip], n_bytes); - secp256k1_nonce_function_musig(k2, args[0], args[1], args[2], args[3], args[4], args[5]); + secp256k1_nonce_function_musig(hash_ctx, k2, args[0], args[1], args[2], args[3], args[4], args[5]); CHECK(secp256k1_scalar_eq(&k1[0], &k2[0]) == 0); CHECK(secp256k1_scalar_eq(&k1[1], &k2[1]) == 0); } @@ -569,6 +569,7 @@ static void musig_nonce_test(void) { int i, j; secp256k1_scalar k[6][2]; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); testrand_bytes_test(session_secrand, sizeof(session_secrand)); testrand_bytes_test(sk, sizeof(sk)); testrand_bytes_test(pk, sizeof(pk)); @@ -584,12 +585,12 @@ static void musig_nonce_test(void) { args[4] = agg_pk; args[5] = extra_input; for (i = 0; i < COUNT; i++) { - musig_nonce_bitflip(args, 0, sizeof(session_secrand)); - musig_nonce_bitflip(args, 1, sizeof(msg)); - musig_nonce_bitflip(args, 2, sizeof(sk)); - musig_nonce_bitflip(args, 3, sizeof(pk)); - musig_nonce_bitflip(args, 4, sizeof(agg_pk)); - musig_nonce_bitflip(args, 5, sizeof(extra_input)); + musig_nonce_bitflip(hash_ctx, args, 0, sizeof(session_secrand)); + musig_nonce_bitflip(hash_ctx, args, 1, sizeof(msg)); + musig_nonce_bitflip(hash_ctx, args, 2, 
sizeof(sk)); + musig_nonce_bitflip(hash_ctx, args, 3, sizeof(pk)); + musig_nonce_bitflip(hash_ctx, args, 4, sizeof(agg_pk)); + musig_nonce_bitflip(hash_ctx, args, 5, sizeof(extra_input)); } /* Check that if any argument is NULL, a different nonce is produced than if * any other argument is NULL. */ @@ -598,12 +599,12 @@ static void musig_nonce_test(void) { memcpy(pk, session_secrand, sizeof(session_secrand)); memcpy(agg_pk, session_secrand, sizeof(agg_pk)); memcpy(extra_input, session_secrand, sizeof(extra_input)); - secp256k1_nonce_function_musig(k[0], args[0], args[1], args[2], args[3], args[4], args[5]); - secp256k1_nonce_function_musig(k[1], args[0], NULL, args[2], args[3], args[4], args[5]); - secp256k1_nonce_function_musig(k[2], args[0], args[1], NULL, args[3], args[4], args[5]); - secp256k1_nonce_function_musig(k[3], args[0], args[1], args[2], NULL, args[4], args[5]); - secp256k1_nonce_function_musig(k[4], args[0], args[1], args[2], args[3], NULL, args[5]); - secp256k1_nonce_function_musig(k[5], args[0], args[1], args[2], args[3], args[4], NULL); + secp256k1_nonce_function_musig(hash_ctx, k[0], args[0], args[1], args[2], args[3], args[4], args[5]); + secp256k1_nonce_function_musig(hash_ctx, k[1], args[0], NULL, args[2], args[3], args[4], args[5]); + secp256k1_nonce_function_musig(hash_ctx, k[2], args[0], args[1], NULL, args[3], args[4], args[5]); + secp256k1_nonce_function_musig(hash_ctx, k[3], args[0], args[1], args[2], NULL, args[4], args[5]); + secp256k1_nonce_function_musig(hash_ctx, k[4], args[0], args[1], args[2], args[3], NULL, args[5]); + secp256k1_nonce_function_musig(hash_ctx, k[5], args[0], args[1], args[2], args[3], args[4], NULL); for (i = 0; i < 6; i++) { CHECK(!secp256k1_scalar_eq(&k[i][0], &k[i][1])); for (j = i+1; j < 6; j++) { @@ -722,35 +723,36 @@ static void scriptless_atomic_swap_internal(void) { * state. 
*/ static void sha256_tag_test(void) { secp256k1_sha256 sha; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); { /* "KeyAgg list" */ static const unsigned char tag[] = {'K', 'e', 'y', 'A', 'g', 'g', ' ', 'l', 'i', 's', 't'}; secp256k1_musig_keyagglist_sha256(&sha); - test_sha256_tag_midstate(&sha, tag, sizeof(tag)); + test_sha256_tag_midstate(hash_ctx, &sha, tag, sizeof(tag)); } { /* "KeyAgg coefficient" */ static const unsigned char tag[] = {'K', 'e', 'y', 'A', 'g', 'g', ' ', 'c', 'o', 'e', 'f', 'f', 'i', 'c', 'i', 'e', 'n', 't'}; secp256k1_musig_keyaggcoef_sha256(&sha); - test_sha256_tag_midstate(&sha, tag, sizeof(tag)); + test_sha256_tag_midstate(hash_ctx, &sha, tag, sizeof(tag)); } { /* "MuSig/aux" */ static const unsigned char tag[] = { 'M', 'u', 'S', 'i', 'g', '/', 'a', 'u', 'x' }; secp256k1_nonce_function_musig_sha256_tagged_aux(&sha); - test_sha256_tag_midstate(&sha, tag, sizeof(tag)); + test_sha256_tag_midstate(hash_ctx, &sha, tag, sizeof(tag)); } { /* "MuSig/nonce" */ static const unsigned char tag[] = { 'M', 'u', 'S', 'i', 'g', '/', 'n', 'o', 'n', 'c', 'e' }; secp256k1_nonce_function_musig_sha256_tagged(&sha); - test_sha256_tag_midstate(&sha, tag, sizeof(tag)); + test_sha256_tag_midstate(hash_ctx, &sha, tag, sizeof(tag)); } { /* "MuSig/noncecoef" */ static const unsigned char tag[] = { 'M', 'u', 'S', 'i', 'g', '/', 'n', 'o', 'n', 'c', 'e', 'c', 'o', 'e', 'f' }; secp256k1_musig_compute_noncehash_sha256_tagged(&sha); - test_sha256_tag_midstate(&sha, tag, sizeof(tag)); + test_sha256_tag_midstate(hash_ctx, &sha, tag, sizeof(tag)); } } @@ -917,7 +919,7 @@ static void musig_test_vectors_keyagg(void) { size_t i; const struct musig_key_agg_vector *vector = &musig_key_agg_vector; - for (i = 0; i < sizeof(vector->valid_case)/sizeof(vector->valid_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->valid_case); i++) { const struct musig_key_agg_valid_test_case *c = &vector->valid_case[i]; enum MUSIG_ERROR error; secp256k1_musig_keyagg_cache 
keyagg_cache; @@ -927,7 +929,7 @@ static void musig_test_vectors_keyagg(void) { CHECK(secp256k1_memcmp_var(agg_pk, c->expected, sizeof(agg_pk)) == 0); } - for (i = 0; i < sizeof(vector->error_case)/sizeof(vector->error_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->error_case); i++) { const struct musig_key_agg_error_test_case *c = &vector->error_case[i]; enum MUSIG_ERROR error; secp256k1_musig_keyagg_cache keyagg_cache; @@ -941,7 +943,7 @@ static void musig_test_vectors_noncegen(void) { size_t i; const struct musig_nonce_gen_vector *vector = &musig_nonce_gen_vector; - for (i = 0; i < sizeof(vector->test_case)/sizeof(vector->test_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->test_case); i++) { const struct musig_nonce_gen_test_case *c = &vector->test_case[i]; secp256k1_musig_keyagg_cache keyagg_cache; secp256k1_musig_keyagg_cache *keyagg_cache_ptr = NULL; @@ -995,7 +997,7 @@ static void musig_test_vectors_nonceagg(void) { int j; const struct musig_nonce_agg_vector *vector = &musig_nonce_agg_vector; - for (i = 0; i < sizeof(vector->valid_case)/sizeof(vector->valid_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->valid_case); i++) { const struct musig_nonce_agg_test_case *c = &vector->valid_case[i]; secp256k1_musig_pubnonce pubnonce[2]; const secp256k1_musig_pubnonce *pubnonce_ptr[2]; @@ -1010,7 +1012,7 @@ static void musig_test_vectors_nonceagg(void) { CHECK(secp256k1_musig_aggnonce_serialize(CTX, aggnonce66, &aggnonce)); CHECK(secp256k1_memcmp_var(aggnonce66, c->expected, 33) == 0); } - for (i = 0; i < sizeof(vector->error_case)/sizeof(vector->error_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->error_case); i++) { const struct musig_nonce_agg_test_case *c = &vector->error_case[i]; secp256k1_musig_pubnonce pubnonce[2]; for (j = 0; j < 2; j++) { @@ -1034,7 +1036,7 @@ static void musig_test_vectors_signverify(void) { size_t i; const struct musig_sign_verify_vector *vector = &musig_sign_verify_vector; - for (i = 0; i < 
sizeof(vector->valid_case)/sizeof(vector->valid_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->valid_case); i++) { const struct musig_valid_case *c = &vector->valid_case[i]; enum MUSIG_ERROR error; secp256k1_musig_keyagg_cache keyagg_cache; @@ -1062,7 +1064,7 @@ static void musig_test_vectors_signverify(void) { CHECK(secp256k1_musig_pubnonce_parse(CTX, &pubnonce, vector->pubnonces[0])); CHECK(secp256k1_musig_partial_sig_verify(CTX, &partial_sig, &pubnonce, &pubkey, &keyagg_cache, &session)); } - for (i = 0; i < sizeof(vector->sign_error_case)/sizeof(vector->sign_error_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->sign_error_case); i++) { const struct musig_sign_error_case *c = &vector->sign_error_case[i]; enum MUSIG_ERROR error; secp256k1_musig_keyagg_cache keyagg_cache; @@ -1101,7 +1103,7 @@ static void musig_test_vectors_signverify(void) { musig_test_set_secnonce(&secnonce, vector->secnonces[c->secnonce_index], &pubkey); CHECK_ILLEGAL(CTX, secp256k1_musig_partial_sign(CTX, &partial_sig, &secnonce, &keypair, &keyagg_cache, &session)); } - for (i = 0; i < sizeof(vector->verify_fail_case)/sizeof(vector->verify_fail_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->verify_fail_case); i++) { const struct musig_verify_fail_error_case *c = &vector->verify_fail_case[i]; enum MUSIG_ERROR error; secp256k1_musig_keyagg_cache keyagg_cache; @@ -1135,7 +1137,7 @@ static void musig_test_vectors_signverify(void) { expected = c->error != MUSIG_SIG_VERIFY; CHECK(expected == secp256k1_musig_partial_sig_verify(CTX, &partial_sig, pubnonce, &pubkey, &keyagg_cache, &session)); } - for (i = 0; i < sizeof(vector->verify_error_case)/sizeof(vector->verify_error_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->verify_error_case); i++) { const struct musig_verify_fail_error_case *c = &vector->verify_error_case[i]; enum MUSIG_ERROR error; secp256k1_musig_keyagg_cache keyagg_cache; @@ -1163,7 +1165,7 @@ static void musig_test_vectors_tweak(void) { 
CHECK(secp256k1_musig_aggnonce_parse(CTX, &aggnonce, vector->aggnonce)); CHECK(secp256k1_ec_pubkey_parse(CTX, &pubkey, vector->pubkeys[0], sizeof(vector->pubkeys[0]))); - for (i = 0; i < sizeof(vector->valid_case)/sizeof(vector->valid_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->valid_case); i++) { const struct musig_tweak_case *c = &vector->valid_case[i]; enum MUSIG_ERROR error; secp256k1_musig_keyagg_cache keyagg_cache; @@ -1187,7 +1189,7 @@ static void musig_test_vectors_tweak(void) { CHECK(secp256k1_musig_pubnonce_parse(CTX, &pubnonce, vector->pubnonces[c->nonce_indices[c->signer_index]])); CHECK(secp256k1_musig_partial_sig_verify(CTX, &partial_sig, &pubnonce, &pubkey, &keyagg_cache, &session)); } - for (i = 0; i < sizeof(vector->error_case)/sizeof(vector->error_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->error_case); i++) { const struct musig_tweak_case *c = &vector->error_case[i]; enum MUSIG_ERROR error; secp256k1_musig_keyagg_cache keyagg_cache; @@ -1200,7 +1202,7 @@ static void musig_test_vectors_sigagg(void) { size_t i, j; const struct musig_sig_agg_vector *vector = &musig_sig_agg_vector; - for (i = 0; i < sizeof(vector->valid_case)/sizeof(vector->valid_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->valid_case); i++) { const struct musig_sig_agg_case *c = &vector->valid_case[i]; enum MUSIG_ERROR error; unsigned char final_sig[64]; @@ -1209,8 +1211,8 @@ static void musig_test_vectors_sigagg(void) { secp256k1_xonly_pubkey agg_pk; secp256k1_musig_aggnonce aggnonce; secp256k1_musig_session session; - secp256k1_musig_partial_sig partial_sig[(sizeof(vector->psigs)/sizeof(vector->psigs[0]))]; - const secp256k1_musig_partial_sig *partial_sig_ptr[(sizeof(vector->psigs)/sizeof(vector->psigs[0]))]; + secp256k1_musig_partial_sig partial_sig[ARRAY_SIZE(vector->psigs)]; + const secp256k1_musig_partial_sig *partial_sig_ptr[ARRAY_SIZE(vector->psigs)]; CHECK(musig_vectors_keyagg_and_tweak(&error, &keyagg_cache, agg_pk32, vector->pubkeys, 
vector->tweaks, c->key_indices_len, c->key_indices, c->tweak_indices_len, c->tweak_indices, c->is_xonly)); CHECK(secp256k1_musig_aggnonce_parse(CTX, &aggnonce, c->aggnonce)); @@ -1226,9 +1228,9 @@ static void musig_test_vectors_sigagg(void) { CHECK(secp256k1_xonly_pubkey_parse(CTX, &agg_pk, agg_pk32)); CHECK(secp256k1_schnorrsig_verify(CTX, final_sig, vector->msg, sizeof(vector->msg), &agg_pk) == 1); } - for (i = 0; i < sizeof(vector->error_case)/sizeof(vector->error_case[0]); i++) { + for (i = 0; i < ARRAY_SIZE(vector->error_case); i++) { const struct musig_sig_agg_case *c = &vector->error_case[i]; - secp256k1_musig_partial_sig partial_sig[(sizeof(vector->psigs)/sizeof(vector->psigs[0]))]; + secp256k1_musig_partial_sig partial_sig[ARRAY_SIZE(vector->psigs)]; for (j = 0; j < c->psig_indices_len; j++) { int expected = c->invalid_sig_idx != (int)j; CHECK(expected == secp256k1_musig_partial_sig_parse(CTX, &partial_sig[j], vector->psigs[c->psig_indices[j]])); diff --git a/src/modules/rangeproof/borromean.h b/src/modules/rangeproof/borromean.h index 5620deacf..b4d58d31d 100644 --- a/src/modules/rangeproof/borromean.h +++ b/src/modules/rangeproof/borromean.h @@ -14,10 +14,10 @@ #include "../../ecmult.h" #include "../../ecmult_gen.h" -static int secp256k1_borromean_verify(secp256k1_scalar *evalues, const unsigned char *e0, const secp256k1_scalar *s, +static int secp256k1_borromean_verify(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *evalues, const unsigned char *e0, const secp256k1_scalar *s, const secp256k1_gej *pubs, const size_t *rsizes, size_t nrings, const unsigned char *m, size_t mlen); -static int secp256k1_borromean_sign(const secp256k1_ecmult_gen_context *ecmult_gen_ctx, +static int secp256k1_borromean_sign(const secp256k1_hash_ctx *hash_ctx, const secp256k1_ecmult_gen_context *ecmult_gen_ctx, unsigned char *e0, secp256k1_scalar *s, const secp256k1_gej *pubs, const secp256k1_scalar *k, const secp256k1_scalar *sec, const size_t *rsizes, const size_t 
*secidx, size_t nrings, const unsigned char *m, size_t mlen); diff --git a/src/modules/rangeproof/borromean_impl.h b/src/modules/rangeproof/borromean_impl.h index 2fff3c28c..cd62abaae 100644 --- a/src/modules/rangeproof/borromean_impl.h +++ b/src/modules/rangeproof/borromean_impl.h @@ -20,7 +20,7 @@ #include #include -SECP256K1_INLINE static void secp256k1_borromean_hash(unsigned char *hash, const unsigned char *m, size_t mlen, const unsigned char *e, size_t elen, +SECP256K1_INLINE static void secp256k1_borromean_hash(const secp256k1_hash_ctx *hash_ctx, unsigned char *hash, const unsigned char *m, size_t mlen, const unsigned char *e, size_t elen, size_t ridx, size_t eidx) { unsigned char ring[4]; unsigned char epos[4]; @@ -28,11 +28,11 @@ SECP256K1_INLINE static void secp256k1_borromean_hash(unsigned char *hash, const secp256k1_sha256_initialize(&sha256_en); secp256k1_write_be32(ring, (uint32_t)ridx); secp256k1_write_be32(epos, (uint32_t)eidx); - secp256k1_sha256_write(&sha256_en, e, elen); - secp256k1_sha256_write(&sha256_en, m, mlen); - secp256k1_sha256_write(&sha256_en, ring, 4); - secp256k1_sha256_write(&sha256_en, epos, 4); - secp256k1_sha256_finalize(&sha256_en, hash); + secp256k1_sha256_write(hash_ctx, &sha256_en, e, elen); + secp256k1_sha256_write(hash_ctx, &sha256_en, m, mlen); + secp256k1_sha256_write(hash_ctx, &sha256_en, ring, 4); + secp256k1_sha256_write(hash_ctx, &sha256_en, epos, 4); + secp256k1_sha256_finalize(hash_ctx, &sha256_en, hash); secp256k1_sha256_clear(&sha256_en); } @@ -50,7 +50,7 @@ SECP256K1_INLINE static void secp256k1_borromean_hash(unsigned char *hash, const * | | r_i = r * | return e_0 ==== H(r_{0..i}||m) */ -int secp256k1_borromean_verify(secp256k1_scalar *evalues, const unsigned char *e0, +int secp256k1_borromean_verify(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *evalues, const unsigned char *e0, const secp256k1_scalar *s, const secp256k1_gej *pubs, const size_t *rsizes, size_t nrings, const unsigned char *m, size_t mlen) 
{ secp256k1_gej rgej; secp256k1_ge rge; @@ -71,7 +71,7 @@ int secp256k1_borromean_verify(secp256k1_scalar *evalues, const unsigned char *e secp256k1_sha256_initialize(&sha256_e0); for (i = 0; i < nrings; i++) { VERIFY_CHECK(INT_MAX - count > rsizes[i]); - secp256k1_borromean_hash(tmp, m, mlen, e0, 32, i, 0); + secp256k1_borromean_hash(hash_ctx, tmp, m, mlen, e0, 32, i, 0); secp256k1_scalar_set_b32(&ens, tmp, &overflow); for (j = 0; j < rsizes[i]; j++) { if (overflow || secp256k1_scalar_is_zero(&s[count]) || secp256k1_scalar_is_zero(&ens) || secp256k1_gej_is_infinity(&pubs[count])) { @@ -89,21 +89,21 @@ int secp256k1_borromean_verify(secp256k1_scalar *evalues, const unsigned char *e secp256k1_ge_set_gej_var(&rge, &rgej); secp256k1_eckey_pubkey_serialize33(&rge, tmp); if (j != rsizes[i] - 1) { - secp256k1_borromean_hash(tmp, m, mlen, tmp, 33, i, j + 1); + secp256k1_borromean_hash(hash_ctx, tmp, m, mlen, tmp, 33, i, j + 1); secp256k1_scalar_set_b32(&ens, tmp, &overflow); } else { - secp256k1_sha256_write(&sha256_e0, tmp, 33); + secp256k1_sha256_write(hash_ctx, &sha256_e0, tmp, 33); } count++; } } - secp256k1_sha256_write(&sha256_e0, m, mlen); - secp256k1_sha256_finalize(&sha256_e0, tmp); + secp256k1_sha256_write(hash_ctx, &sha256_e0, m, mlen); + secp256k1_sha256_finalize(hash_ctx, &sha256_e0, tmp); secp256k1_sha256_clear(&sha256_e0); return secp256k1_memcmp_var(e0, tmp, 32) == 0; } -int secp256k1_borromean_sign(const secp256k1_ecmult_gen_context *ecmult_gen_ctx, +int secp256k1_borromean_sign(const secp256k1_hash_ctx *hash_ctx, const secp256k1_ecmult_gen_context *ecmult_gen_ctx, unsigned char *e0, secp256k1_scalar *s, const secp256k1_gej *pubs, const secp256k1_scalar *k, const secp256k1_scalar *sec, const size_t *rsizes, const size_t *secidx, size_t nrings, const unsigned char *m, size_t mlen) { secp256k1_gej rgej; @@ -136,7 +136,7 @@ int secp256k1_borromean_sign(const secp256k1_ecmult_gen_context *ecmult_gen_ctx, } secp256k1_eckey_pubkey_serialize33(&rge, tmp); for (j 
= secidx[i] + 1; j < rsizes[i]; j++) { - secp256k1_borromean_hash(tmp, m, mlen, tmp, 33, i, j); + secp256k1_borromean_hash(hash_ctx, tmp, m, mlen, tmp, 33, i, j); secp256k1_scalar_set_b32(&ens, tmp, &overflow); if (overflow || secp256k1_scalar_is_zero(&ens)) { return 0; @@ -152,16 +152,16 @@ int secp256k1_borromean_sign(const secp256k1_ecmult_gen_context *ecmult_gen_ctx, secp256k1_ge_set_gej_var(&rge, &rgej); secp256k1_eckey_pubkey_serialize33(&rge, tmp); } - secp256k1_sha256_write(&sha256_e0, tmp, 33); + secp256k1_sha256_write(hash_ctx, &sha256_e0, tmp, 33); count += rsizes[i]; } - secp256k1_sha256_write(&sha256_e0, m, mlen); - secp256k1_sha256_finalize(&sha256_e0, e0); + secp256k1_sha256_write(hash_ctx, &sha256_e0, m, mlen); + secp256k1_sha256_finalize(hash_ctx, &sha256_e0, e0); secp256k1_sha256_clear(&sha256_e0); count = 0; for (i = 0; i < nrings; i++) { VERIFY_CHECK(INT_MAX - count > rsizes[i]); - secp256k1_borromean_hash(tmp, m, mlen, e0, 32, i, 0); + secp256k1_borromean_hash(hash_ctx, tmp, m, mlen, e0, 32, i, 0); secp256k1_scalar_set_b32(&ens, tmp, &overflow); if (overflow || secp256k1_scalar_is_zero(&ens)) { return 0; @@ -173,7 +173,7 @@ int secp256k1_borromean_sign(const secp256k1_ecmult_gen_context *ecmult_gen_ctx, } secp256k1_ge_set_gej_var(&rge, &rgej); secp256k1_eckey_pubkey_serialize33(&rge, tmp); - secp256k1_borromean_hash(tmp, m, mlen, tmp, 33, i, j + 1); + secp256k1_borromean_hash(hash_ctx, tmp, m, mlen, tmp, 33, i, j + 1); secp256k1_scalar_set_b32(&ens, tmp, &overflow); if (overflow || secp256k1_scalar_is_zero(&ens)) { return 0; diff --git a/src/modules/rangeproof/main_impl.h b/src/modules/rangeproof/main_impl.h index 32614caa9..420338dd5 100644 --- a/src/modules/rangeproof/main_impl.h +++ b/src/modules/rangeproof/main_impl.h @@ -32,6 +32,7 @@ int secp256k1_rangeproof_rewind(const secp256k1_context* ctx, unsigned char *blind_out, uint64_t *value_out, unsigned char *message_out, size_t *outlen, const unsigned char *nonce, uint64_t *min_value, 
uint64_t *max_value, const secp256k1_pedersen_commitment *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, const secp256k1_generator* gen) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_ge commitp; secp256k1_ge genp; VERIFY_CHECK(ctx != NULL); @@ -46,12 +47,13 @@ int secp256k1_rangeproof_rewind(const secp256k1_context* ctx, ARG_CHECK(secp256k1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); secp256k1_pedersen_commitment_load(&commitp, commit); secp256k1_generator_load(&genp, gen); - return secp256k1_rangeproof_verify_impl(&ctx->ecmult_gen_ctx, + return secp256k1_rangeproof_verify_impl(hash_ctx, &ctx->ecmult_gen_ctx, blind_out, value_out, message_out, outlen, nonce, min_value, max_value, &commitp, proof, plen, extra_commit, extra_commit_len, &genp); } int secp256k1_rangeproof_verify(const secp256k1_context* ctx, uint64_t *min_value, uint64_t *max_value, const secp256k1_pedersen_commitment *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, const secp256k1_generator* gen) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_ge commitp; secp256k1_ge genp; VERIFY_CHECK(ctx != NULL); @@ -63,13 +65,14 @@ int secp256k1_rangeproof_verify(const secp256k1_context* ctx, uint64_t *min_valu ARG_CHECK(gen != NULL); secp256k1_pedersen_commitment_load(&commitp, commit); secp256k1_generator_load(&genp, gen); - return secp256k1_rangeproof_verify_impl(NULL, + return secp256k1_rangeproof_verify_impl(hash_ctx, NULL, NULL, NULL, NULL, NULL, NULL, min_value, max_value, &commitp, proof, plen, extra_commit, extra_commit_len, &genp); } int secp256k1_rangeproof_sign(const secp256k1_context* ctx, unsigned char *proof, size_t *plen, uint64_t min_value, const secp256k1_pedersen_commitment *commit, const unsigned char *blind, const unsigned char *nonce, int exp, int min_bits, uint64_t value, const unsigned 
char *message, size_t msg_len, const unsigned char *extra_commit, size_t extra_commit_len, const secp256k1_generator* gen){ + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_ge commitp; secp256k1_ge genp; VERIFY_CHECK(ctx != NULL); @@ -84,7 +87,7 @@ int secp256k1_rangeproof_sign(const secp256k1_context* ctx, unsigned char *proof ARG_CHECK(secp256k1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); secp256k1_pedersen_commitment_load(&commitp, commit); secp256k1_generator_load(&genp, gen); - return secp256k1_rangeproof_sign_impl(&ctx->ecmult_gen_ctx, + return secp256k1_rangeproof_sign_impl(hash_ctx, &ctx->ecmult_gen_ctx, proof, plen, min_value, &commitp, blind, nonce, exp, min_bits, value, message, msg_len, extra_commit, extra_commit_len, &genp); } diff --git a/src/modules/rangeproof/rangeproof_impl.h b/src/modules/rangeproof/rangeproof_impl.h index 476da5eb0..50211b079 100644 --- a/src/modules/rangeproof/rangeproof_impl.h +++ b/src/modules/rangeproof/rangeproof_impl.h @@ -58,7 +58,7 @@ SECP256K1_INLINE static void secp256k1_rangeproof_serialize_point(unsigned char* secp256k1_fe_get_b32(data + 1, &pointx); } -SECP256K1_INLINE static int secp256k1_rangeproof_genrand(secp256k1_scalar *sec, secp256k1_scalar *s, unsigned char *message, +SECP256K1_INLINE static int secp256k1_rangeproof_genrand(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *sec, secp256k1_scalar *s, unsigned char *message, size_t *rsizes, size_t rings, const unsigned char *nonce, const secp256k1_ge *commit, const unsigned char *proof, size_t len, const secp256k1_ge* genp) { unsigned char tmp[32]; unsigned char rngseed[32 + 33 + 33 + 10]; @@ -75,15 +75,15 @@ SECP256K1_INLINE static int secp256k1_rangeproof_genrand(secp256k1_scalar *sec, secp256k1_rangeproof_serialize_point(rngseed + 32, commit); secp256k1_rangeproof_serialize_point(rngseed + 32 + 33, genp); memcpy(rngseed + 33 + 33 + 32, proof, len); - secp256k1_rfc6979_hmac_sha256_initialize(&rng, rngseed, 32 + 33 + 
33 + len); + secp256k1_rfc6979_hmac_sha256_initialize(hash_ctx, &rng, rngseed, 32 + 33 + 33 + len); secp256k1_scalar_set_int(&acc, 0); npub = 0; ret = 1; for (i = 0; i < rings; i++) { if (i < rings - 1) { - secp256k1_rfc6979_hmac_sha256_generate(&rng, tmp, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, tmp, 32); do { - secp256k1_rfc6979_hmac_sha256_generate(&rng, tmp, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, tmp, 32); secp256k1_scalar_set_b32(&sec[i], tmp, &overflow); } while (overflow || secp256k1_scalar_is_zero(&sec[i])); secp256k1_scalar_add(&acc, &acc, &sec[i]); @@ -92,7 +92,7 @@ SECP256K1_INLINE static int secp256k1_rangeproof_genrand(secp256k1_scalar *sec, sec[i] = acc; } for (j = 0; j < rsizes[i]; j++) { - secp256k1_rfc6979_hmac_sha256_generate(&rng, tmp, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, tmp, 32); if (message) { for (b = 0; b < 32; b++) { tmp[b] ^= message[(i * 4 + j) * 32 + b]; @@ -189,7 +189,7 @@ SECP256K1_INLINE static int secp256k1_range_proveparams(uint64_t *v, size_t *rin } /* strawman interface, writes proof in proof, a buffer of plen, proves with respect to min_value the range for commit which has the provided blinding factor and value. 
*/ -SECP256K1_INLINE static int secp256k1_rangeproof_sign_impl(const secp256k1_ecmult_gen_context* ecmult_gen_ctx, +SECP256K1_INLINE static int secp256k1_rangeproof_sign_impl(const secp256k1_hash_ctx *hash_ctx, const secp256k1_ecmult_gen_context* ecmult_gen_ctx, unsigned char *proof, size_t *plen, uint64_t min_value, const secp256k1_ge *commit, const unsigned char *blind, const unsigned char *nonce, int exp, int min_bits, uint64_t value, const unsigned char *message, size_t msg_len, const unsigned char *extra_commit, size_t extra_commit_len, const secp256k1_ge* genp){ @@ -245,10 +245,10 @@ SECP256K1_INLINE static int secp256k1_rangeproof_sign_impl(const secp256k1_ecmul } secp256k1_sha256_initialize(&sha256_m); secp256k1_rangeproof_serialize_point(tmp, commit); - secp256k1_sha256_write(&sha256_m, tmp, 33); + secp256k1_sha256_write(hash_ctx, &sha256_m, tmp, 33); secp256k1_rangeproof_serialize_point(tmp, genp); - secp256k1_sha256_write(&sha256_m, tmp, 33); - secp256k1_sha256_write(&sha256_m, proof, len); + secp256k1_sha256_write(hash_ctx, &sha256_m, tmp, 33); + secp256k1_sha256_write(hash_ctx, &sha256_m, proof, len); memset(prep, 0, 4096); if (message != NULL) { @@ -267,7 +267,7 @@ SECP256K1_INLINE static int secp256k1_rangeproof_sign_impl(const secp256k1_ecmul } prep[idx] = 128; } - if (!secp256k1_rangeproof_genrand(sec, s, prep, rsizes, rings, nonce, commit, proof, len, genp)) { + if (!secp256k1_rangeproof_genrand(hash_ctx, sec, s, prep, rsizes, rings, nonce, commit, proof, len, genp)) { return 0; } secp256k1_memclear_explicit(prep, 4096); @@ -309,7 +309,7 @@ SECP256K1_INLINE static int secp256k1_rangeproof_sign_impl(const secp256k1_ecmul secp256k1_ge_set_gej_var(&c, &pubs[npub]); secp256k1_rangeproof_serialize_point(tmpc, &c); quadness = tmpc[0]; - secp256k1_sha256_write(&sha256_m, tmpc, 33); + secp256k1_sha256_write(hash_ctx, &sha256_m, tmpc, 33); signs[i>>3] |= quadness << (i&7); memcpy(&proof[len], tmpc + 1, 32); len += 32; @@ -318,11 +318,11 @@ SECP256K1_INLINE 
static int secp256k1_rangeproof_sign_impl(const secp256k1_ecmul } secp256k1_rangeproof_pub_expand(pubs, exp, rsizes, rings, genp); if (extra_commit != NULL) { - secp256k1_sha256_write(&sha256_m, extra_commit, extra_commit_len); + secp256k1_sha256_write(hash_ctx, &sha256_m, extra_commit, extra_commit_len); } - secp256k1_sha256_finalize(&sha256_m, tmp); + secp256k1_sha256_finalize(hash_ctx, &sha256_m, tmp); secp256k1_sha256_clear(&sha256_m); - if (!secp256k1_borromean_sign(ecmult_gen_ctx, &proof[len], s, pubs, k, sec, rsizes, secidx, rings, tmp, 32)) { + if (!secp256k1_borromean_sign(hash_ctx, ecmult_gen_ctx, &proof[len], s, pubs, k, sec, rsizes, secidx, rings, tmp, 32)) { return 0; } len += 32; @@ -361,7 +361,7 @@ SECP256K1_INLINE static void secp256k1_rangeproof_ch32xor(unsigned char *x, cons } } -SECP256K1_INLINE static int secp256k1_rangeproof_rewind_inner(secp256k1_scalar *blind, uint64_t *v, +SECP256K1_INLINE static int secp256k1_rangeproof_rewind_inner(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *blind, uint64_t *v, unsigned char *m, size_t *mlen, secp256k1_scalar *ev, secp256k1_scalar *s, size_t *rsizes, size_t rings, const unsigned char *nonce, const secp256k1_ge *commit, const unsigned char *proof, size_t len, const secp256k1_ge *genp) { secp256k1_scalar s_orig[128]; @@ -382,7 +382,7 @@ SECP256K1_INLINE static int secp256k1_rangeproof_rewind_inner(secp256k1_scalar * VERIFY_CHECK(npub >= 1); memset(prep, 0, 4096); /* Reconstruct the provers random values. 
*/ - secp256k1_rangeproof_genrand(sec, s_orig, prep, rsizes, rings, nonce, commit, proof, len, genp); + secp256k1_rangeproof_genrand(hash_ctx, sec, s_orig, prep, rsizes, rings, nonce, commit, proof, len, genp); *v = UINT64_MAX; secp256k1_scalar_clear(blind); if (rings == 1 && rsizes[0] == 1) { @@ -538,7 +538,7 @@ SECP256K1_INLINE static int secp256k1_rangeproof_getheader_impl(size_t *offset, } /* Verifies range proof (len plen) for commit, the min/max values proven are put in the min/max arguments; returns 0 on failure 1 on success.*/ -SECP256K1_INLINE static int secp256k1_rangeproof_verify_impl(const secp256k1_ecmult_gen_context* ecmult_gen_ctx, +SECP256K1_INLINE static int secp256k1_rangeproof_verify_impl(const secp256k1_hash_ctx *hash_ctx, const secp256k1_ecmult_gen_context* ecmult_gen_ctx, unsigned char *blindout, uint64_t *value_out, unsigned char *message_out, size_t *outlen, const unsigned char *nonce, uint64_t *min_value, uint64_t *max_value, const secp256k1_ge *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, const secp256k1_ge* genp) { secp256k1_gej accj; @@ -587,10 +587,10 @@ SECP256K1_INLINE static int secp256k1_rangeproof_verify_impl(const secp256k1_ecm } secp256k1_sha256_initialize(&sha256_m); secp256k1_rangeproof_serialize_point(m, commit); - secp256k1_sha256_write(&sha256_m, m, 33); + secp256k1_sha256_write(hash_ctx, &sha256_m, m, 33); secp256k1_rangeproof_serialize_point(m, genp); - secp256k1_sha256_write(&sha256_m, m, 33); - secp256k1_sha256_write(&sha256_m, proof, offset); + secp256k1_sha256_write(hash_ctx, &sha256_m, m, 33); + secp256k1_sha256_write(hash_ctx, &sha256_m, proof, offset); for(i = 0; i < rings - 1; i++) { signs[i] = (proof[offset + ( i>> 3)] & (1 << (i & 7))) != 0; } @@ -617,8 +617,8 @@ SECP256K1_INLINE static int secp256k1_rangeproof_verify_impl(const secp256k1_ecm } /* Not using secp256k1_rangeproof_serialize_point as we almost have it * serialized form already. 
*/ - secp256k1_sha256_write(&sha256_m, &signs[i], 1); - secp256k1_sha256_write(&sha256_m, &proof[offset], 32); + secp256k1_sha256_write(hash_ctx, &sha256_m, &signs[i], 1); + secp256k1_sha256_write(hash_ctx, &sha256_m, &proof[offset], 32); secp256k1_gej_set_ge(&pubs[npub], &c); secp256k1_gej_add_ge_var(&accj, &accj, &c, NULL); offset += 32; @@ -645,11 +645,11 @@ SECP256K1_INLINE static int secp256k1_rangeproof_verify_impl(const secp256k1_ecm return 0; } if (extra_commit != NULL) { - secp256k1_sha256_write(&sha256_m, extra_commit, extra_commit_len); + secp256k1_sha256_write(hash_ctx, &sha256_m, extra_commit, extra_commit_len); } - secp256k1_sha256_finalize(&sha256_m, m); + secp256k1_sha256_finalize(hash_ctx, &sha256_m, m); secp256k1_sha256_clear(&sha256_m); - ret = secp256k1_borromean_verify(nonce ? evalues : NULL, e0, s, pubs, rsizes, rings, m, 32); + ret = secp256k1_borromean_verify(hash_ctx, nonce ? evalues : NULL, e0, s, pubs, rsizes, rings, m, 32); if (ret && nonce) { /* Given the nonce, try rewinding the witness to recover its initial state. */ secp256k1_scalar blind; @@ -657,7 +657,7 @@ SECP256K1_INLINE static int secp256k1_rangeproof_verify_impl(const secp256k1_ecm if (!ecmult_gen_ctx) { return 0; } - if (!secp256k1_rangeproof_rewind_inner(&blind, &vv, message_out, outlen, evalues, s, rsizes, rings, nonce, commit, proof, offset_post_header, genp)) { + if (!secp256k1_rangeproof_rewind_inner(hash_ctx, &blind, &vv, message_out, outlen, evalues, s, rsizes, rings, nonce, commit, proof, offset_post_header, genp)) { return 0; } /* Unwind apparently successful, see if the commitment can be reconstructed. 
*/ diff --git a/src/modules/rangeproof/tests_impl.h b/src/modules/rangeproof/tests_impl.h index 7538558e0..002c3707e 100644 --- a/src/modules/rangeproof/tests_impl.h +++ b/src/modules/rangeproof/tests_impl.h @@ -123,6 +123,7 @@ static void test_rangeproof_api_internal(void) { } static void test_borromean_internal(void) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); unsigned char e0[32]; secp256k1_scalar s[64]; secp256k1_gej pubs[64]; @@ -169,11 +170,11 @@ static void test_borromean_internal(void) { } c += rsizes[i]; } - CHECK(secp256k1_borromean_sign(&CTX->ecmult_gen_ctx, e0, s, pubs, k, sec, rsizes, secidx, nrings, m, 32)); - CHECK(secp256k1_borromean_verify(NULL, e0, s, pubs, rsizes, nrings, m, 32)); + CHECK(secp256k1_borromean_sign(hash_ctx, &CTX->ecmult_gen_ctx, e0, s, pubs, k, sec, rsizes, secidx, nrings, m, 32)); + CHECK(secp256k1_borromean_verify(hash_ctx, NULL, e0, s, pubs, rsizes, nrings, m, 32)); i = testrand32() % c; secp256k1_scalar_negate(&s[i],&s[i]); - CHECK(!secp256k1_borromean_verify(NULL, e0, s, pubs, rsizes, nrings, m, 32)); + CHECK(!secp256k1_borromean_verify(hash_ctx, NULL, e0, s, pubs, rsizes, nrings, m, 32)); secp256k1_scalar_negate(&s[i],&s[i]); secp256k1_scalar_set_int(&one, 1); for(j = 0; j < 4; j++) { @@ -183,7 +184,7 @@ static void test_borromean_internal(void) { } else { secp256k1_scalar_add(&s[i],&s[i],&one); } - CHECK(!secp256k1_borromean_verify(NULL, e0, s, pubs, rsizes, nrings, m, 32)); + CHECK(!secp256k1_borromean_verify(hash_ctx, NULL, e0, s, pubs, rsizes, nrings, m, 32)); } } diff --git a/src/modules/schnorrsig/main_impl.h b/src/modules/schnorrsig/main_impl.h index 21c1f4129..5100557f4 100644 --- a/src/modules/schnorrsig/main_impl.h +++ b/src/modules/schnorrsig/main_impl.h @@ -37,7 +37,7 @@ static const unsigned char bip340_algo[] = {'B', 'I', 'P', '0', '3', '4', '0', ' static const unsigned char schnorrsig_extraparams_magic[4] = SECP256K1_SCHNORRSIG_EXTRAPARAMS_MAGIC; -static int 
nonce_function_bip340(unsigned char *nonce32, const unsigned char *msg, size_t msglen, const unsigned char *key32, const unsigned char *xonly_pk32, const unsigned char *algo, size_t algolen, void *data) { +static int nonce_function_bip340_impl(const secp256k1_hash_ctx *hash_ctx, unsigned char *nonce32, const unsigned char *msg, size_t msglen, const unsigned char *key32, const unsigned char *xonly_pk32, const unsigned char *algo, size_t algolen, void *data) { secp256k1_sha256 sha; unsigned char masked_key[32]; int i; @@ -48,8 +48,8 @@ static int nonce_function_bip340(unsigned char *nonce32, const unsigned char *ms if (data != NULL) { secp256k1_nonce_function_bip340_sha256_tagged_aux(&sha); - secp256k1_sha256_write(&sha, data, 32); - secp256k1_sha256_finalize(&sha, masked_key); + secp256k1_sha256_write(hash_ctx, &sha, data, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, masked_key); for (i = 0; i < 32; i++) { masked_key[i] ^= key32[i]; } @@ -73,20 +73,24 @@ static int nonce_function_bip340(unsigned char *nonce32, const unsigned char *ms && secp256k1_memcmp_var(algo, bip340_algo, algolen) == 0) { secp256k1_nonce_function_bip340_sha256_tagged(&sha); } else { - secp256k1_sha256_initialize_tagged(&sha, algo, algolen); + secp256k1_sha256_initialize_tagged(hash_ctx, &sha, algo, algolen); } /* Hash masked-key||pk||msg using the tagged hash as per the spec */ - secp256k1_sha256_write(&sha, masked_key, 32); - secp256k1_sha256_write(&sha, xonly_pk32, 32); - secp256k1_sha256_write(&sha, msg, msglen); - secp256k1_sha256_finalize(&sha, nonce32); + secp256k1_sha256_write(hash_ctx, &sha, masked_key, 32); + secp256k1_sha256_write(hash_ctx, &sha, xonly_pk32, 32); + secp256k1_sha256_write(hash_ctx, &sha, msg, msglen); + secp256k1_sha256_finalize(hash_ctx, &sha, nonce32); secp256k1_sha256_clear(&sha); secp256k1_memclear_explicit(masked_key, sizeof(masked_key)); return 1; } +static int nonce_function_bip340(unsigned char *nonce32, const unsigned char *msg, size_t msglen, const 
unsigned char *key32, const unsigned char *xonly_pk32, const unsigned char *algo, size_t algolen, void *data) { + return nonce_function_bip340_impl(secp256k1_get_hash_context(secp256k1_context_static), nonce32, msg, msglen, key32, xonly_pk32, algo, algolen, data); +} + const secp256k1_nonce_function_hardened secp256k1_nonce_function_bip340 = nonce_function_bip340; /* Initializes SHA256 with fixed midstate. This midstate was computed by applying @@ -99,17 +103,17 @@ static void secp256k1_schnorrsig_sha256_tagged(secp256k1_sha256 *sha) { secp256k1_sha256_initialize_midstate(sha, 64, midstate); } -static void secp256k1_schnorrsig_challenge(secp256k1_scalar* e, const unsigned char *r32, const unsigned char *msg, size_t msglen, const unsigned char *pubkey32) +static void secp256k1_schnorrsig_challenge(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar* e, const unsigned char *r32, const unsigned char *msg, size_t msglen, const unsigned char *pubkey32) { unsigned char buf[32]; secp256k1_sha256 sha; /* tagged hash(r.x, pk.x, msg) */ secp256k1_schnorrsig_sha256_tagged(&sha); - secp256k1_sha256_write(&sha, r32, 32); - secp256k1_sha256_write(&sha, pubkey32, 32); - secp256k1_sha256_write(&sha, msg, msglen); - secp256k1_sha256_finalize(&sha, buf); + secp256k1_sha256_write(hash_ctx, &sha, r32, 32); + secp256k1_sha256_write(hash_ctx, &sha, pubkey32, 32); + secp256k1_sha256_write(hash_ctx, &sha, msg, msglen); + secp256k1_sha256_finalize(hash_ctx, &sha, buf); /* Set scalar e to the challenge hash modulo the curve order as per * BIP340. 
*/ secp256k1_scalar_set_b32(e, buf, NULL); @@ -133,10 +137,6 @@ static int secp256k1_schnorrsig_sign_internal(const secp256k1_context* ctx, unsi ARG_CHECK(msg != NULL || msglen == 0); ARG_CHECK(keypair != NULL); - if (noncefp == NULL) { - noncefp = secp256k1_nonce_function_bip340; - } - ret &= secp256k1_keypair_load(ctx, &sk, &pk, keypair); /* Because we are signing for a x-only pubkey, the secret key is negated * before signing if the point corresponding to the secret key does not @@ -147,7 +147,15 @@ static int secp256k1_schnorrsig_sign_internal(const secp256k1_context* ctx, unsi secp256k1_scalar_get_b32(seckey, &sk); secp256k1_fe_get_b32(pk_buf, &pk.x); - ret &= !!noncefp(nonce32, msg, msglen, seckey, pk_buf, bip340_algo, sizeof(bip340_algo), ndata); + + /* Compute nonce */ + if (noncefp == NULL || noncefp == secp256k1_nonce_function_bip340) { + /* Use context-aware nonce function by default */ + ret &= nonce_function_bip340_impl(secp256k1_get_hash_context(ctx), nonce32, msg, msglen, seckey, pk_buf, bip340_algo, sizeof(bip340_algo), ndata); + } else { + ret &= !!noncefp(nonce32, msg, msglen, seckey, pk_buf, bip340_algo, sizeof(bip340_algo), ndata); + } + secp256k1_scalar_set_b32(&k, nonce32, NULL); ret &= !secp256k1_scalar_is_zero(&k); secp256k1_scalar_cmov(&k, &secp256k1_scalar_one, !ret); @@ -165,7 +173,7 @@ static int secp256k1_schnorrsig_sign_internal(const secp256k1_context* ctx, unsi secp256k1_fe_normalize_var(&r.x); secp256k1_fe_get_b32(&sig64[0], &r.x); - secp256k1_schnorrsig_challenge(&e, &sig64[0], msg, msglen, pk_buf); + secp256k1_schnorrsig_challenge(secp256k1_get_hash_context(ctx), &e, &sig64[0], msg, msglen, pk_buf); secp256k1_scalar_mul(&e, &e, &sk); secp256k1_scalar_add(&e, &e, &k); secp256k1_scalar_get_b32(&sig64[32], &e); @@ -235,7 +243,7 @@ int secp256k1_schnorrsig_verify(const secp256k1_context* ctx, const unsigned cha /* Compute e. 
*/ secp256k1_fe_get_b32(buf, &pk.x); - secp256k1_schnorrsig_challenge(&e, &sig64[0], msg, msglen, buf); + secp256k1_schnorrsig_challenge(secp256k1_get_hash_context(ctx), &e, &sig64[0], msg, msglen, buf); /* Compute rj = s*G + (-e)*pkj */ secp256k1_scalar_negate(&e, &e); diff --git a/src/modules/schnorrsig/tests_exhaustive_impl.h b/src/modules/schnorrsig/tests_exhaustive_impl.h index 601b54975..57efe348b 100644 --- a/src/modules/schnorrsig/tests_exhaustive_impl.h +++ b/src/modules/schnorrsig/tests_exhaustive_impl.h @@ -56,7 +56,7 @@ static const unsigned char invalid_pubkey_bytes[][32] = { } }; -#define NUM_INVALID_KEYS (sizeof(invalid_pubkey_bytes) / sizeof(invalid_pubkey_bytes[0])) +#define NUM_INVALID_KEYS (ARRAY_SIZE(invalid_pubkey_bytes)) static int secp256k1_hardened_nonce_function_smallint(unsigned char *nonce32, const unsigned char *msg, size_t msglen, @@ -105,7 +105,7 @@ static void test_exhaustive_schnorrsig_verify(const secp256k1_context *ctx, cons secp256k1_scalar e; unsigned char msg32[32]; testrand256(msg32); - secp256k1_schnorrsig_challenge(&e, sig64, msg32, sizeof(msg32), pk32); + secp256k1_schnorrsig_challenge(secp256k1_get_hash_context(ctx), &e, sig64, msg32, sizeof(msg32), pk32); /* Only do work if we hit a challenge we haven't tried before. */ if (!e_done[e]) { /* Iterate over the possible valid last 32 bytes in the signature. @@ -162,7 +162,7 @@ static void test_exhaustive_schnorrsig_sign(const secp256k1_context *ctx, unsign while (e_count_done < EXHAUSTIVE_TEST_ORDER) { secp256k1_scalar e; testrand256(msg32); - secp256k1_schnorrsig_challenge(&e, xonly_pubkey_bytes[k - 1], msg32, sizeof(msg32), xonly_pubkey_bytes[d - 1]); + secp256k1_schnorrsig_challenge(secp256k1_get_hash_context(ctx), &e, xonly_pubkey_bytes[k - 1], msg32, sizeof(msg32), xonly_pubkey_bytes[d - 1]); /* Only do work if we hit a challenge we haven't tried before. 
*/ if (!e_done[e]) { secp256k1_scalar expected_s = (actual_k + e * actual_d) % EXHAUSTIVE_TEST_ORDER; diff --git a/src/modules/schnorrsig/tests_impl.h b/src/modules/schnorrsig/tests_impl.h index 9a1b15f0b..56812e7f0 100644 --- a/src/modules/schnorrsig/tests_impl.h +++ b/src/modules/schnorrsig/tests_impl.h @@ -38,18 +38,20 @@ static void run_nonce_function_bip340_tests(void) { unsigned char *args[5]; int i; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); + /* Check that hash initialized by * secp256k1_nonce_function_bip340_sha256_tagged has the expected * state. */ secp256k1_nonce_function_bip340_sha256_tagged(&sha_optimized); - test_sha256_tag_midstate(&sha_optimized, tag, sizeof(tag)); + test_sha256_tag_midstate(hash_ctx, &sha_optimized, tag, sizeof(tag)); /* Check that hash initialized by * secp256k1_nonce_function_bip340_sha256_tagged_aux has the expected * state. */ secp256k1_nonce_function_bip340_sha256_tagged_aux(&sha_optimized); - test_sha256_tag_midstate(&sha_optimized, aux_tag, sizeof(aux_tag)); + test_sha256_tag_midstate(hash_ctx, &sha_optimized, aux_tag, sizeof(aux_tag)); testrand256(msg); testrand256(key); @@ -162,8 +164,9 @@ static void test_schnorrsig_sha256_tagged(void) { unsigned char tag[] = {'B', 'I', 'P', '0', '3', '4', '0', '/', 'c', 'h', 'a', 'l', 'l', 'e', 'n', 'g', 'e'}; secp256k1_sha256 sha; secp256k1_sha256 sha_optimized; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); - secp256k1_sha256_initialize_tagged(&sha, (unsigned char *) tag, sizeof(tag)); + secp256k1_sha256_initialize_tagged(hash_ctx, &sha, (unsigned char *) tag, sizeof(tag)); secp256k1_schnorrsig_sha256_tagged(&sha_optimized); test_sha256_eq(&sha, &sha_optimized); } @@ -849,6 +852,29 @@ static void test_schnorrsig_sign_internal(void) { CHECK(secp256k1_memcmp_var(sig, sig2, sizeof(sig)) == 0); } +DEFINE_SHA256_TRANSFORM_PROBE(sha256_schnorrsig) +static void test_schnorrsig_ctx_sha256(void) { + /* Check ctx-provided SHA256 compression 
override takes effect */ + secp256k1_context *ctx = secp256k1_context_clone(CTX); + unsigned char out_default[64], out_custom[64]; + unsigned char sk[32] = {1}, msg32[32] = {1}; + secp256k1_keypair keypair; + CHECK(secp256k1_keypair_create(ctx, &keypair, sk)); + + /* Default behavior. No ctx-provided SHA256 compression */ + CHECK(secp256k1_schnorrsig_sign32(ctx, out_default, msg32, &keypair, NULL)); + CHECK(!sha256_schnorrsig_called); + + /* Override SHA256 compression directly, bypassing the ctx setter sanity checks */ + ctx->hash_ctx.fn_sha256_compression = sha256_schnorrsig; + CHECK(secp256k1_schnorrsig_sign32(ctx, out_custom, msg32, &keypair, NULL)); + CHECK(sha256_schnorrsig_called); + /* Outputs must differ if custom compression was used */ + CHECK(secp256k1_memcmp_var(out_default, out_custom, 64) != 0); + + secp256k1_context_destroy(ctx); +} + #define N_SIGS 3 /* Creates N_SIGS valid signatures and verifies them with verify and * verify_batch (TODO). Then flips some bits and checks that verification now @@ -978,6 +1004,7 @@ static const struct tf_test_entry tests_schnorrsig[] = { CASE1(test_schnorrsig_sign), CASE1(test_schnorrsig_sign_verify), CASE1(test_schnorrsig_taproot), + CASE1(test_schnorrsig_ctx_sha256), }; #endif diff --git a/src/modules/schnorrsig_halfagg/main_impl.h b/src/modules/schnorrsig_halfagg/main_impl.h index 5d424a389..466c819b8 100644 --- a/src/modules/schnorrsig_halfagg/main_impl.h +++ b/src/modules/schnorrsig_halfagg/main_impl.h @@ -21,6 +21,7 @@ int secp256k1_schnorrsig_inc_aggregate(const secp256k1_context *ctx, unsigned ch size_t n; secp256k1_sha256 hash; secp256k1_scalar s; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); VERIFY_CHECK(ctx != NULL); ARG_CHECK(aggsig != NULL); @@ -47,11 +48,11 @@ int secp256k1_schnorrsig_inc_aggregate(const secp256k1_context *ctx, unsigned ch return 0; } /* write r_i */ - secp256k1_sha256_write(&hash, &aggsig[i*32], 32); + secp256k1_sha256_write(hash_ctx, &hash, &aggsig[i*32], 
32); /* write pk_i */ - secp256k1_sha256_write(&hash, pk_ser, 32); + secp256k1_sha256_write(hash_ctx, &hash, pk_ser, 32); /* write m_i*/ - secp256k1_sha256_write(&hash, &all_msgs32[i*32], 32); + secp256k1_sha256_write(hash_ctx, &hash, &all_msgs32[i*32], 32); } /* Compute s = s_old + sum_{i = n_before}^{n} z_i*s_i */ @@ -72,13 +73,13 @@ int secp256k1_schnorrsig_inc_aggregate(const secp256k1_context *ctx, unsigned ch /* Step 1: z_i = TaggedHash(...) */ /* 1.a) Write into hash r_i, pk_i, m_i, r_i */ - secp256k1_sha256_write(&hash, &new_sigs64[(i-n_before)*64], 32); - secp256k1_sha256_write(&hash, pk_ser, 32); - secp256k1_sha256_write(&hash, &all_msgs32[i*32], 32); + secp256k1_sha256_write(hash_ctx, &hash, &new_sigs64[(i-n_before)*64], 32); + secp256k1_sha256_write(hash_ctx, &hash, pk_ser, 32); + secp256k1_sha256_write(hash_ctx, &hash, &all_msgs32[i*32], 32); /* 1.b) Copy the hash */ hashcopy = hash; /* 1.c) Finalize the copy to get zi*/ - secp256k1_sha256_finalize(&hashcopy, hashoutput); + secp256k1_sha256_finalize(hash_ctx, &hashcopy, hashoutput); secp256k1_sha256_clear(&hashcopy); /* Note: No need to check overflow, comes from hash */ secp256k1_scalar_set_b32(&zi, hashoutput, NULL); @@ -105,6 +106,7 @@ int secp256k1_schnorrsig_aggregate(const secp256k1_context *ctx, unsigned char * } int secp256k1_schnorrsig_aggverify(const secp256k1_context *ctx, const secp256k1_xonly_pubkey *pubkeys, const unsigned char *msgs32, size_t n, const unsigned char *aggsig, size_t aggsig_len) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); size_t i; secp256k1_gej lhs, rhs; secp256k1_scalar s; @@ -150,13 +152,13 @@ int secp256k1_schnorrsig_aggverify(const secp256k1_context *ctx, const secp256k1 /* Step 1: z_i = TaggedHash(...) 
*/ /* 1.a) Write into hash r_i, pk_i, m_i, r_i */ - secp256k1_sha256_write(&hash, &aggsig[i*32], 32); - secp256k1_sha256_write(&hash, pk_ser, 32); - secp256k1_sha256_write(&hash, &msgs32[i*32], 32); + secp256k1_sha256_write(hash_ctx, &hash, &aggsig[i*32], 32); + secp256k1_sha256_write(hash_ctx, &hash, pk_ser, 32); + secp256k1_sha256_write(hash_ctx, &hash, &msgs32[i*32], 32); /* 1.b) Copy the hash */ hashcopy = hash; /* 1.c) Finalize the copy to get zi*/ - secp256k1_sha256_finalize(&hashcopy, hashoutput); + secp256k1_sha256_finalize(hash_ctx, &hashcopy, hashoutput); secp256k1_sha256_clear(&hashcopy); secp256k1_scalar_set_b32(&zi, hashoutput, NULL); @@ -170,7 +172,7 @@ int secp256k1_schnorrsig_aggverify(const secp256k1_context *ctx, const secp256k1 } /* 2.b) e_i = int(hash_{BIP0340/challenge}(bytes(r_i) || pk_i || m_i)) mod n */ - secp256k1_schnorrsig_challenge(&ei, &aggsig[i*32], &msgs32[i*32], 32, pk_ser); + secp256k1_schnorrsig_challenge(hash_ctx, &ei, &aggsig[i*32], &msgs32[i*32], 32, pk_ser); secp256k1_gej_set_ge(&ppj, &pp); /* 2.c) T_i = R_i + e_i*P_i */ secp256k1_ecmult(&ti, &ppj, &ei, NULL); diff --git a/src/modules/schnorrsig_halfagg/tests_impl.h b/src/modules/schnorrsig_halfagg/tests_impl.h index 29d39b2c8..f4092842f 100644 --- a/src/modules/schnorrsig_halfagg/tests_impl.h +++ b/src/modules/schnorrsig_halfagg/tests_impl.h @@ -11,9 +11,10 @@ void test_schnorrsig_sha256_tagged_aggregate(void) { static const unsigned char tag[] = {'H', 'a', 'l', 'f', 'A', 'g', 'g', '/', 'r', 'a', 'n', 'd', 'o', 'm', 'i', 'z', 'e', 'r'}; secp256k1_sha256 sha_optimized; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); secp256k1_schnorrsig_sha256_tagged_aggregation(&sha_optimized); - test_sha256_tag_midstate(&sha_optimized, tag, sizeof(tag)); + test_sha256_tag_midstate(hash_ctx, &sha_optimized, tag, sizeof(tag)); } /* Create n many x-only pubkeys and sigs for random messages */ diff --git a/src/modules/surjection/main_impl.h 
b/src/modules/surjection/main_impl.h index 248efe145..968cc4774 100644 --- a/src/modules/surjection/main_impl.h +++ b/src/modules/surjection/main_impl.h @@ -138,7 +138,7 @@ static void secp256k1_surjectionproof_csprng_init(secp256k1_surjectionproof_cspr csprng->state_i = 0; } -static size_t secp256k1_surjectionproof_csprng_next(secp256k1_surjectionproof_csprng *csprng, size_t rand_max) { +static size_t secp256k1_surjectionproof_csprng_next(const secp256k1_hash_ctx *hash_ctx, secp256k1_surjectionproof_csprng *csprng, size_t rand_max) { /* The number of random bytes to read for each random sample */ const size_t increment = rand_max > 256 ? 2 : 1; /* The maximum value expressable by the number of random bytes we read */ @@ -151,8 +151,8 @@ static size_t secp256k1_surjectionproof_csprng_next(secp256k1_surjectionproof_cs if (csprng->state_i + increment >= 32) { secp256k1_sha256 sha; secp256k1_sha256_initialize(&sha); - secp256k1_sha256_write(&sha, csprng->state, 32); - secp256k1_sha256_finalize(&sha, csprng->state); + secp256k1_sha256_write(hash_ctx, &sha, csprng->state, 32); + secp256k1_sha256_finalize(hash_ctx, &sha, csprng->state); secp256k1_sha256_clear(&sha); csprng->state_i = 0; } @@ -212,6 +212,7 @@ void secp256k1_surjectionproof_destroy(secp256k1_surjectionproof* proof) { } int secp256k1_surjectionproof_initialize(const secp256k1_context* ctx, secp256k1_surjectionproof* proof, size_t *input_index, const secp256k1_fixed_asset_tag* fixed_input_tags, const size_t n_input_tags, const size_t n_input_tags_to_use, const secp256k1_fixed_asset_tag* fixed_output_tag, const size_t n_max_iterations, const unsigned char *random_seed32) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_surjectionproof_csprng csprng; size_t n_iterations = 0; @@ -239,7 +240,7 @@ int secp256k1_surjectionproof_initialize(const secp256k1_context* ctx, secp256k1 for (i = 0; i < n_input_tags_to_use; i++) { while (1) { size_t next_input_index; - next_input_index = 
secp256k1_surjectionproof_csprng_next(&csprng, n_input_tags); + next_input_index = secp256k1_surjectionproof_csprng_next(hash_ctx, &csprng, n_input_tags); if (secp256k1_memcmp_var(&fixed_input_tags[next_input_index], fixed_output_tag, sizeof(*fixed_output_tag)) == 0) { *input_index = next_input_index; has_output_tag = 1; @@ -270,6 +271,7 @@ int secp256k1_surjectionproof_initialize(const secp256k1_context* ctx, secp256k1 } int secp256k1_surjectionproof_generate(const secp256k1_context* ctx, secp256k1_surjectionproof* proof, const secp256k1_generator* ephemeral_input_tags, size_t n_ephemeral_input_tags, const secp256k1_generator* ephemeral_output_tag, size_t input_index, const unsigned char *input_blinding_key, const unsigned char *output_blinding_key) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_scalar blinding_key; secp256k1_scalar tmps; secp256k1_scalar nonce; @@ -334,8 +336,8 @@ int secp256k1_surjectionproof_generate(const secp256k1_context* ctx, secp256k1_s /* Produce signature */ rsizes[0] = (int) n_used_pubkeys; indices[0] = (int) ring_input_index; - secp256k1_surjection_genmessage(msg32, ephemeral_input_tags, n_total_pubkeys, ephemeral_output_tag); - if (secp256k1_surjection_genrand(borromean_s, n_used_pubkeys, &blinding_key) == 0) { + secp256k1_surjection_genmessage(hash_ctx, msg32, ephemeral_input_tags, n_total_pubkeys, ephemeral_output_tag); + if (secp256k1_surjection_genrand(hash_ctx, borromean_s, n_used_pubkeys, &blinding_key) == 0) { return 0; } /* Borromean sign will overwrite one of the s values we just generated, so use @@ -343,7 +345,7 @@ int secp256k1_surjectionproof_generate(const secp256k1_context* ctx, secp256k1_s * homage to the rangeproof code which does this very cleverly to encode messages. 
*/ nonce = borromean_s[ring_input_index]; secp256k1_scalar_clear(&borromean_s[ring_input_index]); - if (secp256k1_borromean_sign(&ctx->ecmult_gen_ctx, &proof->data[0], borromean_s, ring_pubkeys, &nonce, &blinding_key, rsizes, indices, 1, msg32, 32) == 0) { + if (secp256k1_borromean_sign(hash_ctx, &ctx->ecmult_gen_ctx, &proof->data[0], borromean_s, ring_pubkeys, &nonce, &blinding_key, rsizes, indices, 1, msg32, 32) == 0) { return 0; } for (i = 0; i < n_used_pubkeys; i++) { @@ -356,6 +358,7 @@ int secp256k1_surjectionproof_generate(const secp256k1_context* ctx, secp256k1_s static #endif int secp256k1_surjectionproof_verify(const secp256k1_context* ctx, const secp256k1_surjectionproof* proof, const secp256k1_generator* ephemeral_input_tags, size_t n_ephemeral_input_tags, const secp256k1_generator* ephemeral_output_tag) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); size_t rsizes[1]; /* array needed for borromean sig API */ size_t i; size_t n_total_pubkeys; @@ -394,8 +397,8 @@ int secp256k1_surjectionproof_verify(const secp256k1_context* ctx, const secp256 return 0; } } - secp256k1_surjection_genmessage(msg32, ephemeral_input_tags, n_total_pubkeys, ephemeral_output_tag); - return secp256k1_borromean_verify(NULL, &proof->data[0], borromean_s, ring_pubkeys, rsizes, 1, msg32, 32); + secp256k1_surjection_genmessage(hash_ctx, msg32, ephemeral_input_tags, n_total_pubkeys, ephemeral_output_tag); + return secp256k1_borromean_verify(hash_ctx, NULL, &proof->data[0], borromean_s, ring_pubkeys, rsizes, 1, msg32, 32); } #endif diff --git a/src/modules/surjection/surjection_impl.h b/src/modules/surjection/surjection_impl.h index 0776e4c82..bccced522 100644 --- a/src/modules/surjection/surjection_impl.h +++ b/src/modules/surjection/surjection_impl.h @@ -15,7 +15,7 @@ #include "../../scalar.h" #include "../../hash.h" -SECP256K1_INLINE static void secp256k1_surjection_genmessage(unsigned char *msg32, const secp256k1_generator *ephemeral_input_tags, size_t 
n_input_tags, const secp256k1_generator *ephemeral_output_tag) { +SECP256K1_INLINE static void secp256k1_surjection_genmessage(const secp256k1_hash_ctx *hash_ctx, unsigned char *msg32, const secp256k1_generator *ephemeral_input_tags, size_t n_input_tags, const secp256k1_generator *ephemeral_output_tag) { /* compute message */ size_t i; unsigned char pk_ser[33]; @@ -26,16 +26,16 @@ SECP256K1_INLINE static void secp256k1_surjection_genmessage(unsigned char *msg3 for (i = 0; i < n_input_tags; i++) { pk_ser[0] = 2 + (ephemeral_input_tags[i].data[63] & 1); memcpy(&pk_ser[1], &ephemeral_input_tags[i].data[0], 32); - secp256k1_sha256_write(&sha256_en, pk_ser, pk_len); + secp256k1_sha256_write(hash_ctx, &sha256_en, pk_ser, pk_len); } pk_ser[0] = 2 + (ephemeral_output_tag->data[63] & 1); memcpy(&pk_ser[1], &ephemeral_output_tag->data[0], 32); - secp256k1_sha256_write(&sha256_en, pk_ser, pk_len); - secp256k1_sha256_finalize(&sha256_en, msg32); + secp256k1_sha256_write(hash_ctx, &sha256_en, pk_ser, pk_len); + secp256k1_sha256_finalize(hash_ctx, &sha256_en, msg32); secp256k1_sha256_clear(&sha256_en); } -SECP256K1_INLINE static int secp256k1_surjection_genrand(secp256k1_scalar *s, size_t ns, const secp256k1_scalar *blinding_key) { +SECP256K1_INLINE static int secp256k1_surjection_genrand(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar *s, size_t ns, const secp256k1_scalar *blinding_key) { size_t i; unsigned char sec_input[36]; secp256k1_sha256 sha256_en; @@ -50,8 +50,8 @@ SECP256K1_INLINE static int secp256k1_surjection_genrand(secp256k1_scalar *s, si sec_input[3] = i >> 24; secp256k1_sha256_initialize(&sha256_en); - secp256k1_sha256_write(&sha256_en, sec_input, 36); - secp256k1_sha256_finalize(&sha256_en, sec_input); + secp256k1_sha256_write(hash_ctx, &sha256_en, sec_input, 36); + secp256k1_sha256_finalize(hash_ctx, &sha256_en, sec_input); secp256k1_sha256_clear(&sha256_en); secp256k1_scalar_set_b32(&s[i], sec_input, &overflow); if (overflow == 1) { diff --git 
a/src/modules/surjection/tests_impl.h b/src/modules/surjection/tests_impl.h index d81ac6240..7ba328c32 100644 --- a/src/modules/surjection/tests_impl.h +++ b/src/modules/surjection/tests_impl.h @@ -26,7 +26,7 @@ static void test_surjectionproof_api(void) { size_t serialized_len; secp256k1_surjectionproof proof; secp256k1_surjectionproof* proof_on_heap; - size_t n_inputs = sizeof(fixed_input_tags) / sizeof(fixed_input_tags[0]); + size_t n_inputs = ARRAY_SIZE(fixed_input_tags); size_t input_index; size_t i; @@ -146,7 +146,7 @@ static void test_input_selection(size_t n_inputs) { size_t try_count = n_inputs * 100; secp256k1_surjectionproof proof; secp256k1_fixed_asset_tag fixed_input_tags[1000]; - const size_t max_n_inputs = sizeof(fixed_input_tags) / sizeof(fixed_input_tags[0]) - 1; + const size_t max_n_inputs = ARRAY_SIZE(fixed_input_tags) - 1; CHECK(n_inputs < max_n_inputs); testrand256(seed); @@ -313,7 +313,7 @@ static void test_gen_verify(size_t n_inputs, size_t n_used) { secp256k1_fixed_asset_tag fixed_input_tags[1000]; secp256k1_generator ephemeral_input_tags[1000]; unsigned char *input_blinding_key[1000]; - const size_t max_n_inputs = sizeof(fixed_input_tags) / sizeof(fixed_input_tags[0]) - 1; + const size_t max_n_inputs = ARRAY_SIZE(fixed_input_tags) - 1; size_t try_count = n_inputs * 100; size_t key_index; size_t input_index; @@ -395,6 +395,7 @@ static void test_gen_verify(size_t n_inputs, size_t n_used) { /* check that a proof with empty n_used_inputs is invalid */ static void test_no_used_inputs_verify(void) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); secp256k1_surjectionproof proof; secp256k1_fixed_asset_tag fixed_input_tag; secp256k1_fixed_asset_tag fixed_output_tag; @@ -422,10 +423,10 @@ static void test_no_used_inputs_verify(void) { /* create "borromean signature" which is just a hash of metadata (pubkeys, etc) in this case */ secp256k1_generator_load(&output, &ephemeral_output_tag); - 
secp256k1_surjection_genmessage(proof.data, ephemeral_input_tags, 1, &ephemeral_output_tag); + secp256k1_surjection_genmessage(hash_ctx, proof.data, ephemeral_input_tags, 1, &ephemeral_output_tag); secp256k1_sha256_initialize(&sha256_e0); - secp256k1_sha256_write(&sha256_e0, proof.data, 32); - secp256k1_sha256_finalize(&sha256_e0, proof.data); + secp256k1_sha256_write(hash_ctx, &sha256_e0, proof.data, 32); + secp256k1_sha256_finalize(hash_ctx, &sha256_e0, proof.data); result = secp256k1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_ephemeral_input_tags, &ephemeral_output_tag); CHECK(result == 0); diff --git a/src/modules/whitelist/main_impl.h b/src/modules/whitelist/main_impl.h index 301d24766..28c563b2d 100644 --- a/src/modules/whitelist/main_impl.h +++ b/src/modules/whitelist/main_impl.h @@ -13,6 +13,7 @@ #define MAX_KEYS SECP256K1_WHITELIST_MAX_N_KEYS /* shorter alias */ int secp256k1_whitelist_sign(const secp256k1_context* ctx, secp256k1_whitelist_signature *sig, const secp256k1_pubkey *online_pubkeys, const secp256k1_pubkey *offline_pubkeys, const size_t n_keys, const secp256k1_pubkey *sub_pubkey, const unsigned char *online_seckey, const unsigned char *summed_seckey, const size_t index) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_gej pubs[MAX_KEYS]; secp256k1_scalar s[MAX_KEYS]; secp256k1_scalar sec, non; @@ -85,7 +86,7 @@ int secp256k1_whitelist_sign(const secp256k1_context* ctx, secp256k1_whitelist_s /* Actually sign */ if (ret) { sig->n_keys = n_keys; - ret = secp256k1_borromean_sign(&ctx->ecmult_gen_ctx, &sig->data[0], s, pubs, &non, &sec, &n_keys, &index, 1, msg32, 32); + ret = secp256k1_borromean_sign(hash_ctx, &ctx->ecmult_gen_ctx, &sig->data[0], s, pubs, &non, &sec, &n_keys, &index, 1, msg32, 32); /* Signing will change s[index], so update in the sig structure */ secp256k1_scalar_get_b32(&sig->data[32 * (index + 1)], &s[index]); } @@ -96,6 +97,7 @@ int secp256k1_whitelist_sign(const 
secp256k1_context* ctx, secp256k1_whitelist_s } int secp256k1_whitelist_verify(const secp256k1_context* ctx, const secp256k1_whitelist_signature *sig, const secp256k1_pubkey *online_pubkeys, const secp256k1_pubkey *offline_pubkeys, const size_t n_keys, const secp256k1_pubkey *sub_pubkey) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); secp256k1_scalar s[MAX_KEYS]; secp256k1_gej pubs[MAX_KEYS]; unsigned char msg32[32]; @@ -123,7 +125,7 @@ int secp256k1_whitelist_verify(const secp256k1_context* ctx, const secp256k1_whi return 0; } /* Do verification */ - return secp256k1_borromean_verify(NULL, &sig->data[0], s, pubs, &sig->n_keys, 1, msg32, 32); + return secp256k1_borromean_verify(hash_ctx, NULL, &sig->data[0], s, pubs, &sig->n_keys, 1, msg32, 32); } size_t secp256k1_whitelist_signature_n_keys(const secp256k1_whitelist_signature *sig) { diff --git a/src/modules/whitelist/whitelist_impl.h b/src/modules/whitelist/whitelist_impl.h index edb90b4f7..9a9023fb8 100644 --- a/src/modules/whitelist/whitelist_impl.h +++ b/src/modules/whitelist/whitelist_impl.h @@ -7,7 +7,7 @@ #ifndef SECP256K1_WHITELIST_IMPL_H #define SECP256K1_WHITELIST_IMPL_H -static int secp256k1_whitelist_hash_pubkey(secp256k1_scalar* output, secp256k1_gej* pubkey) { +static int secp256k1_whitelist_hash_pubkey(const secp256k1_hash_ctx *hash_ctx, secp256k1_scalar* output, secp256k1_gej* pubkey) { unsigned char h[32]; unsigned char c[33]; secp256k1_sha256 sha; @@ -22,8 +22,8 @@ static int secp256k1_whitelist_hash_pubkey(secp256k1_scalar* output, secp256k1_g return 0; } secp256k1_eckey_pubkey_serialize33(&ge, c); - secp256k1_sha256_write(&sha, c, size); - secp256k1_sha256_finalize(&sha, h); + secp256k1_sha256_write(hash_ctx, &sha, c, size); + secp256k1_sha256_finalize(hash_ctx, &sha, h); secp256k1_sha256_clear(&sha); secp256k1_scalar_set_b32(output, h, &overflow); @@ -35,14 +35,14 @@ static int secp256k1_whitelist_hash_pubkey(secp256k1_scalar* output, secp256k1_g return 1; } -static 
int secp256k1_whitelist_tweak_pubkey(secp256k1_gej* pub_tweaked) { +static int secp256k1_whitelist_tweak_pubkey(const secp256k1_hash_ctx *hash_ctx, secp256k1_gej* pub_tweaked) { secp256k1_scalar tweak; secp256k1_scalar zero; int ret; secp256k1_scalar_set_int(&zero, 0); - ret = secp256k1_whitelist_hash_pubkey(&tweak, pub_tweaked); + ret = secp256k1_whitelist_hash_pubkey(hash_ctx, &tweak, pub_tweaked); if (ret) { secp256k1_ecmult(pub_tweaked, pub_tweaked, &tweak, &zero); } @@ -51,6 +51,7 @@ static int secp256k1_whitelist_tweak_pubkey(secp256k1_gej* pub_tweaked) { static int secp256k1_whitelist_compute_tweaked_privkey(const secp256k1_context* ctx, secp256k1_scalar* skey, const unsigned char *online_key, const unsigned char *summed_key) { secp256k1_scalar tweak; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); int ret = 1; int overflow = 0; @@ -61,7 +62,7 @@ static int secp256k1_whitelist_compute_tweaked_privkey(const secp256k1_context* if (ret) { secp256k1_gej pkeyj; secp256k1_ecmult_gen(&ctx->ecmult_gen_ctx, &pkeyj, skey); - ret = secp256k1_whitelist_hash_pubkey(&tweak, &pkeyj); + ret = secp256k1_whitelist_hash_pubkey(hash_ctx, &tweak, &pkeyj); } if (ret) { secp256k1_scalar sonline; @@ -86,6 +87,7 @@ static int secp256k1_whitelist_compute_tweaked_privkey(const secp256k1_context* * for the ring signature; also produce a commitment to every one that will * be our "message". 
*/ static int secp256k1_whitelist_compute_keys_and_message(const secp256k1_context* ctx, unsigned char *msg32, secp256k1_gej *keys, const secp256k1_pubkey *online_pubkeys, const secp256k1_pubkey *offline_pubkeys, const int n_keys, const secp256k1_pubkey *sub_pubkey) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); unsigned char c[33]; size_t size = 33; secp256k1_sha256 sha; @@ -97,7 +99,7 @@ static int secp256k1_whitelist_compute_keys_and_message(const secp256k1_context* /* commit to sub-key */ secp256k1_eckey_pubkey_serialize33(&subkey_ge, c); - secp256k1_sha256_write(&sha, c, size); + secp256k1_sha256_write(hash_ctx, &sha, c, size); for (i = 0; i < n_keys; i++) { secp256k1_ge offline_ge; secp256k1_ge online_ge; @@ -106,18 +108,18 @@ static int secp256k1_whitelist_compute_keys_and_message(const secp256k1_context* /* commit to fixed keys */ secp256k1_pubkey_load(ctx, &offline_ge, &offline_pubkeys[i]); secp256k1_eckey_pubkey_serialize33(&offline_ge, c); - secp256k1_sha256_write(&sha, c, size); + secp256k1_sha256_write(hash_ctx, &sha, c, size); secp256k1_pubkey_load(ctx, &online_ge, &online_pubkeys[i]); secp256k1_eckey_pubkey_serialize33(&online_ge, c); - secp256k1_sha256_write(&sha, c, size); + secp256k1_sha256_write(hash_ctx, &sha, c, size); /* compute tweaked keys */ secp256k1_gej_set_ge(&tweaked_gej, &offline_ge); secp256k1_gej_add_ge_var(&tweaked_gej, &tweaked_gej, &subkey_ge, NULL); - secp256k1_whitelist_tweak_pubkey(&tweaked_gej); + secp256k1_whitelist_tweak_pubkey(hash_ctx, &tweaked_gej); secp256k1_gej_add_ge_var(&keys[i], &tweaked_gej, &online_ge, NULL); } - secp256k1_sha256_finalize(&sha, msg32); + secp256k1_sha256_finalize(hash_ctx, &sha, msg32); secp256k1_sha256_clear(&sha); return 1; } diff --git a/src/precompute_ecmult_gen.c b/src/precompute_ecmult_gen.c index cd0fe70fc..a03abdb54 100644 --- a/src/precompute_ecmult_gen.c +++ b/src/precompute_ecmult_gen.c @@ -80,7 +80,7 @@ int main(int argc, char **argv) { fprintf(fp, "const 
secp256k1_ge_storage secp256k1_ecmult_gen_prec_table[COMB_BLOCKS][COMB_POINTS] = {\n"); fprintf(fp, "#if 0\n"); - for (config = 0; config < sizeof(CONFIGS) / sizeof(*CONFIGS); ++config) { + for (config = 0; config < ARRAY_SIZE(CONFIGS); ++config) { print_table(fp, CONFIGS[config][0], CONFIGS[config][1]); if (CONFIGS[config][0] == COMB_BLOCKS && CONFIGS[config][1] == COMB_TEETH) { did_current_config = 1; diff --git a/src/secp256k1.c b/src/secp256k1.c index f1388f3e6..a3d37bd21 100644 --- a/src/secp256k1.c +++ b/src/secp256k1.c @@ -81,6 +81,7 @@ static void secp256k1_ecdsa_s2c_opening_save(secp256k1_ecdsa_s2c_opening* openin * context_eq function. */ struct secp256k1_context_struct { secp256k1_ecmult_gen_context ecmult_gen_ctx; + secp256k1_hash_ctx hash_ctx; secp256k1_callback illegal_callback; secp256k1_callback error_callback; int declassify; @@ -88,6 +89,7 @@ struct secp256k1_context_struct { static const secp256k1_context secp256k1_context_static_ = { { 0 }, + { secp256k1_sha256_transform }, { secp256k1_default_illegal_callback_fn, 0 }, { secp256k1_default_error_callback_fn, 0 }, 0 @@ -150,10 +152,11 @@ secp256k1_context* secp256k1_context_preallocated_create(void* prealloc, unsigne ret = (secp256k1_context*)prealloc; ret->illegal_callback = default_illegal_callback; ret->error_callback = default_error_callback; + secp256k1_hash_ctx_init(&ret->hash_ctx); /* Flags have been checked by secp256k1_context_preallocated_size. 
*/ VERIFY_CHECK((flags & SECP256K1_FLAGS_TYPE_MASK) == SECP256K1_FLAGS_TYPE_CONTEXT); - secp256k1_ecmult_gen_context_build(&ret->ecmult_gen_ctx); + secp256k1_ecmult_gen_context_build(&ret->ecmult_gen_ctx, &ret->hash_ctx); ret->declassify = !!(flags & SECP256K1_FLAGS_BIT_CONTEXT_DECLASSIFY); return ret; @@ -241,6 +244,22 @@ void secp256k1_context_set_error_callback(secp256k1_context* ctx, void (*fun)(co ctx->error_callback.data = data; } +void secp256k1_context_set_sha256_compression(secp256k1_context *ctx, secp256k1_sha256_compression_function fn_compression) { + VERIFY_CHECK(ctx != NULL); + ARG_CHECK_VOID(secp256k1_context_is_proper(ctx)); + if (!fn_compression) { /* Reset hash context */ + secp256k1_hash_ctx_init(&ctx->hash_ctx); + return; + } + /* Check and set */ + ARG_CHECK_VOID(secp256k1_selftest_sha256(fn_compression)); + ctx->hash_ctx.fn_sha256_compression = fn_compression; +} + +static SECP256K1_INLINE const secp256k1_hash_ctx* secp256k1_get_hash_context(const secp256k1_context *ctx) { + return &ctx->hash_ctx; +} + static secp256k1_scratch_space* secp256k1_scratch_space_create(const secp256k1_context* ctx, size_t max_size) { VERIFY_CHECK(ctx != NULL); return secp256k1_scratch_create(&ctx->error_callback, max_size); @@ -497,7 +516,7 @@ static SECP256K1_INLINE void buffer_append(unsigned char *buf, unsigned int *off *offset += len; } -static int nonce_function_rfc6979(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *algo16, void *data, unsigned int counter) { +static int nonce_function_rfc6979_impl(const secp256k1_hash_ctx *hash_ctx, unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *algo16, void *data, unsigned int counter) { unsigned char keydata[112]; unsigned int offset = 0; secp256k1_rfc6979_hmac_sha256 rng; @@ -522,9 +541,9 @@ static int nonce_function_rfc6979(unsigned char *nonce32, const unsigned char *m if (algo16 != NULL) { buffer_append(keydata, 
&offset, algo16, 16); } - secp256k1_rfc6979_hmac_sha256_initialize(&rng, keydata, offset); + secp256k1_rfc6979_hmac_sha256_initialize(hash_ctx, &rng, keydata, offset); for (i = 0; i <= counter; i++) { - secp256k1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, nonce32, 32); } secp256k1_rfc6979_hmac_sha256_finalize(&rng); @@ -533,11 +552,16 @@ static int nonce_function_rfc6979(unsigned char *nonce32, const unsigned char *m return 1; } +static int nonce_function_rfc6979(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *algo16, void *data, unsigned int counter) { + return nonce_function_rfc6979_impl(secp256k1_get_hash_context(secp256k1_context_static), nonce32, msg32, key32, algo16, data, counter); +} + const secp256k1_nonce_function secp256k1_nonce_function_rfc6979 = nonce_function_rfc6979; const secp256k1_nonce_function secp256k1_nonce_function_default = nonce_function_rfc6979; static int secp256k1_ecdsa_sign_inner(const secp256k1_context* ctx, secp256k1_scalar* r, secp256k1_scalar* s, int* recid, secp256k1_sha256* s2c_sha, secp256k1_ecdsa_s2c_opening *s2c_opening, const unsigned char* s2c_data32, const unsigned char *msg32, const unsigned char *seckey, secp256k1_nonce_function noncefp, const void* noncedata) { secp256k1_scalar sec, non, msg; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(ctx); int ret = 0; int is_sec_valid; unsigned char nonce32[32]; @@ -548,14 +572,11 @@ static int secp256k1_ecdsa_sign_inner(const secp256k1_context* ctx, secp256k1_sc if (recid) { *recid = 0; } - if (noncefp == NULL) { - noncefp = secp256k1_nonce_function_default; - } /* sign-to-contract commitments only work with the default nonce function, * because we need to ensure that s2c_data is actually hashed into the nonce and * not just ignored. Otherwise an attacker can exfiltrate the secret key by * signing the same message thrice with different commitments. 
*/ - VERIFY_CHECK(s2c_data32 == NULL || noncefp == secp256k1_nonce_function_default); + VERIFY_CHECK(s2c_data32 == NULL || noncefp == NULL || noncefp == secp256k1_nonce_function_default); /* Fail if the secret key is invalid. */ is_sec_valid = secp256k1_scalar_set_b32_seckey(&sec, seckey); @@ -563,7 +584,14 @@ static int secp256k1_ecdsa_sign_inner(const secp256k1_context* ctx, secp256k1_sc secp256k1_scalar_set_b32(&msg, msg32, NULL); while (1) { int is_nonce_valid; - ret = !!noncefp(nonce32, msg32, seckey, NULL, (void*)noncedata, count); + + if (noncefp == NULL) { + /* Use ctx-aware function by default */ + ret = nonce_function_rfc6979_impl(secp256k1_get_hash_context(ctx), nonce32, msg32, seckey, NULL, (void*)noncedata, count); + } else { + ret = !!noncefp(nonce32, msg32, seckey, NULL, (void*)noncedata, count); + } + if (!ret) { break; } @@ -588,7 +616,7 @@ static int secp256k1_ecdsa_sign_inner(const secp256k1_context* ctx, secp256k1_sc secp256k1_declassify(ctx, &nonce_p.infinity, sizeof(nonce_p.infinity)); /* Tweak nonce with s2c commitment. 
*/ - ret = secp256k1_ec_commit_seckey(&non, &nonce_p, s2c_sha, s2c_data32, 32); + ret = secp256k1_ec_commit_seckey(hash_ctx, &non, &nonce_p, s2c_sha, s2c_data32, 32); secp256k1_declassify(ctx, &ret, sizeof(ret)); /* may be secret that the tweak falied, but happens with negligible probability */ if (!ret) { break; @@ -807,7 +835,7 @@ int secp256k1_context_randomize(secp256k1_context* ctx, const unsigned char *see ARG_CHECK(secp256k1_context_is_proper(ctx)); if (secp256k1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)) { - secp256k1_ecmult_gen_blind(&ctx->ecmult_gen_ctx, seed32); + secp256k1_ecmult_gen_blind(&ctx->ecmult_gen_ctx, secp256k1_get_hash_context(ctx), seed32); } return 1; } @@ -845,9 +873,9 @@ int secp256k1_tagged_sha256(const secp256k1_context* ctx, unsigned char *hash32, ARG_CHECK(tag != NULL); ARG_CHECK(msg != NULL); - secp256k1_sha256_initialize_tagged(&sha, tag, taglen); - secp256k1_sha256_write(&sha, msg, msglen); - secp256k1_sha256_finalize(&sha, hash32); + secp256k1_sha256_initialize_tagged(secp256k1_get_hash_context(ctx), &sha, tag, taglen); + secp256k1_sha256_write(secp256k1_get_hash_context(ctx), &sha, msg, msglen); + secp256k1_sha256_finalize(secp256k1_get_hash_context(ctx), &sha, hash32); secp256k1_sha256_clear(&sha); return 1; } diff --git a/src/selftest.h b/src/selftest.h index d083ac952..de0e0597f 100644 --- a/src/selftest.h +++ b/src/selftest.h @@ -11,7 +11,8 @@ #include -static int secp256k1_selftest_sha256(void) { +static int secp256k1_selftest_sha256(secp256k1_sha256_compression_function fn_compression) { + secp256k1_hash_ctx hash_ctx; static const char *input63 = "For this sample, this 63-byte string will be used as input data"; static const unsigned char output32[32] = { 0xf0, 0x8a, 0x78, 0xcb, 0xba, 0xee, 0x08, 0x2b, 0x05, 0x2a, 0xe0, 0x70, 0x8f, 0x32, 0xfa, 0x1e, @@ -20,13 +21,15 @@ static int secp256k1_selftest_sha256(void) { unsigned char out[32]; secp256k1_sha256 hasher; secp256k1_sha256_initialize(&hasher); - 
secp256k1_sha256_write(&hasher, (const unsigned char*)input63, 63); - secp256k1_sha256_finalize(&hasher, out); + hash_ctx.fn_sha256_compression = fn_compression; + secp256k1_sha256_write(&hash_ctx, &hasher, (const unsigned char*)input63, 63); + secp256k1_sha256_finalize(&hash_ctx, &hasher, out); return secp256k1_memcmp_var(out, output32, 32) == 0; } static int secp256k1_selftest_passes(void) { - return secp256k1_selftest_sha256(); + /* Use default sha256 compression */ + return secp256k1_selftest_sha256(secp256k1_sha256_transform); } #endif /* SECP256K1_SELFTEST_H */ diff --git a/src/testrand_impl.h b/src/testrand_impl.h index cccc12bed..95b76160c 100644 --- a/src/testrand_impl.h +++ b/src/testrand_impl.h @@ -23,12 +23,13 @@ SECP256K1_INLINE static void testrand_seed(const unsigned char *seed16) { unsigned char out32[32]; secp256k1_sha256 hash; int i; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(secp256k1_context_static); /* Use SHA256(PREFIX || seed16) as initial state. */ secp256k1_sha256_initialize(&hash); - secp256k1_sha256_write(&hash, PREFIX, sizeof(PREFIX)); - secp256k1_sha256_write(&hash, seed16, 16); - secp256k1_sha256_finalize(&hash, out32); + secp256k1_sha256_write(hash_ctx, &hash, PREFIX, sizeof(PREFIX)); + secp256k1_sha256_write(hash_ctx, &hash, seed16, 16); + secp256k1_sha256_finalize(hash_ctx, &hash, out32); for (i = 0; i < 4; ++i) { uint64_t s = 0; int j; diff --git a/src/tests.c b/src/tests.c index c187d1734..5d5e833d1 100644 --- a/src/tests.c +++ b/src/tests.c @@ -138,7 +138,7 @@ static void run_xoshiro256pp_tests(void) { { size_t i; /* Sanity check that we run before the actual seeding. 
*/ - for (i = 0; i < sizeof(secp256k1_test_state)/sizeof(secp256k1_test_state[0]); i++) { + for (i = 0; i < ARRAY_SIZE(secp256k1_test_state); i++) { CHECK(secp256k1_test_state[i] == 0); } } @@ -178,6 +178,7 @@ static int ecmult_gen_context_eq(const secp256k1_ecmult_gen_context *a, const se static int context_eq(const secp256k1_context *a, const secp256k1_context *b) { return a->declassify == b->declassify && ecmult_gen_context_eq(&a->ecmult_gen_ctx, &b->ecmult_gen_ctx) + && a->hash_ctx.fn_sha256_compression == b->hash_ctx.fn_sha256_compression && a->illegal_callback.fn == b->illegal_callback.fn && a->illegal_callback.data == b->illegal_callback.data && a->error_callback.fn == b->error_callback.fn @@ -192,7 +193,7 @@ static void run_deprecated_context_flags_test(void) { SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY }; secp256k1_context *none_ctx = secp256k1_context_create(SECP256K1_CONTEXT_NONE); int i; - for (i = 0; i < (int)(sizeof(flags)/sizeof(flags[0])); i++) { + for (i = 0; i < (int)(ARRAY_SIZE(flags)); i++) { secp256k1_context *tmp_ctx; CHECK(secp256k1_context_preallocated_size(SECP256K1_CONTEXT_NONE) == secp256k1_context_preallocated_size(flags[i])); tmp_ctx = secp256k1_context_create(flags[i]); @@ -478,18 +479,158 @@ static void run_scratch_tests(void) { secp256k1_scratch_space_destroy(CTX, NULL); /* no-op */ } +/* A compression function that does nothing */ +static void invalid_sha256_compression(uint32_t *s, const unsigned char *msg, size_t rounds) { + (void)s; (void)msg; (void)rounds; +} + +static int own_transform_called = 0; +static void good_sha256_compression(uint32_t *s, const unsigned char *msg, size_t rounds) { + own_transform_called = 1; + secp256k1_sha256_transform(s, msg, rounds); +} + +static void run_plug_sha256_compression_tests(void) { + secp256k1_context *ctx, *ctx_cloned; + secp256k1_sha256 sha; + unsigned char sha_out[32]; + /* 1) Verify the context is initialized with the default compression function */ + ctx = 
secp256k1_context_create(SECP256K1_CONTEXT_NONE); + CHECK(ctx->hash_ctx.fn_sha256_compression == secp256k1_sha256_transform); + + /* 2) Verify providing a bad compression function fails during set */ + CHECK_ILLEGAL_VOID(ctx, secp256k1_context_set_sha256_compression(ctx, invalid_sha256_compression)); + CHECK(ctx->hash_ctx.fn_sha256_compression == secp256k1_sha256_transform); + + /* 3) Provide sha256 to ctx and verify it is called when provided */ + own_transform_called = 0; + secp256k1_context_set_sha256_compression(ctx, good_sha256_compression); + CHECK(own_transform_called); + + /* 4) Verify callback makes it across clone */ + ctx_cloned = secp256k1_context_clone(ctx); + CHECK(ctx_cloned->hash_ctx.fn_sha256_compression == good_sha256_compression); + + /* 5) A hash operation should invoke the installed callback */ + own_transform_called = 0; + secp256k1_sha256_initialize(&sha); + secp256k1_sha256_write(secp256k1_get_hash_context(ctx), &sha, (const unsigned char*)"a", 1); + secp256k1_sha256_finalize(secp256k1_get_hash_context(ctx), &sha, sha_out); + CHECK(own_transform_called); + + /* 6) Unset sha256 and verify the default one is set again */ + secp256k1_context_set_sha256_compression(ctx, NULL); + CHECK(ctx->hash_ctx.fn_sha256_compression == secp256k1_sha256_transform); + + secp256k1_context_destroy(ctx); + secp256k1_context_destroy(ctx_cloned); +} + +static void run_sha256_multi_block_compression_tests(void) { + secp256k1_hash_ctx hash_ctx; + secp256k1_sha256 sha256_one; + secp256k1_sha256 sha256_two; + unsigned char out_one[32], out_two[32]; + + hash_ctx.fn_sha256_compression = secp256k1_sha256_transform; + + { /* 1) Writing one 64-byte full block vs two 32-byte blocks */ + const unsigned char data[64] = "totally serious test message to hash, definitely no random data"; + unsigned char data32[32]; + + secp256k1_sha256_initialize(&sha256_one); + secp256k1_sha256_initialize(&sha256_two); + + /* Write the 64-byte block */ + secp256k1_sha256_write(&hash_ctx, 
&sha256_one, data, 64); + secp256k1_sha256_finalize(&hash_ctx, &sha256_one, out_one); + + /* Write the two 32-byte blocks */ + memcpy(data32, data, 32); + secp256k1_sha256_write(&hash_ctx, &sha256_two, data32, 32); + memcpy(data32, data + 32, 32); + secp256k1_sha256_write(&hash_ctx, &sha256_two, data32, 32); + secp256k1_sha256_finalize(&hash_ctx, &sha256_two, out_two); + + CHECK(secp256k1_memcmp_var(out_one, out_two, 32) == 0); + } + + { /* 2) Writing one 80-byte block vs two 40-byte blocks */ + const unsigned char data[80] = "Genesis: The Times 03/Jan/2009 Chancellor on brink of second bailout for banks "; + unsigned char data40[40]; + + secp256k1_sha256_initialize(&sha256_one); + secp256k1_sha256_initialize(&sha256_two); + + /* Write the 80-byte block */ + secp256k1_sha256_write(&hash_ctx, &sha256_one, data, 80); + secp256k1_sha256_finalize(&hash_ctx, &sha256_one, out_one); + + /* Write the two 40-byte blocks */ + memcpy(data40, data, 40); + secp256k1_sha256_write(&hash_ctx, &sha256_two, data40, 40); + memcpy(data40, data + 40, 40); + secp256k1_sha256_write(&hash_ctx, &sha256_two, data40, 40); + secp256k1_sha256_finalize(&hash_ctx, &sha256_two, out_two); + + CHECK(secp256k1_memcmp_var(out_one, out_two, 32) == 0); + } + + { /* 3) Writing multiple consecutive full blocks in one write (128 bytes) */ + unsigned char data[128]; + unsigned char i; + for (i = 0; i < 128; i++) data[i] = i; + + secp256k1_sha256_initialize(&sha256_one); + secp256k1_sha256_initialize(&sha256_two); + + /* Single write of 128 bytes (two full 64-byte blocks) */ + secp256k1_sha256_write(&hash_ctx, &sha256_one, data, 128); + secp256k1_sha256_finalize(&hash_ctx, &sha256_one, out_one); + + /* Two separate writes of 64 bytes each */ + secp256k1_sha256_write(&hash_ctx, &sha256_two, data, 64); + secp256k1_sha256_write(&hash_ctx, &sha256_two, data + 64, 64); + secp256k1_sha256_finalize(&hash_ctx, &sha256_two, out_two); + + CHECK(secp256k1_memcmp_var(out_one, out_two, 32) == 0); + } + + { /* 4) Mixed 
small + large writes in sequence */ + unsigned char data[150]; + unsigned char i; + for (i = 0; i < 150; i++) data[i] = i; + + secp256k1_sha256_initialize(&sha256_one); + secp256k1_sha256_initialize(&sha256_two); + + /* Single write of 150 bytes */ + secp256k1_sha256_write(&hash_ctx, &sha256_one, data, 150); + secp256k1_sha256_finalize(&hash_ctx, &sha256_one, out_one); + + /* Split writes: 10, 64, 64, 12 bytes */ + secp256k1_sha256_write(&hash_ctx, &sha256_two, data, 10); + secp256k1_sha256_write(&hash_ctx, &sha256_two, data + 10, 64); + secp256k1_sha256_write(&hash_ctx, &sha256_two, data + 74, 64); + secp256k1_sha256_write(&hash_ctx, &sha256_two, data + 138, 12); + secp256k1_sha256_finalize(&hash_ctx, &sha256_two, out_two); + + CHECK(secp256k1_memcmp_var(out_one, out_two, 32) == 0); + } +} + static void run_ctz_tests(void) { static const uint32_t b32[] = {1, 0xffffffff, 0x5e56968f, 0xe0d63129}; static const uint64_t b64[] = {1, 0xffffffffffffffff, 0xbcd02462139b3fc3, 0x98b5f80c769693ef}; int shift; unsigned i; - for (i = 0; i < sizeof(b32) / sizeof(b32[0]); ++i) { + for (i = 0; i < ARRAY_SIZE(b32); ++i) { for (shift = 0; shift < 32; ++shift) { CHECK(secp256k1_ctz32_var_debruijn(b32[i] << shift) == shift); CHECK(secp256k1_ctz32_var(b32[i] << shift) == shift); } } - for (i = 0; i < sizeof(b64) / sizeof(b64[0]); ++i) { + for (i = 0; i < ARRAY_SIZE(b64); ++i) { for (shift = 0; shift < 64; ++shift) { CHECK(secp256k1_ctz64_var_debruijn(b64[i] << shift) == shift); CHECK(secp256k1_ctz64_var(b64[i] << shift) == shift); @@ -500,6 +641,7 @@ static void run_ctz_tests(void) { /***** HASH TESTS *****/ static void run_sha256_known_output_tests(void) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); static const char *inputs[] = { "", "abc", "message digest", "secure hash algorithm", "SHA256 is considered to be safe", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", @@ -524,7 +666,7 @@ static void run_sha256_known_output_tests(void) { unsigned 
int i, ninputs; /* Skip last input vector for low iteration counts */ - ninputs = sizeof(inputs)/sizeof(inputs[0]) - 1; + ninputs = ARRAY_SIZE(inputs) - 1; CONDITIONAL_TEST(16, "run_sha256_known_output_tests 1000000") ninputs++; for (i = 0; i < ninputs; i++) { @@ -535,10 +677,10 @@ static void run_sha256_known_output_tests(void) { j = repeat[i]; secp256k1_sha256_initialize(&hasher); while (j > 0) { - secp256k1_sha256_write(&hasher, (const unsigned char*)(inputs[i]), strlen(inputs[i])); + secp256k1_sha256_write(hash_ctx, &hasher, (const unsigned char*)(inputs[i]), strlen(inputs[i])); j--; } - secp256k1_sha256_finalize(&hasher, out); + secp256k1_sha256_finalize(hash_ctx, &hasher, out); CHECK(secp256k1_memcmp_var(out, outputs[i], 32) == 0); /* 2. Run: split the input bytestrings randomly before writing */ if (strlen(inputs[i]) > 0) { @@ -546,11 +688,11 @@ static void run_sha256_known_output_tests(void) { secp256k1_sha256_initialize(&hasher); j = repeat[i]; while (j > 0) { - secp256k1_sha256_write(&hasher, (const unsigned char*)(inputs[i]), split); - secp256k1_sha256_write(&hasher, (const unsigned char*)(inputs[i] + split), strlen(inputs[i]) - split); + secp256k1_sha256_write(hash_ctx, &hasher, (const unsigned char*)(inputs[i]), split); + secp256k1_sha256_write(hash_ctx, &hasher, (const unsigned char*)(inputs[i] + split), strlen(inputs[i]) - split); j--; } - secp256k1_sha256_finalize(&hasher, out); + secp256k1_sha256_finalize(hash_ctx, &hasher, out); CHECK(secp256k1_memcmp_var(out, outputs[i], 32) == 0); } } @@ -648,12 +790,13 @@ static void run_sha256_counter_tests(void) { {0x2c, 0xf3, 0xa9, 0xf6, 0x15, 0x25, 0x80, 0x70, 0x76, 0x99, 0x7d, 0xf1, 0xc3, 0x2f, 0xa3, 0x31, 0xff, 0x92, 0x35, 0x2e, 0x8d, 0x04, 0x13, 0x33, 0xd8, 0x0d, 0xdb, 0x4a, 0xf6, 0x8c, 0x03, 0x34}, {0xec, 0x12, 0x24, 0x9f, 0x35, 0xa4, 0x29, 0x8b, 0x9e, 0x4a, 0x95, 0xf8, 0x61, 0xaf, 0x61, 0xc5, 0x66, 0x55, 0x3e, 0x3f, 0x2a, 0x98, 0xea, 0x71, 0x16, 0x6b, 0x1c, 0xd9, 0xe4, 0x09, 0xd2, 0x8e}, }; + const 
secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); unsigned int i; - for (i = 0; i < sizeof(midstates)/sizeof(midstates[0]); i++) { + for (i = 0; i < ARRAY_SIZE(midstates); i++) { unsigned char out[32]; secp256k1_sha256 hasher = midstates[i]; - secp256k1_sha256_write(&hasher, (const unsigned char*)input, strlen(input)); - secp256k1_sha256_finalize(&hasher, out); + secp256k1_sha256_write(hash_ctx, &hasher, (const unsigned char*)input, strlen(input)); + secp256k1_sha256_finalize(hash_ctx, &hasher, out); CHECK(secp256k1_memcmp_var(out, outputs[i], 32) == 0); } } @@ -670,9 +813,9 @@ static void test_sha256_eq(const secp256k1_sha256 *sha1, const secp256k1_sha256 } /* Convenience function for using test_sha256_eq to verify the correctness of a * tagged hash midstate. This function is used by some module tests. */ -static void test_sha256_tag_midstate(secp256k1_sha256 *sha_tagged, const unsigned char *tag, size_t taglen) { +static void test_sha256_tag_midstate(const secp256k1_hash_ctx *hash_ctx, secp256k1_sha256 *sha_tagged, const unsigned char *tag, size_t taglen) { secp256k1_sha256 sha; - secp256k1_sha256_initialize_tagged(&sha, tag, taglen); + secp256k1_sha256_initialize_tagged(hash_ctx, &sha, tag, taglen); test_sha256_eq(&sha, sha_tagged); } @@ -702,19 +845,20 @@ static void run_hmac_sha256_tests(void) { {0x9b, 0x09, 0xff, 0xa7, 0x1b, 0x94, 0x2f, 0xcb, 0x27, 0x63, 0x5f, 0xbc, 0xd5, 0xb0, 0xe9, 0x44, 0xbf, 0xdc, 0x63, 0x64, 0x4f, 0x07, 0x13, 0x93, 0x8a, 0x7f, 0x51, 0x53, 0x5c, 0x3a, 0x35, 0xe2} }; int i; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); for (i = 0; i < 6; i++) { secp256k1_hmac_sha256 hasher; unsigned char out[32]; - secp256k1_hmac_sha256_initialize(&hasher, (const unsigned char*)(keys[i]), strlen(keys[i])); - secp256k1_hmac_sha256_write(&hasher, (const unsigned char*)(inputs[i]), strlen(inputs[i])); - secp256k1_hmac_sha256_finalize(&hasher, out); + secp256k1_hmac_sha256_initialize(hash_ctx, &hasher, (const unsigned 
char*)(keys[i]), strlen(keys[i])); + secp256k1_hmac_sha256_write(hash_ctx, &hasher, (const unsigned char*)(inputs[i]), strlen(inputs[i])); + secp256k1_hmac_sha256_finalize(hash_ctx, &hasher, out); CHECK(secp256k1_memcmp_var(out, outputs[i], 32) == 0); if (strlen(inputs[i]) > 0) { int split = testrand_int(strlen(inputs[i])); - secp256k1_hmac_sha256_initialize(&hasher, (const unsigned char*)(keys[i]), strlen(keys[i])); - secp256k1_hmac_sha256_write(&hasher, (const unsigned char*)(inputs[i]), split); - secp256k1_hmac_sha256_write(&hasher, (const unsigned char*)(inputs[i] + split), strlen(inputs[i]) - split); - secp256k1_hmac_sha256_finalize(&hasher, out); + secp256k1_hmac_sha256_initialize(hash_ctx, &hasher, (const unsigned char*)(keys[i]), strlen(keys[i])); + secp256k1_hmac_sha256_write(hash_ctx, &hasher, (const unsigned char*)(inputs[i]), split); + secp256k1_hmac_sha256_write(hash_ctx, &hasher, (const unsigned char*)(inputs[i] + split), strlen(inputs[i]) - split); + secp256k1_hmac_sha256_finalize(hash_ctx, &hasher, out); CHECK(secp256k1_memcmp_var(out, outputs[i], 32) == 0); } } @@ -735,27 +879,28 @@ static void run_rfc6979_hmac_sha256_tests(void) { {0x75, 0x97, 0x88, 0x7c, 0xbd, 0x76, 0x32, 0x1f, 0x32, 0xe3, 0x04, 0x40, 0x67, 0x9a, 0x22, 0xcf, 0x7f, 0x8d, 0x9d, 0x2e, 0xac, 0x39, 0x0e, 0x58, 0x1f, 0xea, 0x09, 0x1c, 0xe2, 0x02, 0xba, 0x94} }; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); secp256k1_rfc6979_hmac_sha256 rng; unsigned char out[32]; int i; - secp256k1_rfc6979_hmac_sha256_initialize(&rng, key1, 64); + secp256k1_rfc6979_hmac_sha256_initialize(hash_ctx, &rng, key1, 64); for (i = 0; i < 3; i++) { - secp256k1_rfc6979_hmac_sha256_generate(&rng, out, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, out, 32); CHECK(secp256k1_memcmp_var(out, out1[i], 32) == 0); } secp256k1_rfc6979_hmac_sha256_finalize(&rng); - secp256k1_rfc6979_hmac_sha256_initialize(&rng, key1, 65); + secp256k1_rfc6979_hmac_sha256_initialize(hash_ctx, &rng, 
key1, 65); for (i = 0; i < 3; i++) { - secp256k1_rfc6979_hmac_sha256_generate(&rng, out, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, out, 32); CHECK(secp256k1_memcmp_var(out, out1[i], 32) != 0); } secp256k1_rfc6979_hmac_sha256_finalize(&rng); - secp256k1_rfc6979_hmac_sha256_initialize(&rng, key2, 64); + secp256k1_rfc6979_hmac_sha256_initialize(hash_ctx, &rng, key2, 64); for (i = 0; i < 3; i++) { - secp256k1_rfc6979_hmac_sha256_generate(&rng, out, 32); + secp256k1_rfc6979_hmac_sha256_generate(hash_ctx, &rng, out, 32); CHECK(secp256k1_memcmp_var(out, out2[i], 32) == 0); } secp256k1_rfc6979_hmac_sha256_finalize(&rng); @@ -792,10 +937,11 @@ static void run_sha256_initialize_midstate_tests(void) { 0xa9ec59eaul, 0x9b4c2ffful, 0x400821e2ul, 0x0dcf3847ul, 0xbe7ea179ul, 0xa5772bdcul, 0x7d29bfe3ul, 0xa486b855ul }; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); secp256k1_sha256 sha; secp256k1_sha256_initialize_midstate(&sha, 64, midstate); - test_sha256_tag_midstate(&sha, tag, sizeof(tag) - 1); + test_sha256_tag_midstate(hash_ctx, &sha, tag, sizeof(tag) - 1); } /***** MODINV TESTS *****/ @@ -1708,7 +1854,7 @@ static void run_modinv_tests(void) { int i, j, ok; /* Test known inputs/outputs */ - for (i = 0; (size_t)i < sizeof(CASES) / sizeof(CASES[0]); ++i) { + for (i = 0; (size_t)i < ARRAY_SIZE(CASES); ++i) { uint16_t out[16]; test_modinv32_uint16(out, CASES[i][0], CASES[i][1]); for (j = 0; j < 16; ++j) CHECK(out[j] == CASES[i][2][j]); @@ -2364,7 +2510,7 @@ static void run_scalar_tests(void) { SECP256K1_SCALAR_CONST(0x7ffffffful, 0xfffffffful, 0xfffffffful, 0xfffffffful, 0xfffffffful, 0xfffffffful, 0xfffffffful, 0xfffffffful), }; unsigned n; - for (n = 0; n < sizeof(HALF_TESTS) / sizeof(HALF_TESTS[0]); ++n) { + for (n = 0; n < ARRAY_SIZE(HALF_TESTS); ++n) { secp256k1_scalar s; secp256k1_scalar_half(&s, &HALF_TESTS[n]); secp256k1_scalar_add(&s, &s, &s); @@ -3634,7 +3780,7 @@ static void run_inverse_tests(void) secp256k1_scalar x_scalar; 
memset(b32, 0, sizeof(b32)); /* Test fixed test cases through test_inverse_{scalar,field}, both ways. */ - for (i = 0; (size_t)i < sizeof(fe_cases)/sizeof(fe_cases[0]); ++i) { + for (i = 0; (size_t)i < ARRAY_SIZE(fe_cases); ++i) { for (var = 0; var <= 1; ++var) { test_inverse_field(&x_fe, &fe_cases[i][0], var); CHECK(fe_equal(&x_fe, &fe_cases[i][1])); @@ -3642,7 +3788,7 @@ static void run_inverse_tests(void) CHECK(fe_equal(&x_fe, &fe_cases[i][0])); } } - for (i = 0; (size_t)i < sizeof(scalar_cases)/sizeof(scalar_cases[0]); ++i) { + for (i = 0; (size_t)i < ARRAY_SIZE(scalar_cases); ++i) { for (var = 0; var <= 1; ++var) { test_inverse_scalar(&x_scalar, &scalar_cases[i][0], var); CHECK(secp256k1_scalar_eq(&x_scalar, &scalar_cases[i][1])); @@ -4241,6 +4387,7 @@ static void run_ec_combine(void) { } static void test_ec_commit(void) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); secp256k1_scalar seckey_s; secp256k1_ge pubkey; secp256k1_gej pubkeyj; @@ -4256,29 +4403,30 @@ static void test_ec_commit(void) { /* Commit to data and verify */ secp256k1_sha256_initialize(&sha); - CHECK(secp256k1_ec_commit(&commitment, &pubkey, &sha, data, 32) == 1); + CHECK(secp256k1_ec_commit(hash_ctx, &commitment, &pubkey, &sha, data, 32) == 1); secp256k1_sha256_initialize(&sha); - CHECK(secp256k1_ec_commit_verify(&commitment, &pubkey, &sha, data, 32) == 1); + CHECK(secp256k1_ec_commit_verify(hash_ctx, &commitment, &pubkey, &sha, data, 32) == 1); secp256k1_sha256_initialize(&sha); - CHECK(secp256k1_ec_commit_seckey(&seckey_s, &pubkey, &sha, data, 32) == 1); + CHECK(secp256k1_ec_commit_seckey(hash_ctx, &seckey_s, &pubkey, &sha, data, 32) == 1); secp256k1_ecmult_gen(&CTX->ecmult_gen_ctx, &pubkeyj, &seckey_s); secp256k1_gej_eq_ge_var(&pubkeyj, &commitment); /* Check that verification fails with different data */ secp256k1_sha256_initialize(&sha); - CHECK(secp256k1_ec_commit_verify(&commitment, &pubkey, &sha, data, 31) == 0); + CHECK(secp256k1_ec_commit_verify(hash_ctx, 
&commitment, &pubkey, &sha, data, 31) == 0); /* Check that commmitting fails when the inner pubkey is the point at * infinity */ secp256k1_sha256_initialize(&sha); secp256k1_ge_set_infinity(&pubkey); - CHECK(secp256k1_ec_commit(&commitment, &pubkey, &sha, data, 32) == 0); + CHECK(secp256k1_ec_commit(hash_ctx, &commitment, &pubkey, &sha, data, 32) == 0); secp256k1_scalar_set_int(&seckey_s, 0); - CHECK(secp256k1_ec_commit_seckey(&seckey_s, &pubkey, &sha, data, 32) == 0); - CHECK(secp256k1_ec_commit_verify(&commitment, &pubkey, &sha, data, 32) == 0); + CHECK(secp256k1_ec_commit_seckey(hash_ctx, &seckey_s, &pubkey, &sha, data, 32) == 0); + CHECK(secp256k1_ec_commit_verify(hash_ctx, &commitment, &pubkey, &sha, data, 32) == 0); } static void test_ec_commit_api(void) { + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); unsigned char seckey[32]; secp256k1_scalar seckey_s; secp256k1_ge pubkey; @@ -4296,17 +4444,17 @@ static void test_ec_commit_api(void) { secp256k1_ge_set_gej(&pubkey, &pubkeyj); secp256k1_sha256_initialize(&sha); - CHECK(secp256k1_ec_commit(&commitment, &pubkey, &sha, data, 1) == 1); + CHECK(secp256k1_ec_commit(hash_ctx, &commitment, &pubkey, &sha, data, 1) == 1); /* The same pubkey can be both input and output of the function */ { secp256k1_ge pubkey_tmp = pubkey; secp256k1_sha256_initialize(&sha); - CHECK(secp256k1_ec_commit(&pubkey_tmp, &pubkey_tmp, &sha, data, 1) == 1); + CHECK(secp256k1_ec_commit(hash_ctx, &pubkey_tmp, &pubkey_tmp, &sha, data, 1) == 1); secp256k1_ge_eq_var(&commitment, &pubkey_tmp); } secp256k1_sha256_initialize(&sha); - CHECK(secp256k1_ec_commit_verify(&commitment, &pubkey, &sha, data, 1) == 1); + CHECK(secp256k1_ec_commit_verify(hash_ctx, &commitment, &pubkey, &sha, data, 1) == 1); } static void run_ec_commit(void) { @@ -4542,11 +4690,19 @@ static void test_point_times_order(const secp256k1_gej *point) { CHECK(secp256k1_ge_is_valid_var(&res3) == 0); /* check zero/one edge cases */ secp256k1_ecmult(&res1, point, 
&secp256k1_scalar_zero, &secp256k1_scalar_zero); + secp256k1_ecmult(&res2, point, &secp256k1_scalar_zero, NULL); secp256k1_ge_set_gej(&res3, &res1); + CHECK(secp256k1_gej_is_infinity(&res1)); + CHECK(secp256k1_gej_is_infinity(&res2)); CHECK(secp256k1_ge_is_infinity(&res3)); + secp256k1_ecmult(&res1, point, &secp256k1_scalar_one, &secp256k1_scalar_zero); + secp256k1_ecmult(&res2, point, &secp256k1_scalar_one, NULL); secp256k1_ge_set_gej(&res3, &res1); CHECK(secp256k1_gej_eq_ge_var(point, &res3)); + secp256k1_ge_set_gej(&res3, &res2); + CHECK(secp256k1_gej_eq_ge_var(point, &res3)); + secp256k1_ecmult(&res1, point, &secp256k1_scalar_zero, &secp256k1_scalar_one); secp256k1_ge_set_gej(&res3, &res1); CHECK(secp256k1_ge_eq_var(&secp256k1_ge_const_g, &res3)); @@ -4624,7 +4780,7 @@ static void run_ecmult_near_split_bound(void) { int i; unsigned j; for (i = 0; i < 4*COUNT; ++i) { - for (j = 0; j < sizeof(scalars_near_split_bounds) / sizeof(scalars_near_split_bounds[0]); ++j) { + for (j = 0; j < ARRAY_SIZE(scalars_near_split_bounds); ++j) { test_ecmult_target(&scalars_near_split_bounds[j], 0); test_ecmult_target(&scalars_near_split_bounds[j], 1); test_ecmult_target(&scalars_near_split_bounds[j], 2); @@ -4748,7 +4904,7 @@ static void ecmult_const_edges(void) { secp256k1_ge point; secp256k1_gej res; size_t i; - size_t cases = 1 + sizeof(scalars_near_split_bounds) / sizeof(scalars_near_split_bounds[0]); + size_t cases = 1 + ARRAY_SIZE(scalars_near_split_bounds); /* We are trying to reach the following edge cases (variables are defined as * in ecmult_const_impl.h): @@ -5642,32 +5798,33 @@ static int test_ecmult_accumulate_cb(secp256k1_scalar* sc, secp256k1_ge* pt, siz } static void test_ecmult_accumulate(secp256k1_sha256* acc, const secp256k1_scalar* x, secp256k1_scratch* scratch) { - /* Compute x*G in 6 different ways, serialize it uncompressed, and feed it into acc. 
*/ - secp256k1_gej rj1, rj2, rj3, rj4, rj5, rj6, gj, infj; + /* Compute x*G in many different ways, serialize it uncompressed, and feed it into acc. */ + secp256k1_gej gj, infj; secp256k1_ge r; + secp256k1_gej rj[7]; unsigned char bytes[65]; + size_t i; secp256k1_gej_set_ge(&gj, &secp256k1_ge_const_g); secp256k1_gej_set_infinity(&infj); - secp256k1_ecmult_gen(&CTX->ecmult_gen_ctx, &rj1, x); - secp256k1_ecmult(&rj2, &gj, x, &secp256k1_scalar_zero); - secp256k1_ecmult(&rj3, &infj, &secp256k1_scalar_zero, x); - CHECK(secp256k1_ecmult_multi_var(&CTX->error_callback, scratch, &rj4, x, NULL, NULL, 0)); - CHECK(secp256k1_ecmult_multi_var(&CTX->error_callback, scratch, &rj5, &secp256k1_scalar_zero, test_ecmult_accumulate_cb, (void*)x, 1)); - secp256k1_ecmult_const(&rj6, &secp256k1_ge_const_g, x); - secp256k1_ge_set_gej_var(&r, &rj1); - CHECK(secp256k1_gej_eq_ge_var(&rj2, &r)); - CHECK(secp256k1_gej_eq_ge_var(&rj3, &r)); - CHECK(secp256k1_gej_eq_ge_var(&rj4, &r)); - CHECK(secp256k1_gej_eq_ge_var(&rj5, &r)); - CHECK(secp256k1_gej_eq_ge_var(&rj6, &r)); + secp256k1_ecmult_gen(&CTX->ecmult_gen_ctx, &rj[0], x); + secp256k1_ecmult(&rj[1], &gj, x, NULL); + secp256k1_ecmult(&rj[2], &gj, x, &secp256k1_scalar_zero); + secp256k1_ecmult(&rj[3], &infj, &secp256k1_scalar_zero, x); + CHECK(secp256k1_ecmult_multi_var(&CTX->error_callback, scratch, &rj[4], x, NULL, NULL, 0)); + CHECK(secp256k1_ecmult_multi_var(&CTX->error_callback, scratch, &rj[5], &secp256k1_scalar_zero, test_ecmult_accumulate_cb, (void*)x, 1)); + secp256k1_ecmult_const(&rj[6], &secp256k1_ge_const_g, x); + secp256k1_ge_set_gej_var(&r, &rj[0]); + for (i = 0; i < ARRAY_SIZE(rj); i++) { + CHECK(secp256k1_gej_eq_ge_var(&rj[i], &r)); + } if (secp256k1_ge_is_infinity(&r)) { /* Store infinity as 0x00 */ const unsigned char zerobyte[1] = {0}; - secp256k1_sha256_write(acc, zerobyte, 1); + secp256k1_sha256_write(secp256k1_get_hash_context(CTX), acc, zerobyte, 1); } else { /* Store other points using their uncompressed serialization. 
*/ secp256k1_eckey_pubkey_serialize65(&r, bytes); - secp256k1_sha256_write(acc, bytes, sizeof(bytes)); + secp256k1_sha256_write(secp256k1_get_hash_context(CTX), acc, bytes, sizeof(bytes)); } } @@ -5709,7 +5866,7 @@ static void test_ecmult_constants_2bit(void) { test_ecmult_accumulate(&acc, &x, scratch); } } - secp256k1_sha256_finalize(&acc, b32); + secp256k1_sha256_finalize(secp256k1_get_hash_context(CTX), &acc, b32); CHECK(secp256k1_memcmp_var(b32, expected32, 32) == 0); secp256k1_scratch_space_destroy(CTX, scratch); @@ -5728,6 +5885,7 @@ static void test_ecmult_constants_sha(uint32_t prefix, size_t iter, const unsign unsigned char b32[32]; unsigned char inp[6]; size_t i; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); secp256k1_scratch_space *scratch = secp256k1_scratch_space_create(CTX, 65536); inp[0] = prefix & 0xFF; @@ -5747,12 +5905,12 @@ static void test_ecmult_constants_sha(uint32_t prefix, size_t iter, const unsign inp[4] = i & 0xff; inp[5] = (i >> 8) & 0xff; secp256k1_sha256_initialize(&gen); - secp256k1_sha256_write(&gen, inp, sizeof(inp)); - secp256k1_sha256_finalize(&gen, b32); + secp256k1_sha256_write(hash_ctx, &gen, inp, sizeof(inp)); + secp256k1_sha256_finalize(hash_ctx, &gen, b32); secp256k1_scalar_set_b32(&x, b32, NULL); test_ecmult_accumulate(&acc, &x, scratch); } - secp256k1_sha256_finalize(&acc, b32); + secp256k1_sha256_finalize(hash_ctx, &acc, b32); CHECK(secp256k1_memcmp_var(b32, expected32, 32) == 0); secp256k1_scratch_space_destroy(CTX, scratch); @@ -5806,7 +5964,7 @@ static void test_ecmult_gen_blind(void) { testrand256(seed32); b = CTX->ecmult_gen_ctx.scalar_offset; p = CTX->ecmult_gen_ctx.ge_offset; - secp256k1_ecmult_gen_blind(&CTX->ecmult_gen_ctx, seed32); + secp256k1_ecmult_gen_blind(&CTX->ecmult_gen_ctx, secp256k1_get_hash_context(CTX), seed32); CHECK(!secp256k1_scalar_eq(&b, &CTX->ecmult_gen_ctx.scalar_offset)); secp256k1_ecmult_gen(&CTX->ecmult_gen_ctx, &pgej2, &key); CHECK(!gej_xyz_equals_gej(&pgej, 
&pgej2)); @@ -5819,10 +5977,10 @@ static void test_ecmult_gen_blind_reset(void) { /* Test ecmult_gen() blinding reset and confirm that the blinding is consistent. */ secp256k1_scalar b; secp256k1_ge p1, p2; - secp256k1_ecmult_gen_blind(&CTX->ecmult_gen_ctx, 0); + secp256k1_ecmult_gen_blind(&CTX->ecmult_gen_ctx, secp256k1_get_hash_context(CTX), 0); b = CTX->ecmult_gen_ctx.scalar_offset; p1 = CTX->ecmult_gen_ctx.ge_offset; - secp256k1_ecmult_gen_blind(&CTX->ecmult_gen_ctx, 0); + secp256k1_ecmult_gen_blind(&CTX->ecmult_gen_ctx, secp256k1_get_hash_context(CTX), 0); CHECK(secp256k1_scalar_eq(&b, &CTX->ecmult_gen_ctx.scalar_offset)); p2 = CTX->ecmult_gen_ctx.ge_offset; CHECK(secp256k1_ge_eq_var(&p1, &p2)); @@ -5900,7 +6058,7 @@ static void run_endomorphism_tests(void) { testutil_random_scalar_order_test(&full); test_scalar_split(&full); } - for (i = 0; i < sizeof(scalars_near_split_bounds) / sizeof(scalars_near_split_bounds[0]); ++i) { + for (i = 0; i < ARRAY_SIZE(scalars_near_split_bounds); ++i) { test_scalar_split(&scalars_near_split_bounds[i]); } } @@ -7284,7 +7442,7 @@ static void run_ecdsa_der_parse(void) { } /* Tests several edge cases. */ -static void test_ecdsa_edge_cases(void) { +static void run_ecdsa_edge_cases(void) { int t; secp256k1_ecdsa_signature sig; @@ -7617,8 +7775,25 @@ static void test_ecdsa_edge_cases(void) { } } -static void run_ecdsa_edge_cases(void) { - test_ecdsa_edge_cases(); +DEFINE_SHA256_TRANSFORM_PROBE(sha256_ecdsa) +static void ecdsa_ctx_sha256(void) { + /* Check ctx-provided SHA256 compression override takes effect */ + secp256k1_context *ctx = secp256k1_context_clone(CTX); + secp256k1_ecdsa_signature out_default, out_custom; + unsigned char sk[32] = {1}, msg32[32] = {1}; + + /* Default behavior. 
No ctx-provided SHA256 compression */ + CHECK(secp256k1_ecdsa_sign(ctx, &out_default, msg32, sk, NULL, NULL)); + CHECK(!sha256_ecdsa_called); + + /* Override SHA256 compression directly, bypassing the ctx setter sanity checks */ + ctx->hash_ctx.fn_sha256_compression = sha256_ecdsa; + CHECK(secp256k1_ecdsa_sign(ctx, &out_custom, msg32, sk, NULL, NULL)); + CHECK(sha256_ecdsa_called); + /* Outputs must differ if custom compression was used */ + CHECK(secp256k1_memcmp_var(out_default.data, out_custom.data, 64) != 0); + + secp256k1_context_destroy(ctx); } /** Wycheproof tests @@ -7629,6 +7804,7 @@ static void test_ecdsa_wycheproof(void) { #include "wycheproof/ecdsa_secp256k1_sha256_bitcoin_test.h" int t; + const secp256k1_hash_ctx *hash_ctx = secp256k1_get_hash_context(CTX); for (t = 0; t < SECP256K1_ECDSA_WYCHEPROOF_NUMBER_TESTVECTORS; t++) { secp256k1_ecdsa_signature signature; secp256k1_sha256 hasher; @@ -7643,8 +7819,8 @@ static void test_ecdsa_wycheproof(void) { secp256k1_sha256_initialize(&hasher); msg = &wycheproof_ecdsa_messages[testvectors[t].msg_offset]; - secp256k1_sha256_write(&hasher, msg, testvectors[t].msg_len); - secp256k1_sha256_finalize(&hasher, out); + secp256k1_sha256_write(hash_ctx, &hasher, msg, testvectors[t].msg_len); + secp256k1_sha256_finalize(hash_ctx, &hasher, out); sig = &wycheproof_ecdsa_signatures[testvectors[t].sig_offset]; if (secp256k1_ecdsa_signature_parse_der(CTX, &signature, sig, testvectors[t].sig_len) == 1) { @@ -7940,6 +8116,8 @@ static const struct tf_test_entry tests_general[] = { CASE(all_static_context_tests), CASE(deprecated_context_flags_test), CASE(scratch_tests), + CASE(plug_sha256_compression_tests), + CASE(sha256_multi_block_compression_tests), }; static const struct tf_test_entry tests_integer[] = { @@ -8010,6 +8188,7 @@ static const struct tf_test_entry tests_ecdsa[] = { CASE(ecdsa_end_to_end), CASE(ecdsa_edge_cases), CASE(ecdsa_wycheproof), + CASE1(ecdsa_ctx_sha256), }; static const struct tf_test_entry tests_utils[] 
= { @@ -8111,7 +8290,7 @@ static int teardown(void) { int main(int argc, char **argv) { struct tf_framework tf = {0}; tf.registry_modules = registry_modules; - tf.num_modules = sizeof(registry_modules) / sizeof(registry_modules[0]); + tf.num_modules = ARRAY_SIZE(registry_modules); tf.registry_no_rng = &registry_modules_no_rng; /* Add context creation/destruction functions */ diff --git a/src/testutil.h b/src/testutil.h index 93ee3d58b..8fa69a02c 100644 --- a/src/testutil.h +++ b/src/testutil.h @@ -11,6 +11,15 @@ #include "testrand.h" #include "util.h" +/* Helper to define a probe that records whether the ctx-provided SHA256 compression function was called */ +#define DEFINE_SHA256_TRANSFORM_PROBE(name) \ + static int name##_called = 0; \ + static void name(uint32_t *s, const unsigned char *msg, size_t rounds) { \ + name##_called = 1; \ + secp256k1_sha256_transform(s, msg, rounds); \ + s[0] ^= 0xdeadbeef; /* intentionally perturb the state so tests can detect that the override ran */ \ + } + /* group order of the secp256k1 curve in 32-byte big endian representation */ static const unsigned char secp256k1_group_order_bytes[32] = { 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, diff --git a/src/unit_test.h b/src/unit_test.h index d3b0f1538..6f67204dc 100644 --- a/src/unit_test.h +++ b/src/unit_test.h @@ -6,6 +6,8 @@ #ifndef SECP256K1_UNIT_TEST_H #define SECP256K1_UNIT_TEST_H +#include "util.h" + /* --------------------------------------------------------- */ /* Configurable constants */ /* --------------------------------------------------------- */ @@ -27,7 +29,7 @@ #define MAKE_TEST_MODULE(name) { \ #name, \ tests_##name, \ - sizeof(tests_##name) / sizeof(tests_##name[0]) \ + ARRAY_SIZE(tests_##name) \ } /* Macro to wrap a test internal function with a COUNT loop (iterations number) */ diff --git a/src/util.h b/src/util.h index 3a468101b..712e0fe9b 100644 --- a/src/util.h +++ b/src/util.h @@ -207,6 +207,8 @@ SECP256K1_INLINE static int secp256k1_clz64_var(uint64_t x) { return ret; } +#define ARRAY_SIZE(arr) (sizeof(arr) 
/ sizeof((arr)[0])) + /* Macro for restrict, when available and not in a VERIFY build. */ #if defined(SECP256K1_BUILD) && defined(VERIFY) # define SECP256K1_RESTRICT