Merge pull request #189 from stevenroose/fix-links
Fix the manifest links property for secp256k1-sys
commit 80c856c091
@@ -1,4 +1,8 @@
+# 0.17.1
+
+- Correctly prefix the secp256k1-sys links field in Cargo.toml.
+
 # 0.17.0
 
 - Move FFI into secp256k1-sys crate.
@@ -1,7 +1,7 @@
 [package]
 name = "secp256k1"
-version = "0.17.0"
+version = "0.17.1"
 authors = [ "Dawid Ciężarkiewicz <dpc@ucore.info>",
             "Andrew Poelstra <apoelstra@wpsoftware.net>" ]
 license = "CC0-1.0"
@@ -38,7 +38,7 @@ external-symbols = ["secp256k1-sys/external-symbols"]
 fuzztarget = ["secp256k1-sys/fuzztarget"]
 
 [dependencies]
-secp256k1-sys = { version = "0.1.0", default-features = false, path = "./secp256k1-sys" }
+secp256k1-sys = { version = "0.1.1", default-features = false, path = "./secp256k1-sys" }
 
 [dev-dependencies]
 rand = "0.6"
@@ -1,6 +1,6 @@
 [package]
 name = "secp256k1-sys"
-version = "0.1.0"
+version = "0.1.1"
 authors = [ "Dawid Ciężarkiewicz <dpc@ucore.info>",
             "Andrew Poelstra <apoelstra@wpsoftware.net>",
             "Steven Roose <steven@stevenroose.org>" ]
@@ -12,7 +12,7 @@ description = "FFI for Pieter Wuille's `libsecp256k1` library."
 keywords = [ "secp256k1", "libsecp256k1", "ffi" ]
 readme = "README.md"
 build = "build.rs"
-links = "secp256k1"
+links = "rustsecp256k1_v0_1_1"
 
 # Should make docs.rs show all functions, even those behind non-default features
 [package.metadata.docs.rs]
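Background on the `links` key, assuming Cargo's usual semantics for -sys crates rather than anything stated in this patch: `links` declares the native library a crate links against, and Cargo permits at most one crate with a given `links` value in a dependency graph. The generic value "secp256k1" would therefore collide with any other crate that links the system libsecp256k1, while the versioned value matches the rustsecp256k1_v0_1_1_* prefix already applied to the vendored C symbols in the hunks below. A minimal sketch of the resulting manifest section (values illustrative):

    [package]
    name = "secp256k1-sys"
    version = "0.1.1"
    build = "build.rs"
    # Only one crate per `links` value may appear in a build graph; the
    # versioned name mirrors the symbol prefix of the vendored libsecp256k1.
    links = "rustsecp256k1_v0_1_1"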
@@ -2,13 +2,13 @@ ACLOCAL_AMFLAGS = -I build-aux/m4
 
 lib_LTLIBRARIES = libsecp256k1.la
 if USE_JNI
-JNI_LIB = librustsecp256k1_v0_1_0_jni.la
+JNI_LIB = librustsecp256k1_v0_1_1_jni.la
 noinst_LTLIBRARIES = $(JNI_LIB)
 else
 JNI_LIB =
 endif
 include_HEADERS = include/secp256k1.h
-include_HEADERS += include/rustsecp256k1_v0_1_0_preallocated.h
+include_HEADERS += include/rustsecp256k1_v0_1_1_preallocated.h
 noinst_HEADERS =
 noinst_HEADERS += src/scalar.h
 noinst_HEADERS += src/scalar_4x64.h
@@ -58,7 +58,7 @@ noinst_HEADERS += contrib/lax_der_privatekey_parsing.h
 noinst_HEADERS += contrib/lax_der_privatekey_parsing.c
 
 if USE_EXTERNAL_ASM
-COMMON_LIB = librustsecp256k1_v0_1_0_common.la
+COMMON_LIB = librustsecp256k1_v0_1_1_common.la
 noinst_LTLIBRARIES = $(COMMON_LIB)
 else
 COMMON_LIB =
@@ -69,16 +69,16 @@ pkgconfig_DATA = libsecp256k1.pc
 
 if USE_EXTERNAL_ASM
 if USE_ASM_ARM
-librustsecp256k1_v0_1_0_common_la_SOURCES = src/asm/field_10x26_arm.s
+librustsecp256k1_v0_1_1_common_la_SOURCES = src/asm/field_10x26_arm.s
 endif
 endif
 
-librustsecp256k1_v0_1_0_la_SOURCES = src/secp256k1.c
-librustsecp256k1_v0_1_0_la_CPPFLAGS = -DSECP256K1_BUILD -I$(top_srcdir)/include -I$(top_srcdir)/src $(SECP_INCLUDES)
-librustsecp256k1_v0_1_0_la_LIBADD = $(JNI_LIB) $(SECP_LIBS) $(COMMON_LIB)
+librustsecp256k1_v0_1_1_la_SOURCES = src/secp256k1.c
+librustsecp256k1_v0_1_1_la_CPPFLAGS = -DSECP256K1_BUILD -I$(top_srcdir)/include -I$(top_srcdir)/src $(SECP_INCLUDES)
+librustsecp256k1_v0_1_1_la_LIBADD = $(JNI_LIB) $(SECP_LIBS) $(COMMON_LIB)
 
-librustsecp256k1_v0_1_0_jni_la_SOURCES = src/java/org_bitcoin_NativeSecp256k1.c src/java/org_bitcoin_Secp256k1Context.c
-librustsecp256k1_v0_1_0_jni_la_CPPFLAGS = -DSECP256K1_BUILD $(JNI_INCLUDES)
+librustsecp256k1_v0_1_1_jni_la_SOURCES = src/java/org_bitcoin_NativeSecp256k1.c src/java/org_bitcoin_Secp256k1Context.c
+librustsecp256k1_v0_1_1_jni_la_CPPFLAGS = -DSECP256K1_BUILD $(JNI_INCLUDES)
 
 noinst_PROGRAMS =
 if USE_BENCHMARK
@@ -161,7 +161,7 @@ gen_%.o: src/gen_%.c
 $(gen_context_BIN): $(gen_context_OBJECTS)
 	$(CC_FOR_BUILD) $(CFLAGS_FOR_BUILD) $(LDFLAGS_FOR_BUILD) $^ -o $@
 
-$(librustsecp256k1_v0_1_0_la_OBJECTS): src/ecmult_static_context.h
+$(librustsecp256k1_v0_1_1_la_OBJECTS): src/ecmult_static_context.h
 $(tests_OBJECTS): src/ecmult_static_context.h
 $(bench_internal_OBJECTS): src/ecmult_static_context.h
 $(bench_ecmult_OBJECTS): src/ecmult_static_context.h
@@ -140,7 +140,7 @@ AC_ARG_ENABLE(external_default_callbacks,
 [use_external_default_callbacks=no])
 
 AC_ARG_ENABLE(jni,
-    AS_HELP_STRING([--enable-jni],[enable librustsecp256k1_v0_1_0_jni [default=no]]),
+    AS_HELP_STRING([--enable-jni],[enable librustsecp256k1_v0_1_1_jni [default=no]]),
 [use_jni=$enableval],
 [use_jni=no])
 
@@ -9,7 +9,7 @@
 
 #include "lax_der_parsing.h"
 
-int rustsecp256k1_v0_1_0_ecdsa_signature_parse_der_lax(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_ecdsa_signature* sig, const unsigned char *input, size_t inputlen) {
+int rustsecp256k1_v0_1_1_ecdsa_signature_parse_der_lax(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_ecdsa_signature* sig, const unsigned char *input, size_t inputlen) {
     size_t rpos, rlen, spos, slen;
     size_t pos = 0;
     size_t lenbyte;
@@ -17,7 +17,7 @@ int rustsecp256k1_v0_1_0_ecdsa_signature_parse_der_lax(const rustsecp256k1_v0_1_
     int overflow = 0;
 
     /* Hack to initialize sig with a correctly-parsed but invalid signature. */
-    rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact(ctx, sig, tmpsig);
+    rustsecp256k1_v0_1_1_ecdsa_signature_parse_compact(ctx, sig, tmpsig);
 
     /* Sequence tag byte */
     if (pos == inputlen || input[pos] != 0x30) {
@@ -139,11 +139,11 @@ int rustsecp256k1_v0_1_0_ecdsa_signature_parse_der_lax(const rustsecp256k1_v0_1_
     }
 
     if (!overflow) {
-        overflow = !rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact(ctx, sig, tmpsig);
+        overflow = !rustsecp256k1_v0_1_1_ecdsa_signature_parse_compact(ctx, sig, tmpsig);
     }
     if (overflow) {
         memset(tmpsig, 0, 64);
-        rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact(ctx, sig, tmpsig);
+        rustsecp256k1_v0_1_1_ecdsa_signature_parse_compact(ctx, sig, tmpsig);
     }
     return 1;
 }
@@ -26,8 +26,8 @@
  * certain violations are easily supported. You may need to adapt it.
  *
  * Do not use this for new systems. Use well-defined DER or compact signatures
- * instead if you have the choice (see rustsecp256k1_v0_1_0_ecdsa_signature_parse_der and
- * rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact).
+ * instead if you have the choice (see rustsecp256k1_v0_1_1_ecdsa_signature_parse_der and
+ * rustsecp256k1_v0_1_1_ecdsa_signature_parse_compact).
  *
  * The supported violations are:
  * - All numbers are parsed as nonnegative integers, even though X.609-0207
@@ -77,9 +77,9 @@ extern "C" {
  * encoded numbers are out of range, signature validation with it is
  * guaranteed to fail for every message and public key.
  */
-int rustsecp256k1_v0_1_0_ecdsa_signature_parse_der_lax(
-    const rustsecp256k1_v0_1_0_context* ctx,
-    rustsecp256k1_v0_1_0_ecdsa_signature* sig,
+int rustsecp256k1_v0_1_1_ecdsa_signature_parse_der_lax(
+    const rustsecp256k1_v0_1_1_context* ctx,
+    rustsecp256k1_v0_1_1_ecdsa_signature* sig,
     const unsigned char *input,
     size_t inputlen
 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
@@ -9,7 +9,7 @@
 
 #include "lax_der_privatekey_parsing.h"
 
-int ec_privkey_import_der(const rustsecp256k1_v0_1_0_context* ctx, unsigned char *out32, const unsigned char *privkey, size_t privkeylen) {
+int ec_privkey_import_der(const rustsecp256k1_v0_1_1_context* ctx, unsigned char *out32, const unsigned char *privkey, size_t privkeylen) {
     const unsigned char *end = privkey + privkeylen;
     int lenb = 0;
     int len = 0;
@@ -46,17 +46,17 @@ int ec_privkey_import_der(const rustsecp256k1_v0_1_0_context* ctx, unsigned char
         return 0;
     }
     memcpy(out32 + 32 - privkey[1], privkey + 2, privkey[1]);
-    if (!rustsecp256k1_v0_1_0_ec_seckey_verify(ctx, out32)) {
+    if (!rustsecp256k1_v0_1_1_ec_seckey_verify(ctx, out32)) {
         memset(out32, 0, 32);
         return 0;
     }
     return 1;
 }
 
-int ec_privkey_export_der(const rustsecp256k1_v0_1_0_context *ctx, unsigned char *privkey, size_t *privkeylen, const unsigned char *key32, int compressed) {
-    rustsecp256k1_v0_1_0_pubkey pubkey;
+int ec_privkey_export_der(const rustsecp256k1_v0_1_1_context *ctx, unsigned char *privkey, size_t *privkeylen, const unsigned char *key32, int compressed) {
+    rustsecp256k1_v0_1_1_pubkey pubkey;
     size_t pubkeylen = 0;
-    if (!rustsecp256k1_v0_1_0_ec_pubkey_create(ctx, &pubkey, key32)) {
+    if (!rustsecp256k1_v0_1_1_ec_pubkey_create(ctx, &pubkey, key32)) {
         *privkeylen = 0;
         return 0;
     }
@@ -80,7 +80,7 @@ int ec_privkey_export_der(const rustsecp256k1_v0_1_0_context *ctx, unsigned char
         memcpy(ptr, key32, 32); ptr += 32;
         memcpy(ptr, middle, sizeof(middle)); ptr += sizeof(middle);
         pubkeylen = 33;
-        rustsecp256k1_v0_1_0_ec_pubkey_serialize(ctx, ptr, &pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED);
+        rustsecp256k1_v0_1_1_ec_pubkey_serialize(ctx, ptr, &pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED);
         ptr += pubkeylen;
         *privkeylen = ptr - privkey;
     } else {
@@ -105,7 +105,7 @@ int ec_privkey_export_der(const rustsecp256k1_v0_1_0_context *ctx, unsigned char
         memcpy(ptr, key32, 32); ptr += 32;
         memcpy(ptr, middle, sizeof(middle)); ptr += sizeof(middle);
         pubkeylen = 65;
-        rustsecp256k1_v0_1_0_ec_pubkey_serialize(ctx, ptr, &pubkeylen, &pubkey, SECP256K1_EC_UNCOMPRESSED);
+        rustsecp256k1_v0_1_1_ec_pubkey_serialize(ctx, ptr, &pubkeylen, &pubkey, SECP256K1_EC_UNCOMPRESSED);
         ptr += pubkeylen;
         *privkeylen = ptr - privkey;
     }
@@ -52,10 +52,10 @@ extern "C" {
  * simple 32-byte private keys are sufficient.
  *
  * Note that this function does not guarantee correct DER output. It is
- * guaranteed to be parsable by rustsecp256k1_v0_1_0_ec_privkey_import_der
+ * guaranteed to be parsable by rustsecp256k1_v0_1_1_ec_privkey_import_der
  */
 SECP256K1_WARN_UNUSED_RESULT int ec_privkey_export_der(
-    const rustsecp256k1_v0_1_0_context* ctx,
+    const rustsecp256k1_v0_1_1_context* ctx,
     unsigned char *privkey,
     size_t *privkeylen,
     const unsigned char *seckey,
@@ -77,7 +77,7 @@ SECP256K1_WARN_UNUSED_RESULT int ec_privkey_export_der(
  * key.
  */
 SECP256K1_WARN_UNUSED_RESULT int ec_privkey_import_der(
-    const rustsecp256k1_v0_1_0_context* ctx,
+    const rustsecp256k1_v0_1_1_context* ctx,
     unsigned char *seckey,
     const unsigned char *privkey,
     size_t privkeylen
@@ -35,13 +35,13 @@ extern "C" {
  * A constructed context can safely be used from multiple threads
  * simultaneously, but API calls that take a non-const pointer to a context
  * need exclusive access to it. In particular this is the case for
- * rustsecp256k1_v0_1_0_context_destroy, rustsecp256k1_v0_1_0_context_preallocated_destroy,
- * and rustsecp256k1_v0_1_0_context_randomize.
+ * rustsecp256k1_v0_1_1_context_destroy, rustsecp256k1_v0_1_1_context_preallocated_destroy,
+ * and rustsecp256k1_v0_1_1_context_randomize.
  *
  * Regarding randomization, either do it once at creation time (in which case
  * you do not need any locking for the other calls), or use a read-write lock.
  */
-typedef struct rustsecp256k1_v0_1_0_context_struct rustsecp256k1_v0_1_0_context;
+typedef struct rustsecp256k1_v0_1_1_context_struct rustsecp256k1_v0_1_1_context;
 
 /** Opaque data structure that holds rewriteable "scratch space"
  *
@@ -54,7 +54,7 @@ typedef struct rustsecp256k1_v0_1_0_context_struct rustsecp256k1_v0_1_0_context;
  * Unlike the context object, this cannot safely be shared between threads
  * without additional synchronization logic.
  */
-typedef struct rustsecp256k1_v0_1_0_scratch_space_struct rustsecp256k1_v0_1_0_scratch_space;
+typedef struct rustsecp256k1_v0_1_1_scratch_space_struct rustsecp256k1_v0_1_1_scratch_space;
 
 /** Opaque data structure that holds a parsed and valid public key.
  *
@@ -62,11 +62,11 @@ typedef struct rustsecp256k1_v0_1_0_scratch_space_struct rustsecp256k1_v0_1_0_sc
  * guaranteed to be portable between different platforms or versions. It is
  * however guaranteed to be 64 bytes in size, and can be safely copied/moved.
  * If you need to convert to a format suitable for storage, transmission, or
- * comparison, use rustsecp256k1_v0_1_0_ec_pubkey_serialize and rustsecp256k1_v0_1_0_ec_pubkey_parse.
+ * comparison, use rustsecp256k1_v0_1_1_ec_pubkey_serialize and rustsecp256k1_v0_1_1_ec_pubkey_parse.
  */
 typedef struct {
     unsigned char data[64];
-} rustsecp256k1_v0_1_0_pubkey;
+} rustsecp256k1_v0_1_1_pubkey;
 
 /** Opaque data structured that holds a parsed ECDSA signature.
  *
@@ -74,12 +74,12 @@ typedef struct {
  * guaranteed to be portable between different platforms or versions. It is
  * however guaranteed to be 64 bytes in size, and can be safely copied/moved.
  * If you need to convert to a format suitable for storage, transmission, or
- * comparison, use the rustsecp256k1_v0_1_0_ecdsa_signature_serialize_* and
- * rustsecp256k1_v0_1_0_ecdsa_signature_parse_* functions.
+ * comparison, use the rustsecp256k1_v0_1_1_ecdsa_signature_serialize_* and
+ * rustsecp256k1_v0_1_1_ecdsa_signature_parse_* functions.
  */
 typedef struct {
     unsigned char data[64];
-} rustsecp256k1_v0_1_0_ecdsa_signature;
+} rustsecp256k1_v0_1_1_ecdsa_signature;
 
 /** A pointer to a function to deterministically generate a nonce.
  *
@@ -97,7 +97,7 @@ typedef struct {
  * Except for test cases, this function should compute some cryptographic hash of
  * the message, the algorithm, the key and the attempt.
  */
-typedef int (*rustsecp256k1_v0_1_0_nonce_function)(
+typedef int (*rustsecp256k1_v0_1_1_nonce_function)(
     unsigned char *nonce32,
     const unsigned char *msg32,
     const unsigned char *key32,
@@ -164,13 +164,13 @@ typedef int (*rustsecp256k1_v0_1_0_nonce_function)(
 #define SECP256K1_FLAGS_BIT_CONTEXT_SIGN (1 << 9)
 #define SECP256K1_FLAGS_BIT_COMPRESSION (1 << 8)
 
-/** Flags to pass to rustsecp256k1_v0_1_0_context_create, rustsecp256k1_v0_1_0_context_preallocated_size, and
- *  rustsecp256k1_v0_1_0_context_preallocated_create. */
+/** Flags to pass to rustsecp256k1_v0_1_1_context_create, rustsecp256k1_v0_1_1_context_preallocated_size, and
+ *  rustsecp256k1_v0_1_1_context_preallocated_create. */
 #define SECP256K1_CONTEXT_VERIFY (SECP256K1_FLAGS_TYPE_CONTEXT | SECP256K1_FLAGS_BIT_CONTEXT_VERIFY)
 #define SECP256K1_CONTEXT_SIGN (SECP256K1_FLAGS_TYPE_CONTEXT | SECP256K1_FLAGS_BIT_CONTEXT_SIGN)
 #define SECP256K1_CONTEXT_NONE (SECP256K1_FLAGS_TYPE_CONTEXT)
 
-/** Flag to pass to rustsecp256k1_v0_1_0_ec_pubkey_serialize and rustsecp256k1_v0_1_0_ec_privkey_export. */
+/** Flag to pass to rustsecp256k1_v0_1_1_ec_pubkey_serialize and rustsecp256k1_v0_1_1_ec_privkey_export. */
 #define SECP256K1_EC_COMPRESSED (SECP256K1_FLAGS_TYPE_COMPRESSION | SECP256K1_FLAGS_BIT_COMPRESSION)
 #define SECP256K1_EC_UNCOMPRESSED (SECP256K1_FLAGS_TYPE_COMPRESSION)
@@ -186,25 +186,25 @@ typedef int (*rustsecp256k1_v0_1_0_nonce_function)(
  * API consistency, but currently do not require expensive precomputations or dynamic
  * allocations.
  */
-SECP256K1_API extern const rustsecp256k1_v0_1_0_context *rustsecp256k1_v0_1_0_context_no_precomp;
+SECP256K1_API extern const rustsecp256k1_v0_1_1_context *rustsecp256k1_v0_1_1_context_no_precomp;
 
 /** Create a secp256k1 context object (in dynamically allocated memory).
  *
  * This function uses malloc to allocate memory. It is guaranteed that malloc is
  * called at most once for every call of this function. If you need to avoid dynamic
- * memory allocation entirely, see the functions in rustsecp256k1_v0_1_0_preallocated.h.
+ * memory allocation entirely, see the functions in rustsecp256k1_v0_1_1_preallocated.h.
  *
  * Returns: a newly created context object.
  * In: flags: which parts of the context to initialize.
  *
- * See also rustsecp256k1_v0_1_0_context_randomize.
+ * See also rustsecp256k1_v0_1_1_context_randomize.
  */
 
 /** Copy a secp256k1 context object (into dynamically allocated memory).
  *
  * This function uses malloc to allocate memory. It is guaranteed that malloc is
  * called at most once for every call of this function. If you need to avoid dynamic
- * memory allocation entirely, see the functions in rustsecp256k1_v0_1_0_preallocated.h.
+ * memory allocation entirely, see the functions in rustsecp256k1_v0_1_1_preallocated.h.
  *
  * Returns: a newly created context object.
  * Args: ctx: an existing context to copy (cannot be NULL)
@@ -214,14 +214,14 @@ SECP256K1_API extern const rustsecp256k1_v0_1_0_context *rustsecp256k1_v0_1_0_co
  *
  * The context pointer may not be used afterwards.
  *
- * The context to destroy must have been created using rustsecp256k1_v0_1_0_context_create
- * or rustsecp256k1_v0_1_0_context_clone. If the context has instead been created using
- * rustsecp256k1_v0_1_0_context_preallocated_create or rustsecp256k1_v0_1_0_context_preallocated_clone, the
- * behaviour is undefined. In that case, rustsecp256k1_v0_1_0_context_preallocated_destroy must
+ * The context to destroy must have been created using rustsecp256k1_v0_1_1_context_create
+ * or rustsecp256k1_v0_1_1_context_clone. If the context has instead been created using
+ * rustsecp256k1_v0_1_1_context_preallocated_create or rustsecp256k1_v0_1_1_context_preallocated_clone, the
+ * behaviour is undefined. In that case, rustsecp256k1_v0_1_1_context_preallocated_destroy must
  * be used instead.
  *
  * Args: ctx: an existing context to destroy, constructed using
- *       rustsecp256k1_v0_1_0_context_create or rustsecp256k1_v0_1_0_context_clone
+ *       rustsecp256k1_v0_1_1_context_create or rustsecp256k1_v0_1_1_context_clone
  */
 
 /** Set a callback function to be called when an illegal argument is passed to
@@ -245,11 +245,11 @@ SECP256K1_API extern const rustsecp256k1_v0_1_0_context *rustsecp256k1_v0_1_0_co
  * USE_EXTERNAL_DEFAULT_CALLBACKS is defined, which is the case if the build
  * has been configured with --enable-external-default-callbacks. Then the
  * following two symbols must be provided to link against:
- * - void rustsecp256k1_v0_1_0_default_illegal_callback_fn(const char* message, void* data);
- * - void rustsecp256k1_v0_1_0_default_error_callback_fn(const char* message, void* data);
+ * - void rustsecp256k1_v0_1_1_default_illegal_callback_fn(const char* message, void* data);
+ * - void rustsecp256k1_v0_1_1_default_error_callback_fn(const char* message, void* data);
  * The library can call these default handlers even before a proper callback data
- * pointer could have been set using rustsecp256k1_v0_1_0_context_set_illegal_callback or
- * rustsecp256k1_v0_1_0_context_set_illegal_callback, e.g., when the creation of a context
+ * pointer could have been set using rustsecp256k1_v0_1_1_context_set_illegal_callback or
+ * rustsecp256k1_v0_1_1_context_set_illegal_callback, e.g., when the creation of a context
  * fails. In this case, the corresponding default handler will be called with
  * the data pointer argument set to NULL.
  *
@@ -259,10 +259,10 @@ SECP256K1_API extern const rustsecp256k1_v0_1_0_context *rustsecp256k1_v0_1_0_co
  *            (NULL restores the default handler.)
  *      data: the opaque pointer to pass to fun above.
  *
- * See also rustsecp256k1_v0_1_0_context_set_error_callback.
+ * See also rustsecp256k1_v0_1_1_context_set_error_callback.
  */
-SECP256K1_API void rustsecp256k1_v0_1_0_context_set_illegal_callback(
-    rustsecp256k1_v0_1_0_context* ctx,
+SECP256K1_API void rustsecp256k1_v0_1_1_context_set_illegal_callback(
+    rustsecp256k1_v0_1_1_context* ctx,
     void (*fun)(const char* message, void* data),
     const void* data
 ) SECP256K1_ARG_NONNULL(1);
@ -273,21 +273,21 @@ SECP256K1_API void rustsecp256k1_v0_1_0_context_set_illegal_callback(
|
||||||
* This can only trigger in case of a hardware failure, miscompilation,
|
* This can only trigger in case of a hardware failure, miscompilation,
|
||||||
* memory corruption, serious bug in the library, or other error would can
|
* memory corruption, serious bug in the library, or other error would can
|
||||||
* otherwise result in undefined behaviour. It will not trigger due to mere
|
* otherwise result in undefined behaviour. It will not trigger due to mere
|
||||||
* incorrect usage of the API (see rustsecp256k1_v0_1_0_context_set_illegal_callback
|
* incorrect usage of the API (see rustsecp256k1_v0_1_1_context_set_illegal_callback
|
||||||
* for that). After this callback returns, anything may happen, including
|
* for that). After this callback returns, anything may happen, including
|
||||||
* crashing.
|
* crashing.
|
||||||
*
|
*
|
||||||
* Args: ctx: an existing context object (cannot be NULL)
|
* Args: ctx: an existing context object (cannot be NULL)
|
||||||
* In: fun: a pointer to a function to call when an internal error occurs,
|
* In: fun: a pointer to a function to call when an internal error occurs,
|
||||||
* taking a message and an opaque pointer (NULL restores the
|
* taking a message and an opaque pointer (NULL restores the
|
||||||
* default handler, see rustsecp256k1_v0_1_0_context_set_illegal_callback
|
* default handler, see rustsecp256k1_v0_1_1_context_set_illegal_callback
|
||||||
* for details).
|
* for details).
|
||||||
* data: the opaque pointer to pass to fun above.
|
* data: the opaque pointer to pass to fun above.
|
||||||
*
|
*
|
||||||
* See also rustsecp256k1_v0_1_0_context_set_illegal_callback.
|
* See also rustsecp256k1_v0_1_1_context_set_illegal_callback.
|
||||||
*/
|
*/
|
||||||
SECP256K1_API void rustsecp256k1_v0_1_0_context_set_error_callback(
|
SECP256K1_API void rustsecp256k1_v0_1_1_context_set_error_callback(
|
||||||
rustsecp256k1_v0_1_0_context* ctx,
|
rustsecp256k1_v0_1_1_context* ctx,
|
||||||
void (*fun)(const char* message, void* data),
|
void (*fun)(const char* message, void* data),
|
||||||
const void* data
|
const void* data
|
||||||
) SECP256K1_ARG_NONNULL(1);
|
) SECP256K1_ARG_NONNULL(1);
|
||||||
|
@ -321,9 +321,9 @@ SECP256K1_API void rustsecp256k1_v0_1_0_context_set_error_callback(
|
||||||
* 0x03), uncompressed (65 bytes, header byte 0x04), or hybrid (65 bytes, header
|
* 0x03), uncompressed (65 bytes, header byte 0x04), or hybrid (65 bytes, header
|
||||||
* byte 0x06 or 0x07) format public keys.
|
* byte 0x06 or 0x07) format public keys.
|
||||||
*/
|
*/
|
||||||
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_parse(
|
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_pubkey_parse(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
rustsecp256k1_v0_1_0_pubkey* pubkey,
|
rustsecp256k1_v0_1_1_pubkey* pubkey,
|
||||||
const unsigned char *input,
|
const unsigned char *input,
|
||||||
size_t inputlen
|
size_t inputlen
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
||||||
|
@ -338,16 +338,16 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_pa
|
||||||
* In/Out: outputlen: a pointer to an integer which is initially set to the
|
* In/Out: outputlen: a pointer to an integer which is initially set to the
|
||||||
* size of output, and is overwritten with the written
|
* size of output, and is overwritten with the written
|
||||||
* size.
|
* size.
|
||||||
* In: pubkey: a pointer to a rustsecp256k1_v0_1_0_pubkey containing an
|
* In: pubkey: a pointer to a rustsecp256k1_v0_1_1_pubkey containing an
|
||||||
* initialized public key.
|
* initialized public key.
|
||||||
* flags: SECP256K1_EC_COMPRESSED if serialization should be in
|
* flags: SECP256K1_EC_COMPRESSED if serialization should be in
|
||||||
* compressed format, otherwise SECP256K1_EC_UNCOMPRESSED.
|
* compressed format, otherwise SECP256K1_EC_UNCOMPRESSED.
|
||||||
*/
|
*/
|
||||||
SECP256K1_API int rustsecp256k1_v0_1_0_ec_pubkey_serialize(
|
SECP256K1_API int rustsecp256k1_v0_1_1_ec_pubkey_serialize(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
unsigned char *output,
|
unsigned char *output,
|
||||||
size_t *outputlen,
|
size_t *outputlen,
|
||||||
const rustsecp256k1_v0_1_0_pubkey* pubkey,
|
const rustsecp256k1_v0_1_1_pubkey* pubkey,
|
||||||
unsigned int flags
|
unsigned int flags
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);
|
||||||
|
|
||||||
|
@ -366,9 +366,9 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ec_pubkey_serialize(
|
||||||
* S are zero, the resulting sig value is guaranteed to fail validation for any
|
* S are zero, the resulting sig value is guaranteed to fail validation for any
|
||||||
* message and public key.
|
* message and public key.
|
||||||
*/
|
*/
|
||||||
SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact(
|
SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_signature_parse_compact(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature* sig,
|
rustsecp256k1_v0_1_1_ecdsa_signature* sig,
|
||||||
const unsigned char *input64
|
const unsigned char *input64
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
||||||
|
|
||||||
|
@ -387,9 +387,9 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact(
|
||||||
* encoded numbers are out of range, signature validation with it is
|
* encoded numbers are out of range, signature validation with it is
|
||||||
* guaranteed to fail for every message and public key.
|
* guaranteed to fail for every message and public key.
|
||||||
*/
|
*/
|
||||||
SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(
|
SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature* sig,
|
rustsecp256k1_v0_1_1_ecdsa_signature* sig,
|
||||||
const unsigned char *input,
|
const unsigned char *input,
|
||||||
size_t inputlen
|
size_t inputlen
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
||||||
|
@@ -405,11 +405,11 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(
  *          if 0 was returned).
  * In:      sig: a pointer to an initialized signature object
  */
-SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_signature_serialize_der(
-    const rustsecp256k1_v0_1_0_context* ctx,
+SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_signature_serialize_der(
+    const rustsecp256k1_v0_1_1_context* ctx,
     unsigned char *output,
     size_t *outputlen,
-    const rustsecp256k1_v0_1_0_ecdsa_signature* sig
+    const rustsecp256k1_v0_1_1_ecdsa_signature* sig
 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);
 
 /** Serialize an ECDSA signature in compact (64 byte) format.
@@ -419,12 +419,12 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_signature_serialize_der(
  * Out: output64: a pointer to a 64-byte array to store the compact serialization
  * In:  sig:      a pointer to an initialized signature object
  *
- * See rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact for details about the encoding.
+ * See rustsecp256k1_v0_1_1_ecdsa_signature_parse_compact for details about the encoding.
  */
-SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_signature_serialize_compact(
-    const rustsecp256k1_v0_1_0_context* ctx,
+SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_signature_serialize_compact(
+    const rustsecp256k1_v0_1_1_context* ctx,
     unsigned char *output64,
-    const rustsecp256k1_v0_1_0_ecdsa_signature* sig
+    const rustsecp256k1_v0_1_1_ecdsa_signature* sig
 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
 
 /** Verify an ECDSA signature.
@@ -440,16 +440,16 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_signature_serialize_compact(
  * form are accepted.
  *
  * If you need to accept ECDSA signatures from sources that do not obey this
- * rule, apply rustsecp256k1_v0_1_0_ecdsa_signature_normalize to the signature prior to
+ * rule, apply rustsecp256k1_v0_1_1_ecdsa_signature_normalize to the signature prior to
  * validation, but be aware that doing so results in malleable signatures.
  *
  * For details, see the comments for that function.
  */
-SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ecdsa_verify(
-    const rustsecp256k1_v0_1_0_context* ctx,
-    const rustsecp256k1_v0_1_0_ecdsa_signature *sig,
+SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ecdsa_verify(
+    const rustsecp256k1_v0_1_1_context* ctx,
+    const rustsecp256k1_v0_1_1_ecdsa_signature *sig,
     const unsigned char *msg32,
-    const rustsecp256k1_v0_1_0_pubkey *pubkey
+    const rustsecp256k1_v0_1_1_pubkey *pubkey
 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);
 
 /** Convert a signature to a normalized lower-S form.
@@ -489,25 +489,25 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ecdsa_verify
  * accept various non-unique encodings, so care should be taken when this
  * property is required for an application.
  *
- * The rustsecp256k1_v0_1_0_ecdsa_sign function will by default create signatures in the
- * lower-S form, and rustsecp256k1_v0_1_0_ecdsa_verify will not accept others. In case
+ * The rustsecp256k1_v0_1_1_ecdsa_sign function will by default create signatures in the
+ * lower-S form, and rustsecp256k1_v0_1_1_ecdsa_verify will not accept others. In case
  * signatures come from a system that cannot enforce this property,
- * rustsecp256k1_v0_1_0_ecdsa_signature_normalize must be called before verification.
+ * rustsecp256k1_v0_1_1_ecdsa_signature_normalize must be called before verification.
  */
-SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_signature_normalize(
-    const rustsecp256k1_v0_1_0_context* ctx,
-    rustsecp256k1_v0_1_0_ecdsa_signature *sigout,
-    const rustsecp256k1_v0_1_0_ecdsa_signature *sigin
+SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_signature_normalize(
+    const rustsecp256k1_v0_1_1_context* ctx,
+    rustsecp256k1_v0_1_1_ecdsa_signature *sigout,
+    const rustsecp256k1_v0_1_1_ecdsa_signature *sigin
 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(3);
 
 /** An implementation of RFC6979 (using HMAC-SHA256) as nonce generation function.
  * If a data pointer is passed, it is assumed to be a pointer to 32 bytes of
  * extra entropy.
  */
-SECP256K1_API extern const rustsecp256k1_v0_1_0_nonce_function rustsecp256k1_v0_1_0_nonce_function_rfc6979;
+SECP256K1_API extern const rustsecp256k1_v0_1_1_nonce_function rustsecp256k1_v0_1_1_nonce_function_rfc6979;
 
-/** A default safe nonce generation function (currently equal to rustsecp256k1_v0_1_0_nonce_function_rfc6979). */
-SECP256K1_API extern const rustsecp256k1_v0_1_0_nonce_function rustsecp256k1_v0_1_0_nonce_function_default;
+/** A default safe nonce generation function (currently equal to rustsecp256k1_v0_1_1_nonce_function_rfc6979). */
+SECP256K1_API extern const rustsecp256k1_v0_1_1_nonce_function rustsecp256k1_v0_1_1_nonce_function_default;
 
 /** Create an ECDSA signature.
  *
|
||||||
* Out: sig: pointer to an array where the signature will be placed (cannot be NULL)
|
* Out: sig: pointer to an array where the signature will be placed (cannot be NULL)
|
||||||
* In: msg32: the 32-byte message hash being signed (cannot be NULL)
|
* In: msg32: the 32-byte message hash being signed (cannot be NULL)
|
||||||
* seckey: pointer to a 32-byte secret key (cannot be NULL)
|
* seckey: pointer to a 32-byte secret key (cannot be NULL)
|
||||||
* noncefp:pointer to a nonce generation function. If NULL, rustsecp256k1_v0_1_0_nonce_function_default is used
|
* noncefp:pointer to a nonce generation function. If NULL, rustsecp256k1_v0_1_1_nonce_function_default is used
|
||||||
* ndata: pointer to arbitrary data used by the nonce generation function (can be NULL)
|
* ndata: pointer to arbitrary data used by the nonce generation function (can be NULL)
|
||||||
*
|
*
|
||||||
* The created signature is always in lower-S form. See
|
* The created signature is always in lower-S form. See
|
||||||
* rustsecp256k1_v0_1_0_ecdsa_signature_normalize for more details.
|
* rustsecp256k1_v0_1_1_ecdsa_signature_normalize for more details.
|
||||||
*/
|
*/
|
||||||
SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_sign(
|
SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_sign(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature *sig,
|
rustsecp256k1_v0_1_1_ecdsa_signature *sig,
|
||||||
const unsigned char *msg32,
|
const unsigned char *msg32,
|
||||||
const unsigned char *seckey,
|
const unsigned char *seckey,
|
||||||
rustsecp256k1_v0_1_0_nonce_function noncefp,
|
rustsecp256k1_v0_1_1_nonce_function noncefp,
|
||||||
const void *ndata
|
const void *ndata
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);
|
||||||
|
|
||||||
|
@ -539,8 +539,8 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_sign(
|
||||||
* Args: ctx: pointer to a context object (cannot be NULL)
|
* Args: ctx: pointer to a context object (cannot be NULL)
|
||||||
* In: seckey: pointer to a 32-byte secret key (cannot be NULL)
|
* In: seckey: pointer to a 32-byte secret key (cannot be NULL)
|
||||||
*/
|
*/
|
||||||
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_seckey_verify(
|
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_seckey_verify(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
const unsigned char *seckey
|
const unsigned char *seckey
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2);
|
||||||
|
|
||||||
|
@ -552,9 +552,9 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_seckey_ve
|
||||||
* Out: pubkey: pointer to the created public key (cannot be NULL)
|
* Out: pubkey: pointer to the created public key (cannot be NULL)
|
||||||
* In: seckey: pointer to a 32-byte private key (cannot be NULL)
|
* In: seckey: pointer to a 32-byte private key (cannot be NULL)
|
||||||
*/
|
*/
|
||||||
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_create(
|
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_pubkey_create(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
rustsecp256k1_v0_1_0_pubkey *pubkey,
|
rustsecp256k1_v0_1_1_pubkey *pubkey,
|
||||||
const unsigned char *seckey
|
const unsigned char *seckey
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
||||||
|
|
||||||
|
@ -564,8 +564,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_cr
|
||||||
* Args: ctx: pointer to a context object
|
* Args: ctx: pointer to a context object
|
||||||
* In/Out: seckey: pointer to the 32-byte private key to be negated (cannot be NULL)
|
* In/Out: seckey: pointer to the 32-byte private key to be negated (cannot be NULL)
|
||||||
*/
|
*/
|
||||||
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_privkey_negate(
|
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_privkey_negate(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
unsigned char *seckey
|
unsigned char *seckey
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2);
|
||||||
|
|
||||||
|
@ -575,9 +575,9 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_privkey_n
|
||||||
* Args: ctx: pointer to a context object
|
* Args: ctx: pointer to a context object
|
||||||
* In/Out: pubkey: pointer to the public key to be negated (cannot be NULL)
|
* In/Out: pubkey: pointer to the public key to be negated (cannot be NULL)
|
||||||
*/
|
*/
|
||||||
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_negate(
|
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_pubkey_negate(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
rustsecp256k1_v0_1_0_pubkey *pubkey
|
rustsecp256k1_v0_1_1_pubkey *pubkey
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2);
|
||||||
|
|
||||||
/** Tweak a private key by adding tweak to it.
|
/** Tweak a private key by adding tweak to it.
|
||||||
|
@ -589,8 +589,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_ne
|
||||||
* In/Out: seckey: pointer to a 32-byte private key.
|
* In/Out: seckey: pointer to a 32-byte private key.
|
||||||
* In: tweak: pointer to a 32-byte tweak.
|
* In: tweak: pointer to a 32-byte tweak.
|
||||||
*/
|
*/
|
||||||
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_privkey_tweak_add(
|
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_privkey_tweak_add(
|
||||||
const rustsecp256k1_v0_1_0_context* ctx,
|
const rustsecp256k1_v0_1_1_context* ctx,
|
||||||
unsigned char *seckey,
|
unsigned char *seckey,
|
||||||
const unsigned char *tweak
|
const unsigned char *tweak
|
||||||
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
|
||||||
@@ -605,9 +605,9 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_privkey_t
  * In/Out: pubkey: pointer to a public key object.
  * In:     tweak:  pointer to a 32-byte tweak.
  */
-SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_tweak_add(
-    const rustsecp256k1_v0_1_0_context* ctx,
-    rustsecp256k1_v0_1_0_pubkey *pubkey,
+SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_pubkey_tweak_add(
+    const rustsecp256k1_v0_1_1_context* ctx,
+    rustsecp256k1_v0_1_1_pubkey *pubkey,
     const unsigned char *tweak
 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
@@ -618,8 +618,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_tw
  * In/Out: seckey: pointer to a 32-byte private key.
  * In:     tweak:  pointer to a 32-byte tweak.
  */
-SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_privkey_tweak_mul(
-    const rustsecp256k1_v0_1_0_context* ctx,
+SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_privkey_tweak_mul(
+    const rustsecp256k1_v0_1_1_context* ctx,
     unsigned char *seckey,
     const unsigned char *tweak
 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
@@ -632,9 +632,9 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_privkey_t
  * In/Out: pubkey: pointer to a public key obkect.
  * In:     tweak:  pointer to a 32-byte tweak.
  */
-SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_tweak_mul(
-    const rustsecp256k1_v0_1_0_context* ctx,
-    rustsecp256k1_v0_1_0_pubkey *pubkey,
+SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_pubkey_tweak_mul(
+    const rustsecp256k1_v0_1_1_context* ctx,
+    rustsecp256k1_v0_1_1_pubkey *pubkey,
     const unsigned char *tweak
 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
@@ -659,12 +659,12 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_tw
  * guaranteed and may change in the future. It is safe to call this function on
  * contexts not initialized for signing; then it will have no effect and return 1.
  *
- * You should call this after rustsecp256k1_v0_1_0_context_create or
- * rustsecp256k1_v0_1_0_context_clone (and rustsecp256k1_v0_1_0_context_preallocated_create or
- * rustsecp256k1_v0_1_0_context_clone, resp.), and you may call this repeatedly afterwards.
+ * You should call this after rustsecp256k1_v0_1_1_context_create or
+ * rustsecp256k1_v0_1_1_context_clone (and rustsecp256k1_v0_1_1_context_preallocated_create or
+ * rustsecp256k1_v0_1_1_context_clone, resp.), and you may call this repeatedly afterwards.
  */
-SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_context_randomize(
-    rustsecp256k1_v0_1_0_context* ctx,
+SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_context_randomize(
+    rustsecp256k1_v0_1_1_context* ctx,
     const unsigned char *seed32
 ) SECP256K1_ARG_NONNULL(1);
@@ -677,10 +677,10 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_context_rand
  * In: ins: pointer to array of pointers to public keys (cannot be NULL)
  *     n:   the number of public keys to add together (must be at least 1)
  */
-SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ec_pubkey_combine(
-    const rustsecp256k1_v0_1_0_context* ctx,
-    rustsecp256k1_v0_1_0_pubkey *out,
-    const rustsecp256k1_v0_1_0_pubkey * const * ins,
+SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ec_pubkey_combine(
+    const rustsecp256k1_v0_1_1_context* ctx,
+    rustsecp256k1_v0_1_1_pubkey *out,
+    const rustsecp256k1_v0_1_1_pubkey * const * ins,
     size_t n
 ) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
@ -15,7 +15,7 @@ extern "C" {
|
||||||
* y: pointer to a 32-byte y coordinate
|
* y: pointer to a 32-byte y coordinate
|
||||||
* data: Arbitrary data pointer that is passed through
|
* data: Arbitrary data pointer that is passed through
|
||||||
*/
|
*/
|
||||||
typedef int (*rustsecp256k1_v0_1_0_ecdh_hash_function)(
|
typedef int (*rustsecp256k1_v0_1_1_ecdh_hash_function)(
|
||||||
unsigned char *output,
|
unsigned char *output,
|
||||||
const unsigned char *x,
|
const unsigned char *x,
|
||||||
const unsigned char *y,
|
const unsigned char *y,
|
||||||
|
@ -23,28 +23,28 @@ typedef int (*rustsecp256k1_v0_1_0_ecdh_hash_function)(
|
||||||
);
|
);
|
||||||
|
|
/** An implementation of SHA256 hash function that applies to compressed public key. */
SECP256K1_API extern const rustsecp256k1_v0_1_0_ecdh_hash_function rustsecp256k1_v0_1_0_ecdh_hash_function_sha256;
SECP256K1_API extern const rustsecp256k1_v0_1_1_ecdh_hash_function rustsecp256k1_v0_1_1_ecdh_hash_function_sha256;

/** A default ecdh hash function (currently equal to rustsecp256k1_v0_1_0_ecdh_hash_function_sha256). */
/** A default ecdh hash function (currently equal to rustsecp256k1_v0_1_1_ecdh_hash_function_sha256). */
SECP256K1_API extern const rustsecp256k1_v0_1_0_ecdh_hash_function rustsecp256k1_v0_1_0_ecdh_hash_function_default;
SECP256K1_API extern const rustsecp256k1_v0_1_1_ecdh_hash_function rustsecp256k1_v0_1_1_ecdh_hash_function_default;

/** Compute an EC Diffie-Hellman secret in constant time
 * Returns: 1: exponentiation was successful
 * 0: scalar was invalid (zero or overflow)
 * Args: ctx: pointer to a context object (cannot be NULL)
 * Out: output: pointer to an array to be filled by the function
 * In: pubkey: a pointer to a rustsecp256k1_v0_1_0_pubkey containing an
 * In: pubkey: a pointer to a rustsecp256k1_v0_1_1_pubkey containing an
 * initialized public key
 * privkey: a 32-byte scalar with which to multiply the point
 * hashfp: pointer to a hash function. If NULL, rustsecp256k1_v0_1_0_ecdh_hash_function_sha256 is used
 * hashfp: pointer to a hash function. If NULL, rustsecp256k1_v0_1_1_ecdh_hash_function_sha256 is used
 * data: Arbitrary data pointer that is passed through
 */
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ecdh(
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ecdh(
    const rustsecp256k1_v0_1_0_context* ctx,
    const rustsecp256k1_v0_1_1_context* ctx,
    unsigned char *output,
    const rustsecp256k1_v0_1_0_pubkey *pubkey,
    const rustsecp256k1_v0_1_1_pubkey *pubkey,
    const unsigned char *privkey,
    rustsecp256k1_v0_1_0_ecdh_hash_function hashfp,
    rustsecp256k1_v0_1_1_ecdh_hash_function hashfp,
    void *data
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);

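Not part of the diff: a minimal ECDH sketch against the renamed API, assuming a valid 32-byte secret key and an already-parsed peer public key. Passing NULL for hashfp selects the default SHA-256 hash function, so the output buffer is 32 bytes; error handling is omitted.

#include "secp256k1.h"
#include "secp256k1_ecdh.h"   /* vendored headers from secp256k1-sys */

/* Derives a 32-byte shared secret from our secret key and the peer's public key. */
int ecdh_example(const rustsecp256k1_v0_1_1_context *ctx,
                 const rustsecp256k1_v0_1_1_pubkey *peer_pubkey,
                 const unsigned char seckey[32],
                 unsigned char secret_out[32]) {
    return rustsecp256k1_v0_1_1_ecdh(ctx, secret_out, peer_pubkey, seckey, NULL, NULL);
}
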
@ -16,8 +16,8 @@ extern "C" {
 * objects created by functions in secp256k1.h, i.e., they can be passed to any
 * API function that excepts a context object (see secp256k1.h for details). The
 * only exception is that context objects created by functions in this module
 * must be destroyed using rustsecp256k1_v0_1_0_context_preallocated_destroy (in this
 * must be destroyed using rustsecp256k1_v0_1_1_context_preallocated_destroy (in this
 * module) instead of rustsecp256k1_v0_1_0_context_destroy (in secp256k1.h).
 * module) instead of rustsecp256k1_v0_1_1_context_destroy (in secp256k1.h).
 *
 * It is guaranteed that functions in by this module will not call malloc or its
 * friends realloc, calloc, and free.
@ -27,24 +27,24 @@ extern "C" {
 * caller-provided memory.
 *
 * The purpose of this function is to determine how much memory must be provided
 * to rustsecp256k1_v0_1_0_context_preallocated_create.
 * to rustsecp256k1_v0_1_1_context_preallocated_create.
 *
 * Returns: the required size of the caller-provided memory block
 * In: flags: which parts of the context to initialize.
 */
SECP256K1_API size_t rustsecp256k1_v0_1_0_context_preallocated_size(
SECP256K1_API size_t rustsecp256k1_v0_1_1_context_preallocated_size(
    unsigned int flags
) SECP256K1_WARN_UNUSED_RESULT;

/** Create a secp256k1 context object in caller-provided memory.
 *
 * The caller must provide a pointer to a rewritable contiguous block of memory
 * of size at least rustsecp256k1_v0_1_0_context_preallocated_size(flags) bytes, suitably
 * of size at least rustsecp256k1_v0_1_1_context_preallocated_size(flags) bytes, suitably
 * aligned to hold an object of any type.
 *
 * The block of memory is exclusively owned by the created context object during
 * the lifetime of this context object, which begins with the call to this
 * function and ends when a call to rustsecp256k1_v0_1_0_context_preallocated_destroy
 * function and ends when a call to rustsecp256k1_v0_1_1_context_preallocated_destroy
 * (which destroys the context object again) returns. During the lifetime of the
 * context object, the caller is obligated not to access this block of memory,
 * i.e., the caller may not read or write the memory, e.g., by copying the memory
@ -54,14 +54,14 @@ SECP256K1_API size_t rustsecp256k1_v0_1_0_context_preallocated_size(
 *
 * Returns: a newly created context object.
 * In: prealloc: a pointer to a rewritable contiguous block of memory of
 * size at least rustsecp256k1_v0_1_0_context_preallocated_size(flags)
 * size at least rustsecp256k1_v0_1_1_context_preallocated_size(flags)
 * bytes, as detailed above (cannot be NULL)
 * flags: which parts of the context to initialize.
 *
 * See also rustsecp256k1_v0_1_0_context_randomize (in secp256k1.h)
 * See also rustsecp256k1_v0_1_1_context_randomize (in secp256k1.h)
 * and rustsecp256k1_v0_1_0_context_preallocated_destroy.
 * and rustsecp256k1_v0_1_1_context_preallocated_destroy.
 */
SECP256K1_API rustsecp256k1_v0_1_0_context* rustsecp256k1_v0_1_0_context_preallocated_create(
SECP256K1_API rustsecp256k1_v0_1_1_context* rustsecp256k1_v0_1_1_context_preallocated_create(
    void* prealloc,
    unsigned int flags
) SECP256K1_ARG_NONNULL(1) SECP256K1_WARN_UNUSED_RESULT;

@ -72,28 +72,28 @@ SECP256K1_API rustsecp256k1_v0_1_0_context* rustsecp256k1_v0_1_0_context_preallo
 * Returns: the required size of the caller-provided memory block.
 * In: ctx: an existing context to copy (cannot be NULL)
 */
SECP256K1_API size_t rustsecp256k1_v0_1_0_context_preallocated_clone_size(
SECP256K1_API size_t rustsecp256k1_v0_1_1_context_preallocated_clone_size(
    const rustsecp256k1_v0_1_0_context* ctx
    const rustsecp256k1_v0_1_1_context* ctx
) SECP256K1_ARG_NONNULL(1) SECP256K1_WARN_UNUSED_RESULT;

/** Copy a secp256k1 context object into caller-provided memory.
 *
 * The caller must provide a pointer to a rewritable contiguous block of memory
 * of size at least rustsecp256k1_v0_1_0_context_preallocated_size(flags) bytes, suitably
 * of size at least rustsecp256k1_v0_1_1_context_preallocated_size(flags) bytes, suitably
 * aligned to hold an object of any type.
 *
 * The block of memory is exclusively owned by the created context object during
 * the lifetime of this context object, see the description of
 * rustsecp256k1_v0_1_0_context_preallocated_create for details.
 * rustsecp256k1_v0_1_1_context_preallocated_create for details.
 *
 * Returns: a newly created context object.
 * Args: ctx: an existing context to copy (cannot be NULL)
 * In: prealloc: a pointer to a rewritable contiguous block of memory of
 * size at least rustsecp256k1_v0_1_0_context_preallocated_size(flags)
 * size at least rustsecp256k1_v0_1_1_context_preallocated_size(flags)
 * bytes, as detailed above (cannot be NULL)
 */
SECP256K1_API rustsecp256k1_v0_1_0_context* rustsecp256k1_v0_1_0_context_preallocated_clone(
SECP256K1_API rustsecp256k1_v0_1_1_context* rustsecp256k1_v0_1_1_context_preallocated_clone(
    const rustsecp256k1_v0_1_0_context* ctx,
    const rustsecp256k1_v0_1_1_context* ctx,
    void* prealloc
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_WARN_UNUSED_RESULT;

@ -103,22 +103,22 @@ SECP256K1_API rustsecp256k1_v0_1_0_context* rustsecp256k1_v0_1_0_context_preallo
 * The context pointer may not be used afterwards.
 *
 * The context to destroy must have been created using
 * rustsecp256k1_v0_1_0_context_preallocated_create or rustsecp256k1_v0_1_0_context_preallocated_clone.
 * rustsecp256k1_v0_1_1_context_preallocated_create or rustsecp256k1_v0_1_1_context_preallocated_clone.
 * If the context has instead been created using rustsecp256k1_v0_1_0_context_create or
 * If the context has instead been created using rustsecp256k1_v0_1_1_context_create or
 * rustsecp256k1_v0_1_0_context_clone, the behaviour is undefined. In that case,
 * rustsecp256k1_v0_1_1_context_clone, the behaviour is undefined. In that case,
 * rustsecp256k1_v0_1_0_context_destroy must be used instead.
 * rustsecp256k1_v0_1_1_context_destroy must be used instead.
 *
 * If required, it is the responsibility of the caller to deallocate the block
 * of memory properly after this function returns, e.g., by calling free on the
 * preallocated pointer given to rustsecp256k1_v0_1_0_context_preallocated_create or
 * preallocated pointer given to rustsecp256k1_v0_1_1_context_preallocated_create or
 * rustsecp256k1_v0_1_0_context_preallocated_clone.
 * rustsecp256k1_v0_1_1_context_preallocated_clone.
 *
 * Args: ctx: an existing context to destroy, constructed using
 * rustsecp256k1_v0_1_0_context_preallocated_create or
 * rustsecp256k1_v0_1_1_context_preallocated_create or
 * rustsecp256k1_v0_1_0_context_preallocated_clone (cannot be NULL)
 * rustsecp256k1_v0_1_1_context_preallocated_clone (cannot be NULL)
 */
SECP256K1_API void rustsecp256k1_v0_1_0_context_preallocated_destroy(
SECP256K1_API void rustsecp256k1_v0_1_1_context_preallocated_destroy(
    rustsecp256k1_v0_1_0_context* ctx
    rustsecp256k1_v0_1_1_context* ctx
);

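Not part of the diff: a sketch of the preallocated-memory lifecycle described above, using the renamed symbols. The block comes from malloc purely for illustration; the point of the module is that the caller owns and later frees the allocation itself, and error handling is trimmed.

#include <stdlib.h>
#include "secp256k1.h"
#include "secp256k1_preallocated.h"   /* vendored headers from secp256k1-sys */

int preallocated_example(void) {
    size_t sz = rustsecp256k1_v0_1_1_context_preallocated_size(SECP256K1_CONTEXT_SIGN);
    void *block = malloc(sz);         /* any suitably aligned caller-owned block works */
    rustsecp256k1_v0_1_1_context *ctx;
    if (block == NULL) return 0;
    ctx = rustsecp256k1_v0_1_1_context_preallocated_create(block, SECP256K1_CONTEXT_SIGN);
    /* ... use ctx with the regular secp256k1.h API ... */
    rustsecp256k1_v0_1_1_context_preallocated_destroy(ctx);
    free(block);                      /* the caller deallocates its own memory afterwards */
    return 1;
}
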
#ifdef __cplusplus

@ -14,8 +14,8 @@ extern "C" {
 * guaranteed to be portable between different platforms or versions. It is
 * however guaranteed to be 65 bytes in size, and can be safely copied/moved.
 * If you need to convert to a format suitable for storage or transmission, use
 * the rustsecp256k1_v0_1_0_ecdsa_signature_serialize_* and
 * the rustsecp256k1_v0_1_1_ecdsa_signature_serialize_* and
 * rustsecp256k1_v0_1_0_ecdsa_signature_parse_* functions.
 * rustsecp256k1_v0_1_1_ecdsa_signature_parse_* functions.
 *
 * Furthermore, it is guaranteed that identical signatures (including their
 * recoverability) will have identical representation, so they can be
@ -23,7 +23,7 @@ extern "C" {
 */
typedef struct {
    unsigned char data[65];
} rustsecp256k1_v0_1_0_ecdsa_recoverable_signature;
} rustsecp256k1_v0_1_1_ecdsa_recoverable_signature;

/** Parse a compact ECDSA signature (64 bytes + recovery id).
 *
@ -33,9 +33,9 @@ typedef struct {
 * In: input64: a pointer to a 64-byte compact signature
 * recid: the recovery id (0, 1, 2 or 3)
 */
SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(
SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(
    const rustsecp256k1_v0_1_0_context* ctx,
    const rustsecp256k1_v0_1_1_context* ctx,
    rustsecp256k1_v0_1_0_ecdsa_recoverable_signature* sig,
    rustsecp256k1_v0_1_1_ecdsa_recoverable_signature* sig,
    const unsigned char *input64,
    int recid
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);
@ -46,10 +46,10 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact
 * Out: sig: a pointer to a normal signature (cannot be NULL).
 * In: sigin: a pointer to a recoverable signature (cannot be NULL).
 */
SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_convert(
SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_convert(
    const rustsecp256k1_v0_1_0_context* ctx,
    const rustsecp256k1_v0_1_1_context* ctx,
    rustsecp256k1_v0_1_0_ecdsa_signature* sig,
    rustsecp256k1_v0_1_1_ecdsa_signature* sig,
    const rustsecp256k1_v0_1_0_ecdsa_recoverable_signature* sigin
    const rustsecp256k1_v0_1_1_ecdsa_recoverable_signature* sigin
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3);

/** Serialize an ECDSA signature in compact format (64 bytes + recovery id).
@ -60,11 +60,11 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_convert(
 * recid: a pointer to an integer to hold the recovery id (can be NULL).
 * In: sig: a pointer to an initialized signature object (cannot be NULL)
 */
SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_serialize_compact(
SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_serialize_compact(
    const rustsecp256k1_v0_1_0_context* ctx,
    const rustsecp256k1_v0_1_1_context* ctx,
    unsigned char *output64,
    int *recid,
    const rustsecp256k1_v0_1_0_ecdsa_recoverable_signature* sig
    const rustsecp256k1_v0_1_1_ecdsa_recoverable_signature* sig
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);

/** Create a recoverable ECDSA signature.
@ -75,15 +75,15 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_serialize_com
 * Out: sig: pointer to an array where the signature will be placed (cannot be NULL)
 * In: msg32: the 32-byte message hash being signed (cannot be NULL)
 * seckey: pointer to a 32-byte secret key (cannot be NULL)
 * noncefp:pointer to a nonce generation function. If NULL, rustsecp256k1_v0_1_0_nonce_function_default is used
 * noncefp:pointer to a nonce generation function. If NULL, rustsecp256k1_v0_1_1_nonce_function_default is used
 * ndata: pointer to arbitrary data used by the nonce generation function (can be NULL)
 */
SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_sign_recoverable(
SECP256K1_API int rustsecp256k1_v0_1_1_ecdsa_sign_recoverable(
    const rustsecp256k1_v0_1_0_context* ctx,
    const rustsecp256k1_v0_1_1_context* ctx,
    rustsecp256k1_v0_1_0_ecdsa_recoverable_signature *sig,
    rustsecp256k1_v0_1_1_ecdsa_recoverable_signature *sig,
    const unsigned char *msg32,
    const unsigned char *seckey,
    rustsecp256k1_v0_1_0_nonce_function noncefp,
    rustsecp256k1_v0_1_1_nonce_function noncefp,
    const void *ndata
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);

@ -96,10 +96,10 @@ SECP256K1_API int rustsecp256k1_v0_1_0_ecdsa_sign_recoverable(
 * In: sig: pointer to initialized signature that supports pubkey recovery (cannot be NULL)
 * msg32: the 32-byte message hash assumed to be signed (cannot be NULL)
 */
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_0_ecdsa_recover(
SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1_v0_1_1_ecdsa_recover(
    const rustsecp256k1_v0_1_0_context* ctx,
    const rustsecp256k1_v0_1_1_context* ctx,
    rustsecp256k1_v0_1_0_pubkey *pubkey,
    rustsecp256k1_v0_1_1_pubkey *pubkey,
    const rustsecp256k1_v0_1_0_ecdsa_recoverable_signature *sig,
    const rustsecp256k1_v0_1_1_ecdsa_recoverable_signature *sig,
    const unsigned char *msg32
) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4);

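Not part of the diff: a sketch of the recoverable-signature round trip (sign, serialize as 64 bytes plus recovery id, recover the public key) using the renamed symbols; the context is assumed to carry both SIGN and VERIFY capabilities and most error handling is omitted.

#include "secp256k1.h"
#include "secp256k1_recovery.h"   /* vendored headers from secp256k1-sys */

int recover_example(const rustsecp256k1_v0_1_1_context *ctx,
                    const unsigned char msg32[32],
                    const unsigned char seckey[32]) {
    rustsecp256k1_v0_1_1_ecdsa_recoverable_signature sig;
    rustsecp256k1_v0_1_1_pubkey recovered;
    unsigned char compact[64];
    int recid;

    /* Sign with the default nonce function, then serialize as compact + recovery id. */
    if (!rustsecp256k1_v0_1_1_ecdsa_sign_recoverable(ctx, &sig, msg32, seckey, NULL, NULL)) return 0;
    rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_serialize_compact(ctx, compact, &recid, &sig);

    /* Anyone holding (compact, recid, msg32) can recover the signer's public key. */
    return rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &recovered, &sig, msg32);
}
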
@ -5,8 +5,8 @@ import sys
load("group_prover.sage")
load("weierstrass_prover.sage")

def formula_rustsecp256k1_v0_1_0_gej_double_var(a):
def formula_rustsecp256k1_v0_1_1_gej_double_var(a):
    """libsecp256k1's rustsecp256k1_v0_1_0_gej_double_var, used by various addition functions"""
    """libsecp256k1's rustsecp256k1_v0_1_1_gej_double_var, used by various addition functions"""
    rz = a.Z * a.Y
    rz = rz * 2
    t1 = a.X^2
@ -29,8 +29,8 @@ def formula_rustsecp256k1_v0_1_0_gej_double_var(a):
    ry = ry + t2
    return jacobianpoint(rx, ry, rz)

def formula_rustsecp256k1_v0_1_0_gej_add_var(branch, a, b):
def formula_rustsecp256k1_v0_1_1_gej_add_var(branch, a, b):
    """libsecp256k1's rustsecp256k1_v0_1_0_gej_add_var"""
    """libsecp256k1's rustsecp256k1_v0_1_1_gej_add_var"""
    if branch == 0:
        return (constraints(), constraints(nonzero={a.Infinity : 'a_infinite'}), b)
    if branch == 1:
@ -48,7 +48,7 @@ def formula_rustsecp256k1_v0_1_0_gej_add_var(branch, a, b):
    i = -s1
    i = i + s2
    if branch == 2:
        r = formula_rustsecp256k1_v0_1_0_gej_double_var(a)
        r = formula_rustsecp256k1_v0_1_1_gej_double_var(a)
        return (constraints(), constraints(zero={h : 'h=0', i : 'i=0', a.Infinity : 'a_finite', b.Infinity : 'b_finite'}), r)
    if branch == 3:
        return (constraints(), constraints(zero={h : 'h=0', a.Infinity : 'a_finite', b.Infinity : 'b_finite'}, nonzero={i : 'i!=0'}), point_at_infinity())
@ -71,8 +71,8 @@ def formula_rustsecp256k1_v0_1_0_gej_add_var(branch, a, b):
    ry = ry + h3
    return (constraints(), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite'}, nonzero={h : 'h!=0'}), jacobianpoint(rx, ry, rz))

def formula_rustsecp256k1_v0_1_0_gej_add_ge_var(branch, a, b):
def formula_rustsecp256k1_v0_1_1_gej_add_ge_var(branch, a, b):
    """libsecp256k1's rustsecp256k1_v0_1_0_gej_add_ge_var, which assume bz==1"""
    """libsecp256k1's rustsecp256k1_v0_1_1_gej_add_ge_var, which assume bz==1"""
    if branch == 0:
        return (constraints(zero={b.Z - 1 : 'b.z=1'}), constraints(nonzero={a.Infinity : 'a_infinite'}), b)
    if branch == 1:
@ -88,7 +88,7 @@ def formula_rustsecp256k1_v0_1_0_gej_add_ge_var(branch, a, b):
    i = -s1
    i = i + s2
    if (branch == 2):
        r = formula_rustsecp256k1_v0_1_0_gej_double_var(a)
        r = formula_rustsecp256k1_v0_1_1_gej_double_var(a)
        return (constraints(zero={b.Z - 1 : 'b.z=1'}), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite', h : 'h=0', i : 'i=0'}), r)
    if (branch == 3):
        return (constraints(zero={b.Z - 1 : 'b.z=1'}), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite', h : 'h=0'}, nonzero={i : 'i!=0'}), point_at_infinity())
@ -110,8 +110,8 @@ def formula_rustsecp256k1_v0_1_0_gej_add_ge_var(branch, a, b):
    ry = ry + h3
    return (constraints(zero={b.Z - 1 : 'b.z=1'}), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite'}, nonzero={h : 'h!=0'}), jacobianpoint(rx, ry, rz))

def formula_rustsecp256k1_v0_1_0_gej_add_zinv_var(branch, a, b):
def formula_rustsecp256k1_v0_1_1_gej_add_zinv_var(branch, a, b):
    """libsecp256k1's rustsecp256k1_v0_1_0_gej_add_zinv_var"""
    """libsecp256k1's rustsecp256k1_v0_1_1_gej_add_zinv_var"""
    bzinv = b.Z^(-1)
    if branch == 0:
        return (constraints(), constraints(nonzero={b.Infinity : 'b_infinite'}), a)
@ -134,7 +134,7 @@ def formula_rustsecp256k1_v0_1_0_gej_add_zinv_var(branch, a, b):
    i = -s1
    i = i + s2
    if branch == 2:
        r = formula_rustsecp256k1_v0_1_0_gej_double_var(a)
        r = formula_rustsecp256k1_v0_1_1_gej_double_var(a)
        return (constraints(), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite', h : 'h=0', i : 'i=0'}), r)
    if branch == 3:
        return (constraints(), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite', h : 'h=0'}, nonzero={i : 'i!=0'}), point_at_infinity())
@ -157,8 +157,8 @@ def formula_rustsecp256k1_v0_1_0_gej_add_zinv_var(branch, a, b):
    ry = ry + h3
    return (constraints(), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite'}, nonzero={h : 'h!=0'}), jacobianpoint(rx, ry, rz))

def formula_rustsecp256k1_v0_1_0_gej_add_ge(branch, a, b):
def formula_rustsecp256k1_v0_1_1_gej_add_ge(branch, a, b):
    """libsecp256k1's rustsecp256k1_v0_1_0_gej_add_ge"""
    """libsecp256k1's rustsecp256k1_v0_1_1_gej_add_ge"""
    zeroes = {}
    nonzeroes = {}
    a_infinity = False
@ -229,8 +229,8 @@ def formula_rustsecp256k1_v0_1_0_gej_add_ge(branch, a, b):
        return (constraints(zero={b.Z - 1 : 'b.z=1', b.Infinity : 'b_finite'}), constraints(zero=zeroes, nonzero=nonzeroes), point_at_infinity())
    return (constraints(zero={b.Z - 1 : 'b.z=1', b.Infinity : 'b_finite'}), constraints(zero=zeroes, nonzero=nonzeroes), jacobianpoint(rx, ry, rz))

def formula_rustsecp256k1_v0_1_0_gej_add_ge_old(branch, a, b):
def formula_rustsecp256k1_v0_1_1_gej_add_ge_old(branch, a, b):
    """libsecp256k1's old rustsecp256k1_v0_1_0_gej_add_ge, which fails when ay+by=0 but ax!=bx"""
    """libsecp256k1's old rustsecp256k1_v0_1_1_gej_add_ge, which fails when ay+by=0 but ax!=bx"""
    a_infinity = (branch & 1) != 0
    zero = {}
    nonzero = {}
@ -292,15 +292,15 @@ def formula_rustsecp256k1_v0_1_0_gej_add_ge_old(branch, a, b):
    return (constraints(zero={b.Z - 1 : 'b.z=1', b.Infinity : 'b_finite'}), constraints(zero=zero, nonzero=nonzero), jacobianpoint(rx, ry, rz))

if __name__ == "__main__":
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_var", 0, 7, 5, formula_rustsecp256k1_v0_1_0_gej_add_var)
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_var", 0, 7, 5, formula_rustsecp256k1_v0_1_1_gej_add_var)
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_ge_var", 0, 7, 5, formula_rustsecp256k1_v0_1_0_gej_add_ge_var)
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_ge_var", 0, 7, 5, formula_rustsecp256k1_v0_1_1_gej_add_ge_var)
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_zinv_var", 0, 7, 5, formula_rustsecp256k1_v0_1_0_gej_add_zinv_var)
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_zinv_var", 0, 7, 5, formula_rustsecp256k1_v0_1_1_gej_add_zinv_var)
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_ge", 0, 7, 16, formula_rustsecp256k1_v0_1_0_gej_add_ge)
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_ge", 0, 7, 16, formula_rustsecp256k1_v0_1_1_gej_add_ge)
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_ge_old [should fail]", 0, 7, 4, formula_rustsecp256k1_v0_1_0_gej_add_ge_old)
    check_symbolic_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_ge_old [should fail]", 0, 7, 4, formula_rustsecp256k1_v0_1_1_gej_add_ge_old)

    if len(sys.argv) >= 2 and sys.argv[1] == "--exhaustive":
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_var", 0, 7, 5, formula_rustsecp256k1_v0_1_0_gej_add_var, 43)
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_var", 0, 7, 5, formula_rustsecp256k1_v0_1_1_gej_add_var, 43)
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_ge_var", 0, 7, 5, formula_rustsecp256k1_v0_1_0_gej_add_ge_var, 43)
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_ge_var", 0, 7, 5, formula_rustsecp256k1_v0_1_1_gej_add_ge_var, 43)
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_zinv_var", 0, 7, 5, formula_rustsecp256k1_v0_1_0_gej_add_zinv_var, 43)
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_zinv_var", 0, 7, 5, formula_rustsecp256k1_v0_1_1_gej_add_zinv_var, 43)
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_ge", 0, 7, 16, formula_rustsecp256k1_v0_1_0_gej_add_ge, 43)
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_ge", 0, 7, 16, formula_rustsecp256k1_v0_1_1_gej_add_ge, 43)
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_0_gej_add_ge_old [should fail]", 0, 7, 4, formula_rustsecp256k1_v0_1_0_gej_add_ge_old, 43)
        check_exhaustive_jacobian_weierstrass("rustsecp256k1_v0_1_1_gej_add_ge_old [should fail]", 0, 7, 4, formula_rustsecp256k1_v0_1_1_gej_add_ge_old, 43)

@ -27,8 +27,8 @@ Note:
.set field_not_M, 0xfc000000 @ ~M = ~0x3ffffff

    .align 2
    .global rustsecp256k1_v0_1_0_fe_mul_inner
    .global rustsecp256k1_v0_1_1_fe_mul_inner
    .type rustsecp256k1_v0_1_0_fe_mul_inner, %function
    .type rustsecp256k1_v0_1_1_fe_mul_inner, %function
    @ Arguments:
    @ r0 r Restrict: can overlap with a, not with b
    @ r1 a
@ -36,7 +36,7 @@ Note:
    @ Stack (total 4+10*4 = 44)
    @ sp + #0 saved 'r' pointer
    @ sp + #4 + 4*X t0,t1,t2,t3,t4,t5,t6,t7,u8,t9
rustsecp256k1_v0_1_0_fe_mul_inner:
rustsecp256k1_v0_1_1_fe_mul_inner:
    stmfd sp!, {r4, r5, r6, r7, r8, r9, r10, r11, r14}
    sub sp, sp, #48 @ frame=44 + alignment
    str r0, [sp, #0] @ save result address, we need it only at the end
@ -511,18 +511,18 @@ rustsecp256k1_v0_1_0_fe_mul_inner:

    add sp, sp, #48
    ldmfd sp!, {r4, r5, r6, r7, r8, r9, r10, r11, pc}
    .size rustsecp256k1_v0_1_0_fe_mul_inner, .-rustsecp256k1_v0_1_0_fe_mul_inner
    .size rustsecp256k1_v0_1_1_fe_mul_inner, .-rustsecp256k1_v0_1_1_fe_mul_inner

    .align 2
    .global rustsecp256k1_v0_1_0_fe_sqr_inner
    .global rustsecp256k1_v0_1_1_fe_sqr_inner
    .type rustsecp256k1_v0_1_0_fe_sqr_inner, %function
    .type rustsecp256k1_v0_1_1_fe_sqr_inner, %function
    @ Arguments:
    @ r0 r Can overlap with a
    @ r1 a
    @ Stack (total 4+10*4 = 44)
    @ sp + #0 saved 'r' pointer
    @ sp + #4 + 4*X t0,t1,t2,t3,t4,t5,t6,t7,u8,t9
rustsecp256k1_v0_1_0_fe_sqr_inner:
rustsecp256k1_v0_1_1_fe_sqr_inner:
    stmfd sp!, {r4, r5, r6, r7, r8, r9, r10, r11, r14}
    sub sp, sp, #48 @ frame=44 + alignment
    str r0, [sp, #0] @ save result address, we need it only at the end
@ -909,5 +909,5 @@ rustsecp256k1_v0_1_0_fe_sqr_inner:

    add sp, sp, #48
    ldmfd sp!, {r4, r5, r6, r7, r8, r9, r10, r11, pc}
    .size rustsecp256k1_v0_1_0_fe_sqr_inner, .-rustsecp256k1_v0_1_0_fe_sqr_inner
    .size rustsecp256k1_v0_1_1_fe_sqr_inner, .-rustsecp256k1_v0_1_1_fe_sqr_inner

@ -12,8 +12,8 @@
#include "bench.h"

typedef struct {
    rustsecp256k1_v0_1_0_context *ctx;
    rustsecp256k1_v0_1_1_context *ctx;
    rustsecp256k1_v0_1_0_pubkey point;
    rustsecp256k1_v0_1_1_pubkey point;
    unsigned char scalar[32];
} bench_ecdh_data;

@ -29,11 +29,11 @@ static void bench_ecdh_setup(void* arg) {
    };

    /* create a context with no capabilities */
    data->ctx = rustsecp256k1_v0_1_0_context_create(SECP256K1_FLAGS_TYPE_CONTEXT);
    data->ctx = rustsecp256k1_v0_1_1_context_create(SECP256K1_FLAGS_TYPE_CONTEXT);
    for (i = 0; i < 32; i++) {
        data->scalar[i] = i + 1;
    }
    CHECK(rustsecp256k1_v0_1_0_ec_pubkey_parse(data->ctx, &data->point, point, sizeof(point)) == 1);
    CHECK(rustsecp256k1_v0_1_1_ec_pubkey_parse(data->ctx, &data->point, point, sizeof(point)) == 1);
}

static void bench_ecdh(void* arg) {
@ -42,7 +42,7 @@ static void bench_ecdh(void* arg) {
    bench_ecdh_data *data = (bench_ecdh_data*)arg;

    for (i = 0; i < 20000; i++) {
        CHECK(rustsecp256k1_v0_1_0_ecdh(data->ctx, res, &data->point, data->scalar, NULL, NULL) == 1);
        CHECK(rustsecp256k1_v0_1_1_ecdh(data->ctx, res, &data->point, data->scalar, NULL, NULL) == 1);
    }
}

@ -22,13 +22,13 @@

typedef struct {
    /* Setup once in advance */
    rustsecp256k1_v0_1_0_context* ctx;
    rustsecp256k1_v0_1_1_context* ctx;
    rustsecp256k1_v0_1_0_scratch_space* scratch;
    rustsecp256k1_v0_1_1_scratch_space* scratch;
    rustsecp256k1_v0_1_0_scalar* scalars;
    rustsecp256k1_v0_1_1_scalar* scalars;
    rustsecp256k1_v0_1_0_ge* pubkeys;
    rustsecp256k1_v0_1_1_ge* pubkeys;
    rustsecp256k1_v0_1_0_scalar* seckeys;
    rustsecp256k1_v0_1_1_scalar* seckeys;
    rustsecp256k1_v0_1_0_gej* expected_output;
    rustsecp256k1_v0_1_1_gej* expected_output;
    rustsecp256k1_v0_1_0_ecmult_multi_func ecmult_multi;
    rustsecp256k1_v0_1_1_ecmult_multi_func ecmult_multi;

    /* Changes per test */
    size_t count;
@ -39,15 +39,15 @@ typedef struct {
    size_t offset2;

    /* Test output. */
    rustsecp256k1_v0_1_0_gej* output;
    rustsecp256k1_v0_1_1_gej* output;
} bench_data;

static int bench_callback(rustsecp256k1_v0_1_0_scalar* sc, rustsecp256k1_v0_1_0_ge* ge, size_t idx, void* arg) {
static int bench_callback(rustsecp256k1_v0_1_1_scalar* sc, rustsecp256k1_v0_1_1_ge* ge, size_t idx, void* arg) {
    bench_data* data = (bench_data*)arg;
    if (data->includes_g) ++idx;
    if (idx == 0) {
        *sc = data->scalars[data->offset1];
        *ge = rustsecp256k1_v0_1_0_ge_const_g;
        *ge = rustsecp256k1_v0_1_1_ge_const_g;
    } else {
        *sc = data->scalars[(data->offset1 + idx) % POINTS];
        *ge = data->pubkeys[(data->offset2 + idx - 1) % POINTS];
@ -82,14 +82,14 @@ static void bench_ecmult_teardown(void* arg) {
    size_t iter;
    /* Verify the results in teardown, to avoid doing comparisons while benchmarking. */
    for (iter = 0; iter < iters; ++iter) {
        rustsecp256k1_v0_1_0_gej tmp;
        rustsecp256k1_v0_1_1_gej tmp;
        rustsecp256k1_v0_1_0_gej_add_var(&tmp, &data->output[iter], &data->expected_output[iter], NULL);
        rustsecp256k1_v0_1_1_gej_add_var(&tmp, &data->output[iter], &data->expected_output[iter], NULL);
        CHECK(rustsecp256k1_v0_1_0_gej_is_infinity(&tmp));
        CHECK(rustsecp256k1_v0_1_1_gej_is_infinity(&tmp));
    }
}

static void generate_scalar(uint32_t num, rustsecp256k1_v0_1_0_scalar* scalar) {
static void generate_scalar(uint32_t num, rustsecp256k1_v0_1_1_scalar* scalar) {
    rustsecp256k1_v0_1_0_sha256 sha256;
    rustsecp256k1_v0_1_1_sha256 sha256;
    unsigned char c[11] = {'e', 'c', 'm', 'u', 'l', 't', 0, 0, 0, 0};
    unsigned char buf[32];
    int overflow = 0;
@ -97,16 +97,16 @@ static void generate_scalar(uint32_t num, rustsecp256k1_v0_1_0_scalar* scalar) {
    c[7] = num >> 8;
    c[8] = num >> 16;
    c[9] = num >> 24;
    rustsecp256k1_v0_1_0_sha256_initialize(&sha256);
    rustsecp256k1_v0_1_1_sha256_initialize(&sha256);
    rustsecp256k1_v0_1_0_sha256_write(&sha256, c, sizeof(c));
    rustsecp256k1_v0_1_1_sha256_write(&sha256, c, sizeof(c));
    rustsecp256k1_v0_1_0_sha256_finalize(&sha256, buf);
    rustsecp256k1_v0_1_1_sha256_finalize(&sha256, buf);
    rustsecp256k1_v0_1_0_scalar_set_b32(scalar, buf, &overflow);
    rustsecp256k1_v0_1_1_scalar_set_b32(scalar, buf, &overflow);
    CHECK(!overflow);
}

static void run_test(bench_data* data, size_t count, int includes_g) {
    char str[32];
    static const rustsecp256k1_v0_1_0_scalar zero = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0);
    static const rustsecp256k1_v0_1_1_scalar zero = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0);
    size_t iters = 1 + ITERS / count;
    size_t iter;

@ -117,15 +117,15 @@ static void run_test(bench_data* data, size_t count, int includes_g) {
    data->offset1 = (data->count * 0x537b7f6f + 0x8f66a481) % POINTS;
    data->offset2 = (data->count * 0x7f6f537b + 0x6a1a8f49) % POINTS;
    for (iter = 0; iter < iters; ++iter) {
        rustsecp256k1_v0_1_0_scalar tmp;
        rustsecp256k1_v0_1_1_scalar tmp;
        rustsecp256k1_v0_1_0_scalar total = data->scalars[(data->offset1++) % POINTS];
        rustsecp256k1_v0_1_1_scalar total = data->scalars[(data->offset1++) % POINTS];
        size_t i = 0;
        for (i = 0; i + 1 < count; ++i) {
            rustsecp256k1_v0_1_0_scalar_mul(&tmp, &data->seckeys[(data->offset2++) % POINTS], &data->scalars[(data->offset1++) % POINTS]);
            rustsecp256k1_v0_1_1_scalar_mul(&tmp, &data->seckeys[(data->offset2++) % POINTS], &data->scalars[(data->offset1++) % POINTS]);
            rustsecp256k1_v0_1_0_scalar_add(&total, &total, &tmp);
            rustsecp256k1_v0_1_1_scalar_add(&total, &total, &tmp);
        }
        rustsecp256k1_v0_1_0_scalar_negate(&total, &total);
        rustsecp256k1_v0_1_1_scalar_negate(&total, &total);
        rustsecp256k1_v0_1_0_ecmult(&data->ctx->ecmult_ctx, &data->expected_output[iter], NULL, &zero, &total);
        rustsecp256k1_v0_1_1_ecmult(&data->ctx->ecmult_ctx, &data->expected_output[iter], NULL, &zero, &total);
    }

    /* Run the benchmark. */
@ -136,25 +136,25 @@ static void run_test(bench_data* data, size_t count, int includes_g) {
int main(int argc, char **argv) {
    bench_data data;
    int i, p;
    rustsecp256k1_v0_1_0_gej* pubkeys_gej;
    rustsecp256k1_v0_1_1_gej* pubkeys_gej;
    size_t scratch_size;

    data.ctx = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);
    data.ctx = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);
    scratch_size = rustsecp256k1_v0_1_0_strauss_scratch_size(POINTS) + STRAUSS_SCRATCH_OBJECTS*16;
    scratch_size = rustsecp256k1_v0_1_1_strauss_scratch_size(POINTS) + STRAUSS_SCRATCH_OBJECTS*16;
    data.scratch = rustsecp256k1_v0_1_0_scratch_space_create(data.ctx, scratch_size);
    data.scratch = rustsecp256k1_v0_1_1_scratch_space_create(data.ctx, scratch_size);
    data.ecmult_multi = rustsecp256k1_v0_1_0_ecmult_multi_var;
    data.ecmult_multi = rustsecp256k1_v0_1_1_ecmult_multi_var;

    if (argc > 1) {
        if(have_flag(argc, argv, "pippenger_wnaf")) {
            printf("Using pippenger_wnaf:\n");
            data.ecmult_multi = rustsecp256k1_v0_1_0_ecmult_pippenger_batch_single;
            data.ecmult_multi = rustsecp256k1_v0_1_1_ecmult_pippenger_batch_single;
        } else if(have_flag(argc, argv, "strauss_wnaf")) {
            printf("Using strauss_wnaf:\n");
            data.ecmult_multi = rustsecp256k1_v0_1_0_ecmult_strauss_batch_single;
            data.ecmult_multi = rustsecp256k1_v0_1_1_ecmult_strauss_batch_single;
        } else if(have_flag(argc, argv, "simple")) {
            printf("Using simple algorithm:\n");
            data.ecmult_multi = rustsecp256k1_v0_1_0_ecmult_multi_var;
            data.ecmult_multi = rustsecp256k1_v0_1_1_ecmult_multi_var;
            rustsecp256k1_v0_1_0_scratch_space_destroy(data.ctx, data.scratch);
            rustsecp256k1_v0_1_1_scratch_space_destroy(data.ctx, data.scratch);
            data.scratch = NULL;
        } else {
            fprintf(stderr, "%s: unrecognized argument '%s'.\n", argv[0], argv[1]);
@ -164,24 +164,24 @@ int main(int argc, char **argv) {
    }

    /* Allocate stuff */
    data.scalars = malloc(sizeof(rustsecp256k1_v0_1_0_scalar) * POINTS);
    data.scalars = malloc(sizeof(rustsecp256k1_v0_1_1_scalar) * POINTS);
    data.seckeys = malloc(sizeof(rustsecp256k1_v0_1_0_scalar) * POINTS);
    data.seckeys = malloc(sizeof(rustsecp256k1_v0_1_1_scalar) * POINTS);
    data.pubkeys = malloc(sizeof(rustsecp256k1_v0_1_0_ge) * POINTS);
    data.pubkeys = malloc(sizeof(rustsecp256k1_v0_1_1_ge) * POINTS);
    data.expected_output = malloc(sizeof(rustsecp256k1_v0_1_0_gej) * (ITERS + 1));
    data.expected_output = malloc(sizeof(rustsecp256k1_v0_1_1_gej) * (ITERS + 1));
    data.output = malloc(sizeof(rustsecp256k1_v0_1_0_gej) * (ITERS + 1));
    data.output = malloc(sizeof(rustsecp256k1_v0_1_1_gej) * (ITERS + 1));

    /* Generate a set of scalars, and private/public keypairs. */
    pubkeys_gej = malloc(sizeof(rustsecp256k1_v0_1_0_gej) * POINTS);
    pubkeys_gej = malloc(sizeof(rustsecp256k1_v0_1_1_gej) * POINTS);
    rustsecp256k1_v0_1_0_gej_set_ge(&pubkeys_gej[0], &rustsecp256k1_v0_1_0_ge_const_g);
    rustsecp256k1_v0_1_1_gej_set_ge(&pubkeys_gej[0], &rustsecp256k1_v0_1_1_ge_const_g);
    rustsecp256k1_v0_1_0_scalar_set_int(&data.seckeys[0], 1);
    rustsecp256k1_v0_1_1_scalar_set_int(&data.seckeys[0], 1);
    for (i = 0; i < POINTS; ++i) {
        generate_scalar(i, &data.scalars[i]);
        if (i) {
            rustsecp256k1_v0_1_0_gej_double_var(&pubkeys_gej[i], &pubkeys_gej[i - 1], NULL);
            rustsecp256k1_v0_1_1_gej_double_var(&pubkeys_gej[i], &pubkeys_gej[i - 1], NULL);
            rustsecp256k1_v0_1_0_scalar_add(&data.seckeys[i], &data.seckeys[i - 1], &data.seckeys[i - 1]);
            rustsecp256k1_v0_1_1_scalar_add(&data.seckeys[i], &data.seckeys[i - 1], &data.seckeys[i - 1]);
        }
    }
    rustsecp256k1_v0_1_0_ge_set_all_gej_var(data.pubkeys, pubkeys_gej, POINTS);
    rustsecp256k1_v0_1_1_ge_set_all_gej_var(data.pubkeys, pubkeys_gej, POINTS);
    free(pubkeys_gej);

    for (i = 1; i <= 8; ++i) {
@ -194,9 +194,9 @@ int main(int argc, char **argv) {
        }
    }
    if (data.scratch != NULL) {
        rustsecp256k1_v0_1_0_scratch_space_destroy(data.ctx, data.scratch);
        rustsecp256k1_v0_1_1_scratch_space_destroy(data.ctx, data.scratch);
    }
    rustsecp256k1_v0_1_0_context_destroy(data.ctx);
    rustsecp256k1_v0_1_1_context_destroy(data.ctx);
    free(data.scalars);
    free(data.pubkeys);
    free(data.seckeys);

@ -19,10 +19,10 @@
|
||||||
#include "secp256k1.c"
|
#include "secp256k1.c"
|
||||||
|
|
||||||
typedef struct {
|
typedef struct {
|
||||||
rustsecp256k1_v0_1_0_scalar scalar_x, scalar_y;
|
rustsecp256k1_v0_1_1_scalar scalar_x, scalar_y;
|
||||||
rustsecp256k1_v0_1_0_fe fe_x, fe_y;
|
rustsecp256k1_v0_1_1_fe fe_x, fe_y;
|
||||||
rustsecp256k1_v0_1_0_ge ge_x, ge_y;
|
rustsecp256k1_v0_1_1_ge ge_x, ge_y;
|
||||||
rustsecp256k1_v0_1_0_gej gej_x, gej_y;
|
rustsecp256k1_v0_1_1_gej gej_x, gej_y;
|
||||||
unsigned char data[64];
|
unsigned char data[64];
|
||||||
int wnaf[256];
|
int wnaf[256];
|
||||||
} bench_inv;
|
} bench_inv;
|
||||||
|
@ -44,14 +44,14 @@ void bench_setup(void* arg) {
|
||||||
0x11, 0x15, 0x17, 0x1b, 0x1d, 0xb1, 0xbf, 0xd3
|
0x11, 0x15, 0x17, 0x1b, 0x1d, 0xb1, 0xbf, 0xd3
|
||||||
};
|
};
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&data->scalar_x, init_x, NULL);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&data->scalar_x, init_x, NULL);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&data->scalar_y, init_y, NULL);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&data->scalar_y, init_y, NULL);
|
||||||
rustsecp256k1_v0_1_0_fe_set_b32(&data->fe_x, init_x);
|
rustsecp256k1_v0_1_1_fe_set_b32(&data->fe_x, init_x);
|
||||||
rustsecp256k1_v0_1_0_fe_set_b32(&data->fe_y, init_y);
|
rustsecp256k1_v0_1_1_fe_set_b32(&data->fe_y, init_y);
|
||||||
CHECK(rustsecp256k1_v0_1_0_ge_set_xo_var(&data->ge_x, &data->fe_x, 0));
|
CHECK(rustsecp256k1_v0_1_1_ge_set_xo_var(&data->ge_x, &data->fe_x, 0));
|
||||||
CHECK(rustsecp256k1_v0_1_0_ge_set_xo_var(&data->ge_y, &data->fe_y, 1));
|
CHECK(rustsecp256k1_v0_1_1_ge_set_xo_var(&data->ge_y, &data->fe_y, 1));
|
||||||
rustsecp256k1_v0_1_0_gej_set_ge(&data->gej_x, &data->ge_x);
|
rustsecp256k1_v0_1_1_gej_set_ge(&data->gej_x, &data->ge_x);
|
||||||
rustsecp256k1_v0_1_0_gej_set_ge(&data->gej_y, &data->ge_y);
|
rustsecp256k1_v0_1_1_gej_set_ge(&data->gej_y, &data->ge_y);
|
||||||
memcpy(data->data, init_x, 32);
|
memcpy(data->data, init_x, 32);
|
||||||
memcpy(data->data + 32, init_y, 32);
|
memcpy(data->data + 32, init_y, 32);
|
||||||
}
|
}
|
||||||
|
@ -61,7 +61,7 @@ void bench_scalar_add(void* arg) {
|
||||||
bench_inv *data = (bench_inv*)arg;
|
bench_inv *data = (bench_inv*)arg;
|
||||||
|
|
||||||
for (i = 0; i < 2000000; i++) {
|
for (i = 0; i < 2000000; i++) {
|
||||||
rustsecp256k1_v0_1_0_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
|
rustsecp256k1_v0_1_1_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -70,7 +70,7 @@ void bench_scalar_negate(void* arg) {
|
||||||
bench_inv *data = (bench_inv*)arg;
|
bench_inv *data = (bench_inv*)arg;
|
||||||
|
|
||||||
for (i = 0; i < 2000000; i++) {
|
for (i = 0; i < 2000000; i++) {
|
||||||
rustsecp256k1_v0_1_0_scalar_negate(&data->scalar_x, &data->scalar_x);
|
rustsecp256k1_v0_1_1_scalar_negate(&data->scalar_x, &data->scalar_x);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -79,7 +79,7 @@ void bench_scalar_sqr(void* arg) {
|
||||||
bench_inv *data = (bench_inv*)arg;
|
bench_inv *data = (bench_inv*)arg;
|
||||||
|
|
||||||
for (i = 0; i < 200000; i++) {
|
for (i = 0; i < 200000; i++) {
|
||||||
rustsecp256k1_v0_1_0_scalar_sqr(&data->scalar_x, &data->scalar_x);
|
rustsecp256k1_v0_1_1_scalar_sqr(&data->scalar_x, &data->scalar_x);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -88,7 +88,7 @@ void bench_scalar_mul(void* arg) {
|
||||||
bench_inv *data = (bench_inv*)arg;
|
bench_inv *data = (bench_inv*)arg;
|
||||||
|
|
||||||
for (i = 0; i < 200000; i++) {
|
for (i = 0; i < 200000; i++) {
|
||||||
rustsecp256k1_v0_1_0_scalar_mul(&data->scalar_x, &data->scalar_x, &data->scalar_y);
|
rustsecp256k1_v0_1_1_scalar_mul(&data->scalar_x, &data->scalar_x, &data->scalar_y);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -98,9 +98,9 @@ void bench_scalar_split(void* arg) {
|
||||||
bench_inv *data = (bench_inv*)arg;
|
bench_inv *data = (bench_inv*)arg;
|
||||||
|
|
||||||
for (i = 0; i < 20000; i++) {
|
for (i = 0; i < 20000; i++) {
|
||||||
rustsecp256k1_v0_1_0_scalar l, r;
|
rustsecp256k1_v0_1_1_scalar l, r;
|
||||||
rustsecp256k1_v0_1_0_scalar_split_lambda(&l, &r, &data->scalar_x);
|
rustsecp256k1_v0_1_1_scalar_split_lambda(&l, &r, &data->scalar_x);
|
||||||
rustsecp256k1_v0_1_0_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
|
rustsecp256k1_v0_1_1_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
@@ -110,8 +110,8 @@ void bench_scalar_inverse(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 2000; i++) {
-rustsecp256k1_v0_1_0_scalar_inverse(&data->scalar_x, &data->scalar_x);
-rustsecp256k1_v0_1_0_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
+rustsecp256k1_v0_1_1_scalar_inverse(&data->scalar_x, &data->scalar_x);
+rustsecp256k1_v0_1_1_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
}
}

@@ -120,8 +120,8 @@ void bench_scalar_inverse_var(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 2000; i++) {
-rustsecp256k1_v0_1_0_scalar_inverse_var(&data->scalar_x, &data->scalar_x);
-rustsecp256k1_v0_1_0_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
+rustsecp256k1_v0_1_1_scalar_inverse_var(&data->scalar_x, &data->scalar_x);
+rustsecp256k1_v0_1_1_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
}
}

@@ -130,7 +130,7 @@ void bench_field_normalize(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 2000000; i++) {
-rustsecp256k1_v0_1_0_fe_normalize(&data->fe_x);
+rustsecp256k1_v0_1_1_fe_normalize(&data->fe_x);
}
}

@@ -139,7 +139,7 @@ void bench_field_normalize_weak(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 2000000; i++) {
-rustsecp256k1_v0_1_0_fe_normalize_weak(&data->fe_x);
+rustsecp256k1_v0_1_1_fe_normalize_weak(&data->fe_x);
}
}

@@ -148,7 +148,7 @@ void bench_field_mul(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 200000; i++) {
-rustsecp256k1_v0_1_0_fe_mul(&data->fe_x, &data->fe_x, &data->fe_y);
+rustsecp256k1_v0_1_1_fe_mul(&data->fe_x, &data->fe_x, &data->fe_y);
}
}

@@ -157,7 +157,7 @@ void bench_field_sqr(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 200000; i++) {
-rustsecp256k1_v0_1_0_fe_sqr(&data->fe_x, &data->fe_x);
+rustsecp256k1_v0_1_1_fe_sqr(&data->fe_x, &data->fe_x);
}
}

@@ -166,8 +166,8 @@ void bench_field_inverse(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 20000; i++) {
-rustsecp256k1_v0_1_0_fe_inv(&data->fe_x, &data->fe_x);
-rustsecp256k1_v0_1_0_fe_add(&data->fe_x, &data->fe_y);
+rustsecp256k1_v0_1_1_fe_inv(&data->fe_x, &data->fe_x);
+rustsecp256k1_v0_1_1_fe_add(&data->fe_x, &data->fe_y);
}
}

@@ -176,20 +176,20 @@ void bench_field_inverse_var(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 20000; i++) {
-rustsecp256k1_v0_1_0_fe_inv_var(&data->fe_x, &data->fe_x);
-rustsecp256k1_v0_1_0_fe_add(&data->fe_x, &data->fe_y);
+rustsecp256k1_v0_1_1_fe_inv_var(&data->fe_x, &data->fe_x);
+rustsecp256k1_v0_1_1_fe_add(&data->fe_x, &data->fe_y);
}
}

void bench_field_sqrt(void* arg) {
int i;
bench_inv *data = (bench_inv*)arg;
-rustsecp256k1_v0_1_0_fe t;
+rustsecp256k1_v0_1_1_fe t;

for (i = 0; i < 20000; i++) {
t = data->fe_x;
-rustsecp256k1_v0_1_0_fe_sqrt(&data->fe_x, &t);
-rustsecp256k1_v0_1_0_fe_add(&data->fe_x, &data->fe_y);
+rustsecp256k1_v0_1_1_fe_sqrt(&data->fe_x, &t);
+rustsecp256k1_v0_1_1_fe_add(&data->fe_x, &data->fe_y);
}
}
@@ -198,7 +198,7 @@ void bench_group_double_var(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 200000; i++) {
-rustsecp256k1_v0_1_0_gej_double_var(&data->gej_x, &data->gej_x, NULL);
+rustsecp256k1_v0_1_1_gej_double_var(&data->gej_x, &data->gej_x, NULL);
}
}

@@ -207,7 +207,7 @@ void bench_group_add_var(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 200000; i++) {
-rustsecp256k1_v0_1_0_gej_add_var(&data->gej_x, &data->gej_x, &data->gej_y, NULL);
+rustsecp256k1_v0_1_1_gej_add_var(&data->gej_x, &data->gej_x, &data->gej_y, NULL);
}
}

@@ -216,7 +216,7 @@ void bench_group_add_affine(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 200000; i++) {
-rustsecp256k1_v0_1_0_gej_add_ge(&data->gej_x, &data->gej_x, &data->ge_y);
+rustsecp256k1_v0_1_1_gej_add_ge(&data->gej_x, &data->gej_x, &data->ge_y);
}
}

@@ -225,7 +225,7 @@ void bench_group_add_affine_var(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 200000; i++) {
-rustsecp256k1_v0_1_0_gej_add_ge_var(&data->gej_x, &data->gej_x, &data->ge_y, NULL);
+rustsecp256k1_v0_1_1_gej_add_ge_var(&data->gej_x, &data->gej_x, &data->ge_y, NULL);
}
}

@@ -234,7 +234,7 @@ void bench_group_jacobi_var(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 20000; i++) {
-rustsecp256k1_v0_1_0_gej_has_quad_y_var(&data->gej_x);
+rustsecp256k1_v0_1_1_gej_has_quad_y_var(&data->gej_x);
}
}

@@ -243,8 +243,8 @@ void bench_ecmult_wnaf(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 20000; i++) {
-rustsecp256k1_v0_1_0_ecmult_wnaf(data->wnaf, 256, &data->scalar_x, WINDOW_A);
-rustsecp256k1_v0_1_0_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
+rustsecp256k1_v0_1_1_ecmult_wnaf(data->wnaf, 256, &data->scalar_x, WINDOW_A);
+rustsecp256k1_v0_1_1_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
}
}

@@ -253,8 +253,8 @@ void bench_wnaf_const(void* arg) {
bench_inv *data = (bench_inv*)arg;

for (i = 0; i < 20000; i++) {
-rustsecp256k1_v0_1_0_wnaf_const(data->wnaf, &data->scalar_x, WINDOW_A, 256);
-rustsecp256k1_v0_1_0_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
+rustsecp256k1_v0_1_1_wnaf_const(data->wnaf, &data->scalar_x, WINDOW_A, 256);
+rustsecp256k1_v0_1_1_scalar_add(&data->scalar_x, &data->scalar_x, &data->scalar_y);
}
}
@@ -262,35 +262,35 @@ void bench_wnaf_const(void* arg) {
void bench_sha256(void* arg) {
int i;
bench_inv *data = (bench_inv*)arg;
-rustsecp256k1_v0_1_0_sha256 sha;
+rustsecp256k1_v0_1_1_sha256 sha;

for (i = 0; i < 20000; i++) {
-rustsecp256k1_v0_1_0_sha256_initialize(&sha);
-rustsecp256k1_v0_1_0_sha256_write(&sha, data->data, 32);
-rustsecp256k1_v0_1_0_sha256_finalize(&sha, data->data);
+rustsecp256k1_v0_1_1_sha256_initialize(&sha);
+rustsecp256k1_v0_1_1_sha256_write(&sha, data->data, 32);
+rustsecp256k1_v0_1_1_sha256_finalize(&sha, data->data);
}
}

void bench_hmac_sha256(void* arg) {
int i;
bench_inv *data = (bench_inv*)arg;
-rustsecp256k1_v0_1_0_hmac_sha256 hmac;
+rustsecp256k1_v0_1_1_hmac_sha256 hmac;

for (i = 0; i < 20000; i++) {
-rustsecp256k1_v0_1_0_hmac_sha256_initialize(&hmac, data->data, 32);
-rustsecp256k1_v0_1_0_hmac_sha256_write(&hmac, data->data, 32);
-rustsecp256k1_v0_1_0_hmac_sha256_finalize(&hmac, data->data);
+rustsecp256k1_v0_1_1_hmac_sha256_initialize(&hmac, data->data, 32);
+rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, data->data, 32);
+rustsecp256k1_v0_1_1_hmac_sha256_finalize(&hmac, data->data);
}
}

void bench_rfc6979_hmac_sha256(void* arg) {
int i;
bench_inv *data = (bench_inv*)arg;
-rustsecp256k1_v0_1_0_rfc6979_hmac_sha256 rng;
+rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 rng;

for (i = 0; i < 20000; i++) {
-rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_initialize(&rng, data->data, 64);
-rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_generate(&rng, data->data, 32);
+rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_initialize(&rng, data->data, 64);
+rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_generate(&rng, data->data, 32);
}
}
@@ -298,7 +298,7 @@ void bench_context_verify(void* arg) {
int i;
(void)arg;
for (i = 0; i < 20; i++) {
-rustsecp256k1_v0_1_0_context_destroy(rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_VERIFY));
+rustsecp256k1_v0_1_1_context_destroy(rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_VERIFY));
}
}

@@ -306,7 +306,7 @@ void bench_context_sign(void* arg) {
int i;
(void)arg;
for (i = 0; i < 200; i++) {
-rustsecp256k1_v0_1_0_context_destroy(rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_SIGN));
+rustsecp256k1_v0_1_1_context_destroy(rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_SIGN));
}
}

@@ -314,14 +314,14 @@ void bench_context_sign(void* arg) {
void bench_num_jacobi(void* arg) {
int i;
bench_inv *data = (bench_inv*)arg;
-rustsecp256k1_v0_1_0_num nx, norder;
+rustsecp256k1_v0_1_1_num nx, norder;

-rustsecp256k1_v0_1_0_scalar_get_num(&nx, &data->scalar_x);
-rustsecp256k1_v0_1_0_scalar_order_get_num(&norder);
-rustsecp256k1_v0_1_0_scalar_get_num(&norder, &data->scalar_y);
+rustsecp256k1_v0_1_1_scalar_get_num(&nx, &data->scalar_x);
+rustsecp256k1_v0_1_1_scalar_order_get_num(&norder);
+rustsecp256k1_v0_1_1_scalar_get_num(&norder, &data->scalar_y);

for (i = 0; i < 200000; i++) {
-rustsecp256k1_v0_1_0_num_jacobi(&nx, &norder);
+rustsecp256k1_v0_1_1_num_jacobi(&nx, &norder);
}
}
#endif
@@ -10,7 +10,7 @@
#include "bench.h"

typedef struct {
-rustsecp256k1_v0_1_0_context *ctx;
+rustsecp256k1_v0_1_1_context *ctx;
unsigned char msg[32];
unsigned char sig[64];
} bench_recover_data;

@@ -18,16 +18,16 @@ typedef struct {
void bench_recover(void* arg) {
int i;
bench_recover_data *data = (bench_recover_data*)arg;
-rustsecp256k1_v0_1_0_pubkey pubkey;
+rustsecp256k1_v0_1_1_pubkey pubkey;
unsigned char pubkeyc[33];

for (i = 0; i < 20000; i++) {
int j;
size_t pubkeylen = 33;
-rustsecp256k1_v0_1_0_ecdsa_recoverable_signature sig;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(data->ctx, &sig, data->sig, i % 2));
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recover(data->ctx, &pubkey, &sig, data->msg));
-CHECK(rustsecp256k1_v0_1_0_ec_pubkey_serialize(data->ctx, pubkeyc, &pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED));
+rustsecp256k1_v0_1_1_ecdsa_recoverable_signature sig;
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(data->ctx, &sig, data->sig, i % 2));
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recover(data->ctx, &pubkey, &sig, data->msg));
+CHECK(rustsecp256k1_v0_1_1_ec_pubkey_serialize(data->ctx, pubkeyc, &pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED));
for (j = 0; j < 32; j++) {
data->sig[j + 32] = data->msg[j]; /* Move former message to S. */
data->msg[j] = data->sig[j]; /* Move former R to message. */

@@ -51,10 +51,10 @@ void bench_recover_setup(void* arg) {
int main(void) {
bench_recover_data data;

-data.ctx = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_VERIFY);
+data.ctx = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_VERIFY);

run_benchmark("ecdsa_recover", bench_recover, bench_recover_setup, NULL, &data, 10, 20000);

-rustsecp256k1_v0_1_0_context_destroy(data.ctx);
+rustsecp256k1_v0_1_1_context_destroy(data.ctx);
return 0;
}
@@ -9,7 +9,7 @@
#include "bench.h"

typedef struct {
-rustsecp256k1_v0_1_0_context* ctx;
+rustsecp256k1_v0_1_1_context* ctx;
unsigned char msg[32];
unsigned char key[32];
} bench_sign;

@@ -34,9 +34,9 @@ static void bench_sign_run(void* arg) {
for (i = 0; i < 20000; i++) {
size_t siglen = 74;
int j;
-rustsecp256k1_v0_1_0_ecdsa_signature signature;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_sign(data->ctx, &signature, data->msg, data->key, NULL, NULL));
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_serialize_der(data->ctx, sig, &siglen, &signature));
+rustsecp256k1_v0_1_1_ecdsa_signature signature;
+CHECK(rustsecp256k1_v0_1_1_ecdsa_sign(data->ctx, &signature, data->msg, data->key, NULL, NULL));
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_serialize_der(data->ctx, sig, &siglen, &signature));
for (j = 0; j < 32; j++) {
data->msg[j] = sig[j];
data->key[j] = sig[j + 32];

@@ -47,10 +47,10 @@ static void bench_sign_run(void* arg) {
int main(void) {
bench_sign data;

-data.ctx = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_SIGN);
+data.ctx = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_SIGN);

run_benchmark("ecdsa_sign", bench_sign_run, bench_sign_setup, NULL, &data, 10, 20000);

-rustsecp256k1_v0_1_0_context_destroy(data.ctx);
+rustsecp256k1_v0_1_1_context_destroy(data.ctx);
return 0;
}
@@ -18,7 +18,7 @@
#endif

typedef struct {
-rustsecp256k1_v0_1_0_context *ctx;
+rustsecp256k1_v0_1_1_context *ctx;
unsigned char msg[32];
unsigned char key[32];
unsigned char sig[72];

@@ -35,14 +35,14 @@ static void benchmark_verify(void* arg) {
benchmark_verify_t* data = (benchmark_verify_t*)arg;

for (i = 0; i < 20000; i++) {
-rustsecp256k1_v0_1_0_pubkey pubkey;
-rustsecp256k1_v0_1_0_ecdsa_signature sig;
+rustsecp256k1_v0_1_1_pubkey pubkey;
+rustsecp256k1_v0_1_1_ecdsa_signature sig;
data->sig[data->siglen - 1] ^= (i & 0xFF);
data->sig[data->siglen - 2] ^= ((i >> 8) & 0xFF);
data->sig[data->siglen - 3] ^= ((i >> 16) & 0xFF);
-CHECK(rustsecp256k1_v0_1_0_ec_pubkey_parse(data->ctx, &pubkey, data->pubkey, data->pubkeylen) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(data->ctx, &sig, data->sig, data->siglen) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_verify(data->ctx, &sig, data->msg, &pubkey) == (i == 0));
+CHECK(rustsecp256k1_v0_1_1_ec_pubkey_parse(data->ctx, &pubkey, data->pubkey, data->pubkeylen) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(data->ctx, &sig, data->sig, data->siglen) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_verify(data->ctx, &sig, data->msg, &pubkey) == (i == 0));
data->sig[data->siglen - 1] ^= (i & 0xFF);
data->sig[data->siglen - 2] ^= ((i >> 8) & 0xFF);
data->sig[data->siglen - 3] ^= ((i >> 16) & 0xFF);

@@ -81,11 +81,11 @@ static void benchmark_verify_openssl(void* arg) {

int main(void) {
int i;
-rustsecp256k1_v0_1_0_pubkey pubkey;
-rustsecp256k1_v0_1_0_ecdsa_signature sig;
+rustsecp256k1_v0_1_1_pubkey pubkey;
+rustsecp256k1_v0_1_1_ecdsa_signature sig;
benchmark_verify_t data;

-data.ctx = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);
+data.ctx = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);

for (i = 0; i < 32; i++) {
data.msg[i] = 1 + i;

@@ -94,11 +94,11 @@ int main(void) {
data.key[i] = 33 + i;
}
data.siglen = 72;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_sign(data.ctx, &sig, data.msg, data.key, NULL, NULL));
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_serialize_der(data.ctx, data.sig, &data.siglen, &sig));
-CHECK(rustsecp256k1_v0_1_0_ec_pubkey_create(data.ctx, &pubkey, data.key));
+CHECK(rustsecp256k1_v0_1_1_ecdsa_sign(data.ctx, &sig, data.msg, data.key, NULL, NULL));
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_serialize_der(data.ctx, data.sig, &data.siglen, &sig));
+CHECK(rustsecp256k1_v0_1_1_ec_pubkey_create(data.ctx, &pubkey, data.key));
data.pubkeylen = 33;
-CHECK(rustsecp256k1_v0_1_0_ec_pubkey_serialize(data.ctx, data.pubkey, &data.pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED) == 1);
+CHECK(rustsecp256k1_v0_1_1_ec_pubkey_serialize(data.ctx, data.pubkey, &data.pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED) == 1);

run_benchmark("ecdsa_verify", benchmark_verify, NULL, NULL, &data, 10, 20000);
#ifdef ENABLE_OPENSSL_TESTS

@@ -107,6 +107,6 @@ int main(void) {
EC_GROUP_free(data.ec_group);
#endif

-rustsecp256k1_v0_1_0_context_destroy(data.ctx);
+rustsecp256k1_v0_1_1_context_destroy(data.ctx);
return 0;
}
@@ -13,9 +13,9 @@
#include "group.h"
#include "ecmult.h"

-static int rustsecp256k1_v0_1_0_ecdsa_sig_parse(rustsecp256k1_v0_1_0_scalar *r, rustsecp256k1_v0_1_0_scalar *s, const unsigned char *sig, size_t size);
-static int rustsecp256k1_v0_1_0_ecdsa_sig_serialize(unsigned char *sig, size_t *size, const rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *s);
-static int rustsecp256k1_v0_1_0_ecdsa_sig_verify(const rustsecp256k1_v0_1_0_ecmult_context *ctx, const rustsecp256k1_v0_1_0_scalar* r, const rustsecp256k1_v0_1_0_scalar* s, const rustsecp256k1_v0_1_0_ge *pubkey, const rustsecp256k1_v0_1_0_scalar *message);
-static int rustsecp256k1_v0_1_0_ecdsa_sig_sign(const rustsecp256k1_v0_1_0_ecmult_gen_context *ctx, rustsecp256k1_v0_1_0_scalar* r, rustsecp256k1_v0_1_0_scalar* s, const rustsecp256k1_v0_1_0_scalar *seckey, const rustsecp256k1_v0_1_0_scalar *message, const rustsecp256k1_v0_1_0_scalar *nonce, int *recid);
+static int rustsecp256k1_v0_1_1_ecdsa_sig_parse(rustsecp256k1_v0_1_1_scalar *r, rustsecp256k1_v0_1_1_scalar *s, const unsigned char *sig, size_t size);
+static int rustsecp256k1_v0_1_1_ecdsa_sig_serialize(unsigned char *sig, size_t *size, const rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *s);
+static int rustsecp256k1_v0_1_1_ecdsa_sig_verify(const rustsecp256k1_v0_1_1_ecmult_context *ctx, const rustsecp256k1_v0_1_1_scalar* r, const rustsecp256k1_v0_1_1_scalar* s, const rustsecp256k1_v0_1_1_ge *pubkey, const rustsecp256k1_v0_1_1_scalar *message);
+static int rustsecp256k1_v0_1_1_ecdsa_sig_sign(const rustsecp256k1_v0_1_1_ecmult_gen_context *ctx, rustsecp256k1_v0_1_1_scalar* r, rustsecp256k1_v0_1_1_scalar* s, const rustsecp256k1_v0_1_1_scalar *seckey, const rustsecp256k1_v0_1_1_scalar *message, const rustsecp256k1_v0_1_1_scalar *nonce, int *recid);

#endif /* SECP256K1_ECDSA_H */
@@ -28,7 +28,7 @@
 * sage: '%x' % (EllipticCurve ([F (a), F (b)]).order())
 * 'fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141'
 */
-static const rustsecp256k1_v0_1_0_fe rustsecp256k1_v0_1_0_ecdsa_const_order_as_fe = SECP256K1_FE_CONST(
+static const rustsecp256k1_v0_1_1_fe rustsecp256k1_v0_1_1_ecdsa_const_order_as_fe = SECP256K1_FE_CONST(
0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFEUL,
0xBAAEDCE6UL, 0xAF48A03BUL, 0xBFD25E8CUL, 0xD0364141UL
);

@@ -42,11 +42,11 @@ static const rustsecp256k1_v0_1_0_fe rustsecp256k1_v0_1_0_ecdsa_const_order_as_f
 * sage: '%x' % (p - EllipticCurve ([F (a), F (b)]).order())
 * '14551231950b75fc4402da1722fc9baee'
 */
-static const rustsecp256k1_v0_1_0_fe rustsecp256k1_v0_1_0_ecdsa_const_p_minus_order = SECP256K1_FE_CONST(
+static const rustsecp256k1_v0_1_1_fe rustsecp256k1_v0_1_1_ecdsa_const_p_minus_order = SECP256K1_FE_CONST(
0, 0, 0, 1, 0x45512319UL, 0x50B75FC4UL, 0x402DA172UL, 0x2FC9BAEEUL
);

-static int rustsecp256k1_v0_1_0_der_read_len(const unsigned char **sigp, const unsigned char *sigend) {
+static int rustsecp256k1_v0_1_1_der_read_len(const unsigned char **sigp, const unsigned char *sigend) {
int lenleft, b1;
size_t ret = 0;
if (*sigp >= sigend) {

@@ -96,7 +96,7 @@ static int rustsecp256k1_v0_1_0_der_read_len(const unsigned char **sigp, const u
return ret;
}

-static int rustsecp256k1_v0_1_0_der_parse_integer(rustsecp256k1_v0_1_0_scalar *r, const unsigned char **sig, const unsigned char *sigend) {
+static int rustsecp256k1_v0_1_1_der_parse_integer(rustsecp256k1_v0_1_1_scalar *r, const unsigned char **sig, const unsigned char *sigend) {
int overflow = 0;
unsigned char ra[32] = {0};
int rlen;

@@ -106,7 +106,7 @@ static int rustsecp256k1_v0_1_0_der_parse_integer(rustsecp256k1_v0_1_0_scalar *r
return 0;
}
(*sig)++;
-rlen = rustsecp256k1_v0_1_0_der_read_len(sig, sigend);
+rlen = rustsecp256k1_v0_1_1_der_read_len(sig, sigend);
if (rlen <= 0 || (*sig) + rlen > sigend) {
/* Exceeds bounds or not at least length 1 (X.690-0207 8.3.1). */
return 0;

@@ -133,23 +133,23 @@ static int rustsecp256k1_v0_1_0_der_parse_integer(rustsecp256k1_v0_1_0_scalar *r
}
if (!overflow) {
memcpy(ra + 32 - rlen, *sig, rlen);
-rustsecp256k1_v0_1_0_scalar_set_b32(r, ra, &overflow);
+rustsecp256k1_v0_1_1_scalar_set_b32(r, ra, &overflow);
}
if (overflow) {
-rustsecp256k1_v0_1_0_scalar_set_int(r, 0);
+rustsecp256k1_v0_1_1_scalar_set_int(r, 0);
}
(*sig) += rlen;
return 1;
}

-static int rustsecp256k1_v0_1_0_ecdsa_sig_parse(rustsecp256k1_v0_1_0_scalar *rr, rustsecp256k1_v0_1_0_scalar *rs, const unsigned char *sig, size_t size) {
+static int rustsecp256k1_v0_1_1_ecdsa_sig_parse(rustsecp256k1_v0_1_1_scalar *rr, rustsecp256k1_v0_1_1_scalar *rs, const unsigned char *sig, size_t size) {
const unsigned char *sigend = sig + size;
int rlen;
if (sig == sigend || *(sig++) != 0x30) {
/* The encoding doesn't start with a constructed sequence (X.690-0207 8.9.1). */
return 0;
}
-rlen = rustsecp256k1_v0_1_0_der_read_len(&sig, sigend);
+rlen = rustsecp256k1_v0_1_1_der_read_len(&sig, sigend);
if (rlen < 0 || sig + rlen > sigend) {
/* Tuple exceeds bounds */
return 0;

@@ -159,10 +159,10 @@ static int rustsecp256k1_v0_1_0_ecdsa_sig_parse(rustsecp256k1_v0_1_0_scalar *rr,
return 0;
}

-if (!rustsecp256k1_v0_1_0_der_parse_integer(rr, &sig, sigend)) {
+if (!rustsecp256k1_v0_1_1_der_parse_integer(rr, &sig, sigend)) {
return 0;
}
-if (!rustsecp256k1_v0_1_0_der_parse_integer(rs, &sig, sigend)) {
+if (!rustsecp256k1_v0_1_1_der_parse_integer(rs, &sig, sigend)) {
return 0;
}

@@ -174,12 +174,12 @@ static int rustsecp256k1_v0_1_0_ecdsa_sig_parse(rustsecp256k1_v0_1_0_scalar *rr,
return 1;
}

-static int rustsecp256k1_v0_1_0_ecdsa_sig_serialize(unsigned char *sig, size_t *size, const rustsecp256k1_v0_1_0_scalar* ar, const rustsecp256k1_v0_1_0_scalar* as) {
+static int rustsecp256k1_v0_1_1_ecdsa_sig_serialize(unsigned char *sig, size_t *size, const rustsecp256k1_v0_1_1_scalar* ar, const rustsecp256k1_v0_1_1_scalar* as) {
unsigned char r[33] = {0}, s[33] = {0};
unsigned char *rp = r, *sp = s;
size_t lenR = 33, lenS = 33;
-rustsecp256k1_v0_1_0_scalar_get_b32(&r[1], ar);
-rustsecp256k1_v0_1_0_scalar_get_b32(&s[1], as);
+rustsecp256k1_v0_1_1_scalar_get_b32(&r[1], ar);
+rustsecp256k1_v0_1_1_scalar_get_b32(&s[1], as);
while (lenR > 1 && rp[0] == 0 && rp[1] < 0x80) { lenR--; rp++; }
while (lenS > 1 && sp[0] == 0 && sp[1] < 0x80) { lenS--; sp++; }
if (*size < 6+lenS+lenR) {
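For orientation, the `6+lenS+lenR` bound a few lines above matches the standard DER framing that this serializer emits; a sketch of the byte layout (plain ASN.1, not a library-specific format, and assuming both lengths fit in a single byte):

0x30 <len>          SEQUENCE header   2 bytes
  0x02 <lenR> <R>   INTEGER r         2 + lenR bytes
  0x02 <lenS> <S>   INTEGER s         2 + lenS bytes

so the output buffer must hold at least 6 + lenR + lenS bytes.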
@@ -198,42 +198,42 @@ static int rustsecp256k1_v0_1_0_ecdsa_sig_serialize(unsigned char *sig, size_t *
return 1;
}

-static int rustsecp256k1_v0_1_0_ecdsa_sig_verify(const rustsecp256k1_v0_1_0_ecmult_context *ctx, const rustsecp256k1_v0_1_0_scalar *sigr, const rustsecp256k1_v0_1_0_scalar *sigs, const rustsecp256k1_v0_1_0_ge *pubkey, const rustsecp256k1_v0_1_0_scalar *message) {
+static int rustsecp256k1_v0_1_1_ecdsa_sig_verify(const rustsecp256k1_v0_1_1_ecmult_context *ctx, const rustsecp256k1_v0_1_1_scalar *sigr, const rustsecp256k1_v0_1_1_scalar *sigs, const rustsecp256k1_v0_1_1_ge *pubkey, const rustsecp256k1_v0_1_1_scalar *message) {
unsigned char c[32];
-rustsecp256k1_v0_1_0_scalar sn, u1, u2;
+rustsecp256k1_v0_1_1_scalar sn, u1, u2;
#if !defined(EXHAUSTIVE_TEST_ORDER)
-rustsecp256k1_v0_1_0_fe xr;
+rustsecp256k1_v0_1_1_fe xr;
#endif
-rustsecp256k1_v0_1_0_gej pubkeyj;
-rustsecp256k1_v0_1_0_gej pr;
+rustsecp256k1_v0_1_1_gej pubkeyj;
+rustsecp256k1_v0_1_1_gej pr;

-if (rustsecp256k1_v0_1_0_scalar_is_zero(sigr) || rustsecp256k1_v0_1_0_scalar_is_zero(sigs)) {
+if (rustsecp256k1_v0_1_1_scalar_is_zero(sigr) || rustsecp256k1_v0_1_1_scalar_is_zero(sigs)) {
return 0;
}

-rustsecp256k1_v0_1_0_scalar_inverse_var(&sn, sigs);
-rustsecp256k1_v0_1_0_scalar_mul(&u1, &sn, message);
-rustsecp256k1_v0_1_0_scalar_mul(&u2, &sn, sigr);
-rustsecp256k1_v0_1_0_gej_set_ge(&pubkeyj, pubkey);
-rustsecp256k1_v0_1_0_ecmult(ctx, &pr, &pubkeyj, &u2, &u1);
-if (rustsecp256k1_v0_1_0_gej_is_infinity(&pr)) {
+rustsecp256k1_v0_1_1_scalar_inverse_var(&sn, sigs);
+rustsecp256k1_v0_1_1_scalar_mul(&u1, &sn, message);
+rustsecp256k1_v0_1_1_scalar_mul(&u2, &sn, sigr);
+rustsecp256k1_v0_1_1_gej_set_ge(&pubkeyj, pubkey);
+rustsecp256k1_v0_1_1_ecmult(ctx, &pr, &pubkeyj, &u2, &u1);
+if (rustsecp256k1_v0_1_1_gej_is_infinity(&pr)) {
return 0;
}

#if defined(EXHAUSTIVE_TEST_ORDER)
{
-rustsecp256k1_v0_1_0_scalar computed_r;
-rustsecp256k1_v0_1_0_ge pr_ge;
-rustsecp256k1_v0_1_0_ge_set_gej(&pr_ge, &pr);
-rustsecp256k1_v0_1_0_fe_normalize(&pr_ge.x);
+rustsecp256k1_v0_1_1_scalar computed_r;
+rustsecp256k1_v0_1_1_ge pr_ge;
+rustsecp256k1_v0_1_1_ge_set_gej(&pr_ge, &pr);
+rustsecp256k1_v0_1_1_fe_normalize(&pr_ge.x);

-rustsecp256k1_v0_1_0_fe_get_b32(c, &pr_ge.x);
-rustsecp256k1_v0_1_0_scalar_set_b32(&computed_r, c, NULL);
-return rustsecp256k1_v0_1_0_scalar_eq(sigr, &computed_r);
+rustsecp256k1_v0_1_1_fe_get_b32(c, &pr_ge.x);
+rustsecp256k1_v0_1_1_scalar_set_b32(&computed_r, c, NULL);
+return rustsecp256k1_v0_1_1_scalar_eq(sigr, &computed_r);
}
#else
-rustsecp256k1_v0_1_0_scalar_get_b32(c, sigr);
-rustsecp256k1_v0_1_0_fe_set_b32(&xr, c);
+rustsecp256k1_v0_1_1_scalar_get_b32(c, sigr);
+rustsecp256k1_v0_1_1_fe_set_b32(&xr, c);

/** We now have the recomputed R point in pr, and its claimed x coordinate (modulo n)
 * in xr. Naively, we would extract the x coordinate from pr (requiring a inversion modulo p),

@@ -249,18 +249,18 @@ static int rustsecp256k1_v0_1_0_ecdsa_sig_verify(const rustsecp256k1_v0_1_0_ecmu
 * <=> (xr * pr.z^2 mod p == pr.x) || (xr + n < p && (xr + n) * pr.z^2 mod p == pr.x)
 *
 * Thus, we can avoid the inversion, but we have to check both cases separately.
- * rustsecp256k1_v0_1_0_gej_eq_x implements the (xr * pr.z^2 mod p == pr.x) test.
+ * rustsecp256k1_v0_1_1_gej_eq_x implements the (xr * pr.z^2 mod p == pr.x) test.
 */
-if (rustsecp256k1_v0_1_0_gej_eq_x_var(&xr, &pr)) {
+if (rustsecp256k1_v0_1_1_gej_eq_x_var(&xr, &pr)) {
/* xr * pr.z^2 mod p == pr.x, so the signature is valid. */
return 1;
}
-if (rustsecp256k1_v0_1_0_fe_cmp_var(&xr, &rustsecp256k1_v0_1_0_ecdsa_const_p_minus_order) >= 0) {
+if (rustsecp256k1_v0_1_1_fe_cmp_var(&xr, &rustsecp256k1_v0_1_1_ecdsa_const_p_minus_order) >= 0) {
/* xr + n >= p, so we can skip testing the second case. */
return 0;
}
-rustsecp256k1_v0_1_0_fe_add(&xr, &rustsecp256k1_v0_1_0_ecdsa_const_order_as_fe);
-if (rustsecp256k1_v0_1_0_gej_eq_x_var(&xr, &pr)) {
+rustsecp256k1_v0_1_1_fe_add(&xr, &rustsecp256k1_v0_1_1_ecdsa_const_order_as_fe);
+if (rustsecp256k1_v0_1_1_gej_eq_x_var(&xr, &pr)) {
/* (xr + n) * pr.z^2 mod p == pr.x, so the signature is valid. */
return 1;
}
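Restating the comment above in equation form (m is the message scalar, (r, s) the signature, Q the public key, n the group order, p the field prime, and R = (X, Y, Z) the recomputed point in Jacobian coordinates):

u_1 = m \cdot s^{-1} \bmod n, \qquad u_2 = r \cdot s^{-1} \bmod n, \qquad R = u_1 G + u_2 Q

and the signature is accepted iff

(r \cdot Z^2 \equiv X \pmod{p}) \;\lor\; \bigl(r + n < p \;\wedge\; (r + n) \cdot Z^2 \equiv X \pmod{p}\bigr),

which is the usual "R.x mod n == r" test evaluated without inverting Z.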
@@ -268,41 +268,41 @@ static int rustsecp256k1_v0_1_0_ecdsa_sig_verify(const rustsecp256k1_v0_1_0_ecmu
#endif
}

-static int rustsecp256k1_v0_1_0_ecdsa_sig_sign(const rustsecp256k1_v0_1_0_ecmult_gen_context *ctx, rustsecp256k1_v0_1_0_scalar *sigr, rustsecp256k1_v0_1_0_scalar *sigs, const rustsecp256k1_v0_1_0_scalar *seckey, const rustsecp256k1_v0_1_0_scalar *message, const rustsecp256k1_v0_1_0_scalar *nonce, int *recid) {
+static int rustsecp256k1_v0_1_1_ecdsa_sig_sign(const rustsecp256k1_v0_1_1_ecmult_gen_context *ctx, rustsecp256k1_v0_1_1_scalar *sigr, rustsecp256k1_v0_1_1_scalar *sigs, const rustsecp256k1_v0_1_1_scalar *seckey, const rustsecp256k1_v0_1_1_scalar *message, const rustsecp256k1_v0_1_1_scalar *nonce, int *recid) {
unsigned char b[32];
-rustsecp256k1_v0_1_0_gej rp;
-rustsecp256k1_v0_1_0_ge r;
-rustsecp256k1_v0_1_0_scalar n;
+rustsecp256k1_v0_1_1_gej rp;
+rustsecp256k1_v0_1_1_ge r;
+rustsecp256k1_v0_1_1_scalar n;
int overflow = 0;

-rustsecp256k1_v0_1_0_ecmult_gen(ctx, &rp, nonce);
-rustsecp256k1_v0_1_0_ge_set_gej(&r, &rp);
-rustsecp256k1_v0_1_0_fe_normalize(&r.x);
-rustsecp256k1_v0_1_0_fe_normalize(&r.y);
-rustsecp256k1_v0_1_0_fe_get_b32(b, &r.x);
-rustsecp256k1_v0_1_0_scalar_set_b32(sigr, b, &overflow);
+rustsecp256k1_v0_1_1_ecmult_gen(ctx, &rp, nonce);
+rustsecp256k1_v0_1_1_ge_set_gej(&r, &rp);
+rustsecp256k1_v0_1_1_fe_normalize(&r.x);
+rustsecp256k1_v0_1_1_fe_normalize(&r.y);
+rustsecp256k1_v0_1_1_fe_get_b32(b, &r.x);
+rustsecp256k1_v0_1_1_scalar_set_b32(sigr, b, &overflow);
/* These two conditions should be checked before calling */
-VERIFY_CHECK(!rustsecp256k1_v0_1_0_scalar_is_zero(sigr));
+VERIFY_CHECK(!rustsecp256k1_v0_1_1_scalar_is_zero(sigr));
VERIFY_CHECK(overflow == 0);

if (recid) {
/* The overflow condition is cryptographically unreachable as hitting it requires finding the discrete log
 * of some P where P.x >= order, and only 1 in about 2^127 points meet this criteria.
 */
-*recid = (overflow ? 2 : 0) | (rustsecp256k1_v0_1_0_fe_is_odd(&r.y) ? 1 : 0);
+*recid = (overflow ? 2 : 0) | (rustsecp256k1_v0_1_1_fe_is_odd(&r.y) ? 1 : 0);
}
-rustsecp256k1_v0_1_0_scalar_mul(&n, sigr, seckey);
-rustsecp256k1_v0_1_0_scalar_add(&n, &n, message);
-rustsecp256k1_v0_1_0_scalar_inverse(sigs, nonce);
-rustsecp256k1_v0_1_0_scalar_mul(sigs, sigs, &n);
-rustsecp256k1_v0_1_0_scalar_clear(&n);
-rustsecp256k1_v0_1_0_gej_clear(&rp);
-rustsecp256k1_v0_1_0_ge_clear(&r);
-if (rustsecp256k1_v0_1_0_scalar_is_zero(sigs)) {
+rustsecp256k1_v0_1_1_scalar_mul(&n, sigr, seckey);
+rustsecp256k1_v0_1_1_scalar_add(&n, &n, message);
+rustsecp256k1_v0_1_1_scalar_inverse(sigs, nonce);
+rustsecp256k1_v0_1_1_scalar_mul(sigs, sigs, &n);
+rustsecp256k1_v0_1_1_scalar_clear(&n);
+rustsecp256k1_v0_1_1_gej_clear(&rp);
+rustsecp256k1_v0_1_1_ge_clear(&r);
+if (rustsecp256k1_v0_1_1_scalar_is_zero(sigs)) {
return 0;
}
-if (rustsecp256k1_v0_1_0_scalar_is_high(sigs)) {
-rustsecp256k1_v0_1_0_scalar_negate(sigs, sigs);
+if (rustsecp256k1_v0_1_1_scalar_is_high(sigs)) {
+rustsecp256k1_v0_1_1_scalar_negate(sigs, sigs);
if (recid) {
*recid ^= 1;
}
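In equation form, the routine above computes the textbook ECDSA signature over the group of order n, with d the secret key, k the nonce and m the message scalar:

R = kG, \qquad r = R_x \bmod n, \qquad s = k^{-1}(m + r d) \bmod n,

followed by the low-s rule: if s lies in the upper half of the scalar range it is replaced by n - s and the lowest bit of the recovery id is flipped.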
@@ -14,12 +14,12 @@
#include "ecmult.h"
#include "ecmult_gen.h"

-static int rustsecp256k1_v0_1_0_eckey_pubkey_parse(rustsecp256k1_v0_1_0_ge *elem, const unsigned char *pub, size_t size);
-static int rustsecp256k1_v0_1_0_eckey_pubkey_serialize(rustsecp256k1_v0_1_0_ge *elem, unsigned char *pub, size_t *size, int compressed);
+static int rustsecp256k1_v0_1_1_eckey_pubkey_parse(rustsecp256k1_v0_1_1_ge *elem, const unsigned char *pub, size_t size);
+static int rustsecp256k1_v0_1_1_eckey_pubkey_serialize(rustsecp256k1_v0_1_1_ge *elem, unsigned char *pub, size_t *size, int compressed);

-static int rustsecp256k1_v0_1_0_eckey_privkey_tweak_add(rustsecp256k1_v0_1_0_scalar *key, const rustsecp256k1_v0_1_0_scalar *tweak);
-static int rustsecp256k1_v0_1_0_eckey_pubkey_tweak_add(const rustsecp256k1_v0_1_0_ecmult_context *ctx, rustsecp256k1_v0_1_0_ge *key, const rustsecp256k1_v0_1_0_scalar *tweak);
-static int rustsecp256k1_v0_1_0_eckey_privkey_tweak_mul(rustsecp256k1_v0_1_0_scalar *key, const rustsecp256k1_v0_1_0_scalar *tweak);
-static int rustsecp256k1_v0_1_0_eckey_pubkey_tweak_mul(const rustsecp256k1_v0_1_0_ecmult_context *ctx, rustsecp256k1_v0_1_0_ge *key, const rustsecp256k1_v0_1_0_scalar *tweak);
+static int rustsecp256k1_v0_1_1_eckey_privkey_tweak_add(rustsecp256k1_v0_1_1_scalar *key, const rustsecp256k1_v0_1_1_scalar *tweak);
+static int rustsecp256k1_v0_1_1_eckey_pubkey_tweak_add(const rustsecp256k1_v0_1_1_ecmult_context *ctx, rustsecp256k1_v0_1_1_ge *key, const rustsecp256k1_v0_1_1_scalar *tweak);
+static int rustsecp256k1_v0_1_1_eckey_privkey_tweak_mul(rustsecp256k1_v0_1_1_scalar *key, const rustsecp256k1_v0_1_1_scalar *tweak);
+static int rustsecp256k1_v0_1_1_eckey_pubkey_tweak_mul(const rustsecp256k1_v0_1_1_ecmult_context *ctx, rustsecp256k1_v0_1_1_ge *key, const rustsecp256k1_v0_1_1_scalar *tweak);

#endif /* SECP256K1_ECKEY_H */
@@ -14,86 +14,86 @@
#include "group.h"
#include "ecmult_gen.h"

-static int rustsecp256k1_v0_1_0_eckey_pubkey_parse(rustsecp256k1_v0_1_0_ge *elem, const unsigned char *pub, size_t size) {
+static int rustsecp256k1_v0_1_1_eckey_pubkey_parse(rustsecp256k1_v0_1_1_ge *elem, const unsigned char *pub, size_t size) {
if (size == 33 && (pub[0] == SECP256K1_TAG_PUBKEY_EVEN || pub[0] == SECP256K1_TAG_PUBKEY_ODD)) {
-rustsecp256k1_v0_1_0_fe x;
-return rustsecp256k1_v0_1_0_fe_set_b32(&x, pub+1) && rustsecp256k1_v0_1_0_ge_set_xo_var(elem, &x, pub[0] == SECP256K1_TAG_PUBKEY_ODD);
+rustsecp256k1_v0_1_1_fe x;
+return rustsecp256k1_v0_1_1_fe_set_b32(&x, pub+1) && rustsecp256k1_v0_1_1_ge_set_xo_var(elem, &x, pub[0] == SECP256K1_TAG_PUBKEY_ODD);
} else if (size == 65 && (pub[0] == SECP256K1_TAG_PUBKEY_UNCOMPRESSED || pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_EVEN || pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_ODD)) {
-rustsecp256k1_v0_1_0_fe x, y;
-if (!rustsecp256k1_v0_1_0_fe_set_b32(&x, pub+1) || !rustsecp256k1_v0_1_0_fe_set_b32(&y, pub+33)) {
+rustsecp256k1_v0_1_1_fe x, y;
+if (!rustsecp256k1_v0_1_1_fe_set_b32(&x, pub+1) || !rustsecp256k1_v0_1_1_fe_set_b32(&y, pub+33)) {
return 0;
}
-rustsecp256k1_v0_1_0_ge_set_xy(elem, &x, &y);
+rustsecp256k1_v0_1_1_ge_set_xy(elem, &x, &y);
if ((pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_EVEN || pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_ODD) &&
-rustsecp256k1_v0_1_0_fe_is_odd(&y) != (pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_ODD)) {
+rustsecp256k1_v0_1_1_fe_is_odd(&y) != (pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_ODD)) {
return 0;
}
-return rustsecp256k1_v0_1_0_ge_is_valid_var(elem);
+return rustsecp256k1_v0_1_1_ge_is_valid_var(elem);
} else {
return 0;
}
}

-static int rustsecp256k1_v0_1_0_eckey_pubkey_serialize(rustsecp256k1_v0_1_0_ge *elem, unsigned char *pub, size_t *size, int compressed) {
-if (rustsecp256k1_v0_1_0_ge_is_infinity(elem)) {
+static int rustsecp256k1_v0_1_1_eckey_pubkey_serialize(rustsecp256k1_v0_1_1_ge *elem, unsigned char *pub, size_t *size, int compressed) {
+if (rustsecp256k1_v0_1_1_ge_is_infinity(elem)) {
return 0;
}
-rustsecp256k1_v0_1_0_fe_normalize_var(&elem->x);
-rustsecp256k1_v0_1_0_fe_normalize_var(&elem->y);
-rustsecp256k1_v0_1_0_fe_get_b32(&pub[1], &elem->x);
+rustsecp256k1_v0_1_1_fe_normalize_var(&elem->x);
+rustsecp256k1_v0_1_1_fe_normalize_var(&elem->y);
+rustsecp256k1_v0_1_1_fe_get_b32(&pub[1], &elem->x);
if (compressed) {
*size = 33;
-pub[0] = rustsecp256k1_v0_1_0_fe_is_odd(&elem->y) ? SECP256K1_TAG_PUBKEY_ODD : SECP256K1_TAG_PUBKEY_EVEN;
+pub[0] = rustsecp256k1_v0_1_1_fe_is_odd(&elem->y) ? SECP256K1_TAG_PUBKEY_ODD : SECP256K1_TAG_PUBKEY_EVEN;
} else {
*size = 65;
pub[0] = SECP256K1_TAG_PUBKEY_UNCOMPRESSED;
-rustsecp256k1_v0_1_0_fe_get_b32(&pub[33], &elem->y);
+rustsecp256k1_v0_1_1_fe_get_b32(&pub[33], &elem->y);
}
return 1;
}

-static int rustsecp256k1_v0_1_0_eckey_privkey_tweak_add(rustsecp256k1_v0_1_0_scalar *key, const rustsecp256k1_v0_1_0_scalar *tweak) {
-rustsecp256k1_v0_1_0_scalar_add(key, key, tweak);
-if (rustsecp256k1_v0_1_0_scalar_is_zero(key)) {
+static int rustsecp256k1_v0_1_1_eckey_privkey_tweak_add(rustsecp256k1_v0_1_1_scalar *key, const rustsecp256k1_v0_1_1_scalar *tweak) {
+rustsecp256k1_v0_1_1_scalar_add(key, key, tweak);
+if (rustsecp256k1_v0_1_1_scalar_is_zero(key)) {
return 0;
}
return 1;
}

-static int rustsecp256k1_v0_1_0_eckey_pubkey_tweak_add(const rustsecp256k1_v0_1_0_ecmult_context *ctx, rustsecp256k1_v0_1_0_ge *key, const rustsecp256k1_v0_1_0_scalar *tweak) {
-rustsecp256k1_v0_1_0_gej pt;
-rustsecp256k1_v0_1_0_scalar one;
-rustsecp256k1_v0_1_0_gej_set_ge(&pt, key);
-rustsecp256k1_v0_1_0_scalar_set_int(&one, 1);
-rustsecp256k1_v0_1_0_ecmult(ctx, &pt, &pt, &one, tweak);
+static int rustsecp256k1_v0_1_1_eckey_pubkey_tweak_add(const rustsecp256k1_v0_1_1_ecmult_context *ctx, rustsecp256k1_v0_1_1_ge *key, const rustsecp256k1_v0_1_1_scalar *tweak) {
+rustsecp256k1_v0_1_1_gej pt;
+rustsecp256k1_v0_1_1_scalar one;
+rustsecp256k1_v0_1_1_gej_set_ge(&pt, key);
+rustsecp256k1_v0_1_1_scalar_set_int(&one, 1);
+rustsecp256k1_v0_1_1_ecmult(ctx, &pt, &pt, &one, tweak);

-if (rustsecp256k1_v0_1_0_gej_is_infinity(&pt)) {
+if (rustsecp256k1_v0_1_1_gej_is_infinity(&pt)) {
return 0;
}
-rustsecp256k1_v0_1_0_ge_set_gej(key, &pt);
+rustsecp256k1_v0_1_1_ge_set_gej(key, &pt);
return 1;
}

-static int rustsecp256k1_v0_1_0_eckey_privkey_tweak_mul(rustsecp256k1_v0_1_0_scalar *key, const rustsecp256k1_v0_1_0_scalar *tweak) {
-if (rustsecp256k1_v0_1_0_scalar_is_zero(tweak)) {
+static int rustsecp256k1_v0_1_1_eckey_privkey_tweak_mul(rustsecp256k1_v0_1_1_scalar *key, const rustsecp256k1_v0_1_1_scalar *tweak) {
+if (rustsecp256k1_v0_1_1_scalar_is_zero(tweak)) {
return 0;
}

-rustsecp256k1_v0_1_0_scalar_mul(key, key, tweak);
+rustsecp256k1_v0_1_1_scalar_mul(key, key, tweak);
return 1;
}

-static int rustsecp256k1_v0_1_0_eckey_pubkey_tweak_mul(const rustsecp256k1_v0_1_0_ecmult_context *ctx, rustsecp256k1_v0_1_0_ge *key, const rustsecp256k1_v0_1_0_scalar *tweak) {
-rustsecp256k1_v0_1_0_scalar zero;
-rustsecp256k1_v0_1_0_gej pt;
-if (rustsecp256k1_v0_1_0_scalar_is_zero(tweak)) {
+static int rustsecp256k1_v0_1_1_eckey_pubkey_tweak_mul(const rustsecp256k1_v0_1_1_ecmult_context *ctx, rustsecp256k1_v0_1_1_ge *key, const rustsecp256k1_v0_1_1_scalar *tweak) {
+rustsecp256k1_v0_1_1_scalar zero;
+rustsecp256k1_v0_1_1_gej pt;
+if (rustsecp256k1_v0_1_1_scalar_is_zero(tweak)) {
return 0;
}

-rustsecp256k1_v0_1_0_scalar_set_int(&zero, 0);
-rustsecp256k1_v0_1_0_gej_set_ge(&pt, key);
-rustsecp256k1_v0_1_0_ecmult(ctx, &pt, &pt, tweak, &zero);
-rustsecp256k1_v0_1_0_ge_set_gej(key, &pt);
+rustsecp256k1_v0_1_1_scalar_set_int(&zero, 0);
+rustsecp256k1_v0_1_1_gej_set_ge(&pt, key);
+rustsecp256k1_v0_1_1_ecmult(ctx, &pt, &pt, tweak, &zero);
+rustsecp256k1_v0_1_1_ge_set_gej(key, &pt);
return 1;
}
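The four tweak helpers above implement, in order (with n the group order, G the generator, d a secret scalar, P a public point and t the tweak):

d' = (d + t) \bmod n, \qquad P' = P + tG, \qquad d' = d \cdot t \bmod n, \qquad P' = tP,

failing when the operation degenerates: a zero result scalar, a zero tweak for the multiplicative variants, or the point at infinity.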
@@ -14,23 +14,23 @@

typedef struct {
/* For accelerating the computation of a*P + b*G: */
-rustsecp256k1_v0_1_0_ge_storage (*pre_g)[]; /* odd multiples of the generator */
+rustsecp256k1_v0_1_1_ge_storage (*pre_g)[]; /* odd multiples of the generator */
#ifdef USE_ENDOMORPHISM
-rustsecp256k1_v0_1_0_ge_storage (*pre_g_128)[]; /* odd multiples of 2^128*generator */
+rustsecp256k1_v0_1_1_ge_storage (*pre_g_128)[]; /* odd multiples of 2^128*generator */
#endif
-} rustsecp256k1_v0_1_0_ecmult_context;
+} rustsecp256k1_v0_1_1_ecmult_context;

static const size_t SECP256K1_ECMULT_CONTEXT_PREALLOCATED_SIZE;
-static void rustsecp256k1_v0_1_0_ecmult_context_init(rustsecp256k1_v0_1_0_ecmult_context *ctx);
-static void rustsecp256k1_v0_1_0_ecmult_context_build(rustsecp256k1_v0_1_0_ecmult_context *ctx, void **prealloc);
-static void rustsecp256k1_v0_1_0_ecmult_context_finalize_memcpy(rustsecp256k1_v0_1_0_ecmult_context *dst, const rustsecp256k1_v0_1_0_ecmult_context *src);
-static void rustsecp256k1_v0_1_0_ecmult_context_clear(rustsecp256k1_v0_1_0_ecmult_context *ctx);
-static int rustsecp256k1_v0_1_0_ecmult_context_is_built(const rustsecp256k1_v0_1_0_ecmult_context *ctx);
+static void rustsecp256k1_v0_1_1_ecmult_context_init(rustsecp256k1_v0_1_1_ecmult_context *ctx);
+static void rustsecp256k1_v0_1_1_ecmult_context_build(rustsecp256k1_v0_1_1_ecmult_context *ctx, void **prealloc);
+static void rustsecp256k1_v0_1_1_ecmult_context_finalize_memcpy(rustsecp256k1_v0_1_1_ecmult_context *dst, const rustsecp256k1_v0_1_1_ecmult_context *src);
+static void rustsecp256k1_v0_1_1_ecmult_context_clear(rustsecp256k1_v0_1_1_ecmult_context *ctx);
+static int rustsecp256k1_v0_1_1_ecmult_context_is_built(const rustsecp256k1_v0_1_1_ecmult_context *ctx);

/** Double multiply: R = na*A + ng*G */
-static void rustsecp256k1_v0_1_0_ecmult(const rustsecp256k1_v0_1_0_ecmult_context *ctx, rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_gej *a, const rustsecp256k1_v0_1_0_scalar *na, const rustsecp256k1_v0_1_0_scalar *ng);
+static void rustsecp256k1_v0_1_1_ecmult(const rustsecp256k1_v0_1_1_ecmult_context *ctx, rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_scalar *na, const rustsecp256k1_v0_1_1_scalar *ng);

-typedef int (rustsecp256k1_v0_1_0_ecmult_multi_callback)(rustsecp256k1_v0_1_0_scalar *sc, rustsecp256k1_v0_1_0_ge *pt, size_t idx, void *data);
+typedef int (rustsecp256k1_v0_1_1_ecmult_multi_callback)(rustsecp256k1_v0_1_1_scalar *sc, rustsecp256k1_v0_1_1_ge *pt, size_t idx, void *data);

/**
 * Multi-multiply: R = inp_g_sc * G + sum_i ni * Ai.

@@ -43,6 +43,6 @@ typedef int (rustsecp256k1_v0_1_0_ecmult_multi_callback)(rustsecp256k1_v0_1_0_sc
 * 0 if there is not enough scratch space for a single point or
 * callback returns 0
 */
-static int rustsecp256k1_v0_1_0_ecmult_multi_var(const rustsecp256k1_v0_1_0_callback* error_callback, const rustsecp256k1_v0_1_0_ecmult_context *ctx, rustsecp256k1_v0_1_0_scratch *scratch, rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_scalar *inp_g_sc, rustsecp256k1_v0_1_0_ecmult_multi_callback cb, void *cbdata, size_t n);
+static int rustsecp256k1_v0_1_1_ecmult_multi_var(const rustsecp256k1_v0_1_1_callback* error_callback, const rustsecp256k1_v0_1_1_ecmult_context *ctx, rustsecp256k1_v0_1_1_scratch *scratch, rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_scalar *inp_g_sc, rustsecp256k1_v0_1_1_ecmult_multi_callback cb, void *cbdata, size_t n);

#endif /* SECP256K1_ECMULT_H */

@@ -12,6 +12,6 @@

/* Here `bits` should be set to the maximum bitlength of the _absolute value_ of `q`, plus
 * one because we internally sometimes add 2 to the number during the WNAF conversion. */
-static void rustsecp256k1_v0_1_0_ecmult_const(rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_ge *a, const rustsecp256k1_v0_1_0_scalar *q, int bits);
+static void rustsecp256k1_v0_1_1_ecmult_const(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_ge *a, const rustsecp256k1_v0_1_1_scalar *q, int bits);

#endif /* SECP256K1_ECMULT_CONST_H */
@@ -17,21 +17,21 @@
int m; \
int abs_n = (n) * (((n) > 0) * 2 - 1); \
int idx_n = abs_n / 2; \
-rustsecp256k1_v0_1_0_fe neg_y; \
+rustsecp256k1_v0_1_1_fe neg_y; \
VERIFY_CHECK(((n) & 1) == 1); \
VERIFY_CHECK((n) >= -((1 << ((w)-1)) - 1)); \
VERIFY_CHECK((n) <= ((1 << ((w)-1)) - 1)); \
-VERIFY_SETUP(rustsecp256k1_v0_1_0_fe_clear(&(r)->x)); \
-VERIFY_SETUP(rustsecp256k1_v0_1_0_fe_clear(&(r)->y)); \
+VERIFY_SETUP(rustsecp256k1_v0_1_1_fe_clear(&(r)->x)); \
+VERIFY_SETUP(rustsecp256k1_v0_1_1_fe_clear(&(r)->y)); \
for (m = 0; m < ECMULT_TABLE_SIZE(w); m++) { \
/* This loop is used to avoid secret data in array indices. See
 * the comment in ecmult_gen_impl.h for rationale. */ \
-rustsecp256k1_v0_1_0_fe_cmov(&(r)->x, &(pre)[m].x, m == idx_n); \
-rustsecp256k1_v0_1_0_fe_cmov(&(r)->y, &(pre)[m].y, m == idx_n); \
+rustsecp256k1_v0_1_1_fe_cmov(&(r)->x, &(pre)[m].x, m == idx_n); \
+rustsecp256k1_v0_1_1_fe_cmov(&(r)->y, &(pre)[m].y, m == idx_n); \
} \
(r)->infinity = 0; \
-rustsecp256k1_v0_1_0_fe_negate(&neg_y, &(r)->y, 1); \
-rustsecp256k1_v0_1_0_fe_cmov(&(r)->y, &neg_y, (n) != abs_n); \
+rustsecp256k1_v0_1_1_fe_negate(&neg_y, &(r)->y, 1); \
+rustsecp256k1_v0_1_1_fe_cmov(&(r)->y, &neg_y, (n) != abs_n); \
} while(0)
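The table-fetch macro above reads every table entry and keeps the wanted one with a conditional move, so the memory access pattern does not depend on the secret index. A minimal stand-alone sketch of that selection idea, using a plain integer table and illustrative names rather than the library's own types:

#include <stddef.h>
#include <stdint.h>

/* Returns table[secret_idx] while touching every entry exactly once. */
static uint32_t ct_table_select(const uint32_t *table, size_t len, size_t secret_idx) {
    uint32_t result = 0;
    size_t m;
    for (m = 0; m < len; m++) {
        /* mask is all ones when m == secret_idx, all zeros otherwise */
        uint32_t mask = (uint32_t)0 - (uint32_t)(m == secret_idx);
        result = (result & ~mask) | (table[m] & mask);
    }
    return result;
}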
@@ -48,7 +48,7 @@
 *
 * Numbers reference steps of `Algorithm SPA-resistant Width-w NAF with Odd Scalar` on pp. 335
 */
-static int rustsecp256k1_v0_1_0_wnaf_const(int *wnaf, const rustsecp256k1_v0_1_0_scalar *scalar, int w, int size) {
+static int rustsecp256k1_v0_1_1_wnaf_const(int *wnaf, const rustsecp256k1_v0_1_1_scalar *scalar, int w, int size) {
int global_sign;
int skew = 0;
int word = 0;
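As a reminder of the representation this routine produces (a sketch of the invariant implied by the checks in the code, ignoring the -1 special case discussed below): the returned skew and the digits wnaf[i] satisfy

\sum_i \mathrm{wnaf}[i] \cdot 2^{w i} \equiv \mathit{scalar} + \mathit{skew} \pmod{n},
\qquad \mathrm{wnaf}[i] \text{ odd}, \quad |\mathrm{wnaf}[i]| \le 2^{w-1} - 1, \quad \mathit{skew} \in \{1, 2\}.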
@ -59,7 +59,7 @@ static int rustsecp256k1_v0_1_0_wnaf_const(int *wnaf, const rustsecp256k1_v0_1_0
|
||||||
|
|
||||||
int flip;
|
int flip;
|
||||||
int bit;
|
int bit;
|
||||||
rustsecp256k1_v0_1_0_scalar s;
|
rustsecp256k1_v0_1_1_scalar s;
|
||||||
int not_neg_one;
|
int not_neg_one;
|
||||||
|
|
||||||
VERIFY_CHECK(w > 0);
|
VERIFY_CHECK(w > 0);
|
||||||
|
@ -77,33 +77,33 @@ static int rustsecp256k1_v0_1_0_wnaf_const(int *wnaf, const rustsecp256k1_v0_1_0
|
||||||
* particular, to ensure that the outputs from the endomorphism-split fit into
|
* particular, to ensure that the outputs from the endomorphism-split fit into
|
||||||
* 128 bits). If we negate, the parity of our number flips, inverting which of
|
* 128 bits). If we negate, the parity of our number flips, inverting which of
|
||||||
* {1, 2} we want to add to the scalar when ensuring that it's odd. Further
|
* {1, 2} we want to add to the scalar when ensuring that it's odd. Further
|
||||||
* complicating things, -1 interacts badly with `rustsecp256k1_v0_1_0_scalar_cadd_bit` and
|
* complicating things, -1 interacts badly with `rustsecp256k1_v0_1_1_scalar_cadd_bit` and
|
||||||
* we need to special-case it in this logic. */
|
* we need to special-case it in this logic. */
|
||||||
flip = rustsecp256k1_v0_1_0_scalar_is_high(scalar);
|
flip = rustsecp256k1_v0_1_1_scalar_is_high(scalar);
|
||||||
/* We add 1 to even numbers, 2 to odd ones, noting that negation flips parity */
|
/* We add 1 to even numbers, 2 to odd ones, noting that negation flips parity */
|
||||||
bit = flip ^ !rustsecp256k1_v0_1_0_scalar_is_even(scalar);
|
bit = flip ^ !rustsecp256k1_v0_1_1_scalar_is_even(scalar);
|
||||||
/* We check for negative one, since adding 2 to it will cause an overflow */
|
/* We check for negative one, since adding 2 to it will cause an overflow */
|
||||||
rustsecp256k1_v0_1_0_scalar_negate(&s, scalar);
|
rustsecp256k1_v0_1_1_scalar_negate(&s, scalar);
|
||||||
not_neg_one = !rustsecp256k1_v0_1_0_scalar_is_one(&s);
|
not_neg_one = !rustsecp256k1_v0_1_1_scalar_is_one(&s);
|
||||||
s = *scalar;
|
s = *scalar;
|
||||||
rustsecp256k1_v0_1_0_scalar_cadd_bit(&s, bit, not_neg_one);
|
rustsecp256k1_v0_1_1_scalar_cadd_bit(&s, bit, not_neg_one);
|
||||||
/* If we had negative one, flip == 1, s.d[0] == 0, bit == 1, so caller expects
|
/* If we had negative one, flip == 1, s.d[0] == 0, bit == 1, so caller expects
|
||||||
* that we added two to it and flipped it. In fact for -1 these operations are
|
* that we added two to it and flipped it. In fact for -1 these operations are
|
||||||
* identical. We only flipped, but since skewing is required (in the sense that
|
* identical. We only flipped, but since skewing is required (in the sense that
|
||||||
* the skew must be 1 or 2, never zero) and flipping is not, we need to change
|
* the skew must be 1 or 2, never zero) and flipping is not, we need to change
|
||||||
* our flags to claim that we only skewed. */
|
* our flags to claim that we only skewed. */
|
||||||
global_sign = rustsecp256k1_v0_1_0_scalar_cond_negate(&s, flip);
|
global_sign = rustsecp256k1_v0_1_1_scalar_cond_negate(&s, flip);
|
||||||
global_sign *= not_neg_one * 2 - 1;
|
global_sign *= not_neg_one * 2 - 1;
|
||||||
skew = 1 << bit;
|
skew = 1 << bit;
|
||||||
|
|
||||||
/* 4 */
|
/* 4 */
|
||||||
u_last = rustsecp256k1_v0_1_0_scalar_shr_int(&s, w);
|
u_last = rustsecp256k1_v0_1_1_scalar_shr_int(&s, w);
|
||||||
do {
|
do {
|
||||||
int sign;
|
int sign;
|
||||||
int even;
|
int even;
|
||||||
|
|
||||||
/* 4.1 4.4 */
|
/* 4.1 4.4 */
|
||||||
u = rustsecp256k1_v0_1_0_scalar_shr_int(&s, w);
|
u = rustsecp256k1_v0_1_1_scalar_shr_int(&s, w);
|
||||||
/* 4.2 */
|
/* 4.2 */
|
||||||
even = ((u & 1) == 0);
|
even = ((u & 1) == 0);
|
||||||
sign = 2 * (u_last > 0) - 1;
|
sign = 2 * (u_last > 0) - 1;
|
||||||
|
@@ -117,22 +117,22 @@ static int rustsecp256k1_v0_1_0_wnaf_const(int *wnaf, const rustsecp256k1_v0_1_0
} while (word * w < size);
wnaf[word] = u * global_sign;

-VERIFY_CHECK(rustsecp256k1_v0_1_0_scalar_is_zero(&s));
+VERIFY_CHECK(rustsecp256k1_v0_1_1_scalar_is_zero(&s));
VERIFY_CHECK(word == WNAF_SIZE_BITS(size, w));
return skew;
}

-static void rustsecp256k1_v0_1_0_ecmult_const(rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_ge *a, const rustsecp256k1_v0_1_0_scalar *scalar, int size) {
+static void rustsecp256k1_v0_1_1_ecmult_const(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_ge *a, const rustsecp256k1_v0_1_1_scalar *scalar, int size) {
-rustsecp256k1_v0_1_0_ge pre_a[ECMULT_TABLE_SIZE(WINDOW_A)];
+rustsecp256k1_v0_1_1_ge pre_a[ECMULT_TABLE_SIZE(WINDOW_A)];
-rustsecp256k1_v0_1_0_ge tmpa;
+rustsecp256k1_v0_1_1_ge tmpa;
-rustsecp256k1_v0_1_0_fe Z;
+rustsecp256k1_v0_1_1_fe Z;

int skew_1;
#ifdef USE_ENDOMORPHISM
-rustsecp256k1_v0_1_0_ge pre_a_lam[ECMULT_TABLE_SIZE(WINDOW_A)];
+rustsecp256k1_v0_1_1_ge pre_a_lam[ECMULT_TABLE_SIZE(WINDOW_A)];
int wnaf_lam[1 + WNAF_SIZE(WINDOW_A - 1)];
int skew_lam;
-rustsecp256k1_v0_1_0_scalar q_1, q_lam;
+rustsecp256k1_v0_1_1_scalar q_1, q_lam;
#endif
int wnaf_1[1 + WNAF_SIZE(WINDOW_A - 1)];

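The comments in the hunk above describe how the scalar is forced odd before wNAF conversion: 1 is added to even inputs and 2 to odd ones, and that amount is returned as the skew so the caller can undo it later. An illustrative sketch of the same parity rule in plain C, using a hypothetical helper name and an unsigned integer in place of a scalar (not from the library sources):

static int force_odd(unsigned int *n) {
    int skew = (*n & 1u) ? 2 : 1;  /* add 1 to even numbers, 2 to odd ones */
    *n += skew;                    /* *n is now odd in both cases */
    return skew;                   /* caller subtracts skew copies of the point later */
}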
@@ -144,13 +144,13 @@ static void rustsecp256k1_v0_1_0_ecmult_const(rustsecp256k1_v0_1_0_gej *r, const
if (size > 128) {
rsize = 128;
/* split q into q_1 and q_lam (where q = q_1 + q_lam*lambda, and q_1 and q_lam are ~128 bit) */
-rustsecp256k1_v0_1_0_scalar_split_lambda(&q_1, &q_lam, scalar);
+rustsecp256k1_v0_1_1_scalar_split_lambda(&q_1, &q_lam, scalar);
-skew_1 = rustsecp256k1_v0_1_0_wnaf_const(wnaf_1, &q_1, WINDOW_A - 1, 128);
+skew_1 = rustsecp256k1_v0_1_1_wnaf_const(wnaf_1, &q_1, WINDOW_A - 1, 128);
-skew_lam = rustsecp256k1_v0_1_0_wnaf_const(wnaf_lam, &q_lam, WINDOW_A - 1, 128);
+skew_lam = rustsecp256k1_v0_1_1_wnaf_const(wnaf_lam, &q_lam, WINDOW_A - 1, 128);
} else
#endif
{
-skew_1 = rustsecp256k1_v0_1_0_wnaf_const(wnaf_1, scalar, WINDOW_A - 1, size);
+skew_1 = rustsecp256k1_v0_1_1_wnaf_const(wnaf_1, scalar, WINDOW_A - 1, size);
#ifdef USE_ENDOMORPHISM
skew_lam = 0;
#endif
@@ -162,15 +162,15 @@ static void rustsecp256k1_v0_1_0_ecmult_const(rustsecp256k1_v0_1_0_gej *r, const
 * that the Z coordinate was 1, use affine addition formulae, and correct
 * the Z coordinate of the result once at the end.
 */
-rustsecp256k1_v0_1_0_gej_set_ge(r, a);
+rustsecp256k1_v0_1_1_gej_set_ge(r, a);
-rustsecp256k1_v0_1_0_ecmult_odd_multiples_table_globalz_windowa(pre_a, &Z, r);
+rustsecp256k1_v0_1_1_ecmult_odd_multiples_table_globalz_windowa(pre_a, &Z, r);
for (i = 0; i < ECMULT_TABLE_SIZE(WINDOW_A); i++) {
-rustsecp256k1_v0_1_0_fe_normalize_weak(&pre_a[i].y);
+rustsecp256k1_v0_1_1_fe_normalize_weak(&pre_a[i].y);
}
#ifdef USE_ENDOMORPHISM
if (size > 128) {
for (i = 0; i < ECMULT_TABLE_SIZE(WINDOW_A); i++) {
-rustsecp256k1_v0_1_0_ge_mul_lambda(&pre_a_lam[i], &pre_a[i]);
+rustsecp256k1_v0_1_1_ge_mul_lambda(&pre_a_lam[i], &pre_a[i]);
}
}
#endif
@@ -181,13 +181,13 @@ static void rustsecp256k1_v0_1_0_ecmult_const(rustsecp256k1_v0_1_0_gej *r, const
i = wnaf_1[WNAF_SIZE_BITS(rsize, WINDOW_A - 1)];
VERIFY_CHECK(i != 0);
ECMULT_CONST_TABLE_GET_GE(&tmpa, pre_a, i, WINDOW_A);
-rustsecp256k1_v0_1_0_gej_set_ge(r, &tmpa);
+rustsecp256k1_v0_1_1_gej_set_ge(r, &tmpa);
#ifdef USE_ENDOMORPHISM
if (size > 128) {
i = wnaf_lam[WNAF_SIZE_BITS(rsize, WINDOW_A - 1)];
VERIFY_CHECK(i != 0);
ECMULT_CONST_TABLE_GET_GE(&tmpa, pre_a_lam, i, WINDOW_A);
-rustsecp256k1_v0_1_0_gej_add_ge(r, r, &tmpa);
+rustsecp256k1_v0_1_1_gej_add_ge(r, r, &tmpa);
}
#endif
/* remaining loop iterations */
@@ -195,64 +195,64 @@ static void rustsecp256k1_v0_1_0_ecmult_const(rustsecp256k1_v0_1_0_gej *r, const
int n;
int j;
for (j = 0; j < WINDOW_A - 1; ++j) {
-rustsecp256k1_v0_1_0_gej_double_nonzero(r, r, NULL);
+rustsecp256k1_v0_1_1_gej_double_nonzero(r, r, NULL);
}

n = wnaf_1[i];
ECMULT_CONST_TABLE_GET_GE(&tmpa, pre_a, n, WINDOW_A);
VERIFY_CHECK(n != 0);
-rustsecp256k1_v0_1_0_gej_add_ge(r, r, &tmpa);
+rustsecp256k1_v0_1_1_gej_add_ge(r, r, &tmpa);
#ifdef USE_ENDOMORPHISM
if (size > 128) {
n = wnaf_lam[i];
ECMULT_CONST_TABLE_GET_GE(&tmpa, pre_a_lam, n, WINDOW_A);
VERIFY_CHECK(n != 0);
-rustsecp256k1_v0_1_0_gej_add_ge(r, r, &tmpa);
+rustsecp256k1_v0_1_1_gej_add_ge(r, r, &tmpa);
}
#endif
}

-rustsecp256k1_v0_1_0_fe_mul(&r->z, &r->z, &Z);
+rustsecp256k1_v0_1_1_fe_mul(&r->z, &r->z, &Z);

{
/* Correct for wNAF skew */
-rustsecp256k1_v0_1_0_ge correction = *a;
+rustsecp256k1_v0_1_1_ge correction = *a;
-rustsecp256k1_v0_1_0_ge_storage correction_1_stor;
+rustsecp256k1_v0_1_1_ge_storage correction_1_stor;
#ifdef USE_ENDOMORPHISM
-rustsecp256k1_v0_1_0_ge_storage correction_lam_stor;
+rustsecp256k1_v0_1_1_ge_storage correction_lam_stor;
#endif
-rustsecp256k1_v0_1_0_ge_storage a2_stor;
+rustsecp256k1_v0_1_1_ge_storage a2_stor;
-rustsecp256k1_v0_1_0_gej tmpj;
+rustsecp256k1_v0_1_1_gej tmpj;
-rustsecp256k1_v0_1_0_gej_set_ge(&tmpj, &correction);
+rustsecp256k1_v0_1_1_gej_set_ge(&tmpj, &correction);
-rustsecp256k1_v0_1_0_gej_double_var(&tmpj, &tmpj, NULL);
+rustsecp256k1_v0_1_1_gej_double_var(&tmpj, &tmpj, NULL);
-rustsecp256k1_v0_1_0_ge_set_gej(&correction, &tmpj);
+rustsecp256k1_v0_1_1_ge_set_gej(&correction, &tmpj);
-rustsecp256k1_v0_1_0_ge_to_storage(&correction_1_stor, a);
+rustsecp256k1_v0_1_1_ge_to_storage(&correction_1_stor, a);
#ifdef USE_ENDOMORPHISM
if (size > 128) {
-rustsecp256k1_v0_1_0_ge_to_storage(&correction_lam_stor, a);
+rustsecp256k1_v0_1_1_ge_to_storage(&correction_lam_stor, a);
}
#endif
-rustsecp256k1_v0_1_0_ge_to_storage(&a2_stor, &correction);
+rustsecp256k1_v0_1_1_ge_to_storage(&a2_stor, &correction);

/* For odd numbers this is 2a (so replace it), for even ones a (so no-op) */
-rustsecp256k1_v0_1_0_ge_storage_cmov(&correction_1_stor, &a2_stor, skew_1 == 2);
+rustsecp256k1_v0_1_1_ge_storage_cmov(&correction_1_stor, &a2_stor, skew_1 == 2);
#ifdef USE_ENDOMORPHISM
if (size > 128) {
-rustsecp256k1_v0_1_0_ge_storage_cmov(&correction_lam_stor, &a2_stor, skew_lam == 2);
+rustsecp256k1_v0_1_1_ge_storage_cmov(&correction_lam_stor, &a2_stor, skew_lam == 2);
}
#endif

/* Apply the correction */
-rustsecp256k1_v0_1_0_ge_from_storage(&correction, &correction_1_stor);
+rustsecp256k1_v0_1_1_ge_from_storage(&correction, &correction_1_stor);
-rustsecp256k1_v0_1_0_ge_neg(&correction, &correction);
+rustsecp256k1_v0_1_1_ge_neg(&correction, &correction);
-rustsecp256k1_v0_1_0_gej_add_ge(r, r, &correction);
+rustsecp256k1_v0_1_1_gej_add_ge(r, r, &correction);

#ifdef USE_ENDOMORPHISM
if (size > 128) {
-rustsecp256k1_v0_1_0_ge_from_storage(&correction, &correction_lam_stor);
+rustsecp256k1_v0_1_1_ge_from_storage(&correction, &correction_lam_stor);
-rustsecp256k1_v0_1_0_ge_neg(&correction, &correction);
+rustsecp256k1_v0_1_1_ge_neg(&correction, &correction);
-rustsecp256k1_v0_1_0_ge_mul_lambda(&correction, &correction);
+rustsecp256k1_v0_1_1_ge_mul_lambda(&correction, &correction);
-rustsecp256k1_v0_1_0_gej_add_ge(r, r, &correction);
+rustsecp256k1_v0_1_1_gej_add_ge(r, r, &correction);
}
#endif
}

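The "Correct for wNAF skew" block above subtracts the offset introduced when the scalar was forced odd: the window loop effectively accumulated (q + skew)*A, and one conditional subtraction of A or 2A restores q*A. A rough integer-arithmetic sketch of that bookkeeping, ignoring the sign handling and using hypothetical names (not from the library sources):

static long mult_with_skew(long q, long A) {
    int skew = (q & 1) ? 2 : 1;   /* same parity rule as the wnaf_const sketch earlier */
    long r = (q + skew) * A;      /* what the window loop accumulates */
    return r - skew * A;          /* the correction block: back to q * A */
}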
@@ -23,21 +23,21 @@ typedef struct {
 * None of the resulting prec group elements have a known scalar, and neither do any of
 * the intermediate sums while computing a*G.
 */
-rustsecp256k1_v0_1_0_ge_storage (*prec)[64][16]; /* prec[j][i] = 16^j * i * G + U_i */
+rustsecp256k1_v0_1_1_ge_storage (*prec)[64][16]; /* prec[j][i] = 16^j * i * G + U_i */
-rustsecp256k1_v0_1_0_scalar blind;
+rustsecp256k1_v0_1_1_scalar blind;
-rustsecp256k1_v0_1_0_gej initial;
+rustsecp256k1_v0_1_1_gej initial;
-} rustsecp256k1_v0_1_0_ecmult_gen_context;
+} rustsecp256k1_v0_1_1_ecmult_gen_context;

static const size_t SECP256K1_ECMULT_GEN_CONTEXT_PREALLOCATED_SIZE;
-static void rustsecp256k1_v0_1_0_ecmult_gen_context_init(rustsecp256k1_v0_1_0_ecmult_gen_context* ctx);
+static void rustsecp256k1_v0_1_1_ecmult_gen_context_init(rustsecp256k1_v0_1_1_ecmult_gen_context* ctx);
-static void rustsecp256k1_v0_1_0_ecmult_gen_context_build(rustsecp256k1_v0_1_0_ecmult_gen_context* ctx, void **prealloc);
+static void rustsecp256k1_v0_1_1_ecmult_gen_context_build(rustsecp256k1_v0_1_1_ecmult_gen_context* ctx, void **prealloc);
-static void rustsecp256k1_v0_1_0_ecmult_gen_context_finalize_memcpy(rustsecp256k1_v0_1_0_ecmult_gen_context *dst, const rustsecp256k1_v0_1_0_ecmult_gen_context* src);
+static void rustsecp256k1_v0_1_1_ecmult_gen_context_finalize_memcpy(rustsecp256k1_v0_1_1_ecmult_gen_context *dst, const rustsecp256k1_v0_1_1_ecmult_gen_context* src);
-static void rustsecp256k1_v0_1_0_ecmult_gen_context_clear(rustsecp256k1_v0_1_0_ecmult_gen_context* ctx);
+static void rustsecp256k1_v0_1_1_ecmult_gen_context_clear(rustsecp256k1_v0_1_1_ecmult_gen_context* ctx);
-static int rustsecp256k1_v0_1_0_ecmult_gen_context_is_built(const rustsecp256k1_v0_1_0_ecmult_gen_context* ctx);
+static int rustsecp256k1_v0_1_1_ecmult_gen_context_is_built(const rustsecp256k1_v0_1_1_ecmult_gen_context* ctx);

/** Multiply with the generator: R = a*G */
-static void rustsecp256k1_v0_1_0_ecmult_gen(const rustsecp256k1_v0_1_0_ecmult_gen_context* ctx, rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_scalar *a);
+static void rustsecp256k1_v0_1_1_ecmult_gen(const rustsecp256k1_v0_1_1_ecmult_gen_context* ctx, rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_scalar *a);

-static void rustsecp256k1_v0_1_0_ecmult_gen_blind(rustsecp256k1_v0_1_0_ecmult_gen_context *ctx, const unsigned char *seed32);
+static void rustsecp256k1_v0_1_1_ecmult_gen_blind(rustsecp256k1_v0_1_1_ecmult_gen_context *ctx, const unsigned char *seed32);

#endif /* SECP256K1_ECMULT_GEN_H */

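The prec table declared above stores prec[j][i] = 16^j * i * G + U_i, so a*G is assembled by adding one table entry per base-16 digit of the scalar; the U_i offsets cancel against the context's initial point. A plain-integer sketch of that indexing, with the offsets omitted and hypothetical names T and table_mult standing in for the real structures (not from the library sources):

#include <stdint.h>

static uint64_t table_mult(uint64_t a, const uint64_t T[64][16]) {
    uint64_t acc = 0;
    int j;
    for (j = 0; j < 64; j++) {
        acc += T[j][(a >> (4 * j)) & 0xF];  /* assumes T[j][d] == 16^j * d * g */
    }
    return acc;  /* equals a * g because a = sum_j d_j * 16^j */
}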
@@ -17,20 +17,20 @@
#endif

#ifndef USE_ECMULT_STATIC_PRECOMPUTATION
-static const size_t SECP256K1_ECMULT_GEN_CONTEXT_PREALLOCATED_SIZE = ROUND_TO_ALIGN(sizeof(*((rustsecp256k1_v0_1_0_ecmult_gen_context*) NULL)->prec));
+static const size_t SECP256K1_ECMULT_GEN_CONTEXT_PREALLOCATED_SIZE = ROUND_TO_ALIGN(sizeof(*((rustsecp256k1_v0_1_1_ecmult_gen_context*) NULL)->prec));
#else
static const size_t SECP256K1_ECMULT_GEN_CONTEXT_PREALLOCATED_SIZE = 0;
#endif

-static void rustsecp256k1_v0_1_0_ecmult_gen_context_init(rustsecp256k1_v0_1_0_ecmult_gen_context *ctx) {
+static void rustsecp256k1_v0_1_1_ecmult_gen_context_init(rustsecp256k1_v0_1_1_ecmult_gen_context *ctx) {
ctx->prec = NULL;
}

-static void rustsecp256k1_v0_1_0_ecmult_gen_context_build(rustsecp256k1_v0_1_0_ecmult_gen_context *ctx, void **prealloc) {
+static void rustsecp256k1_v0_1_1_ecmult_gen_context_build(rustsecp256k1_v0_1_1_ecmult_gen_context *ctx, void **prealloc) {
#ifndef USE_ECMULT_STATIC_PRECOMPUTATION
-rustsecp256k1_v0_1_0_ge prec[1024];
+rustsecp256k1_v0_1_1_ge prec[1024];
-rustsecp256k1_v0_1_0_gej gj;
+rustsecp256k1_v0_1_1_gej gj;
-rustsecp256k1_v0_1_0_gej nums_gej;
+rustsecp256k1_v0_1_1_gej nums_gej;
int i, j;
size_t const prealloc_size = SECP256K1_ECMULT_GEN_CONTEXT_PREALLOCATED_SIZE;
void* const base = *prealloc;
@@ -40,101 +40,101 @@ static void rustsecp256k1_v0_1_0_ecmult_gen_context_build(rustsecp256k1_v0_1_0_e
return;
}
#ifndef USE_ECMULT_STATIC_PRECOMPUTATION
-ctx->prec = (rustsecp256k1_v0_1_0_ge_storage (*)[64][16])manual_alloc(prealloc, prealloc_size, base, prealloc_size);
+ctx->prec = (rustsecp256k1_v0_1_1_ge_storage (*)[64][16])manual_alloc(prealloc, prealloc_size, base, prealloc_size);

/* get the generator */
-rustsecp256k1_v0_1_0_gej_set_ge(&gj, &rustsecp256k1_v0_1_0_ge_const_g);
+rustsecp256k1_v0_1_1_gej_set_ge(&gj, &rustsecp256k1_v0_1_1_ge_const_g);

/* Construct a group element with no known corresponding scalar (nothing up my sleeve). */
{
static const unsigned char nums_b32[33] = "The scalar for this x is unknown";
-rustsecp256k1_v0_1_0_fe nums_x;
+rustsecp256k1_v0_1_1_fe nums_x;
-rustsecp256k1_v0_1_0_ge nums_ge;
+rustsecp256k1_v0_1_1_ge nums_ge;
int r;
-r = rustsecp256k1_v0_1_0_fe_set_b32(&nums_x, nums_b32);
+r = rustsecp256k1_v0_1_1_fe_set_b32(&nums_x, nums_b32);
(void)r;
VERIFY_CHECK(r);
-r = rustsecp256k1_v0_1_0_ge_set_xo_var(&nums_ge, &nums_x, 0);
+r = rustsecp256k1_v0_1_1_ge_set_xo_var(&nums_ge, &nums_x, 0);
(void)r;
VERIFY_CHECK(r);
-rustsecp256k1_v0_1_0_gej_set_ge(&nums_gej, &nums_ge);
+rustsecp256k1_v0_1_1_gej_set_ge(&nums_gej, &nums_ge);
/* Add G to make the bits in x uniformly distributed. */
-rustsecp256k1_v0_1_0_gej_add_ge_var(&nums_gej, &nums_gej, &rustsecp256k1_v0_1_0_ge_const_g, NULL);
+rustsecp256k1_v0_1_1_gej_add_ge_var(&nums_gej, &nums_gej, &rustsecp256k1_v0_1_1_ge_const_g, NULL);
}

/* compute prec. */
{
-rustsecp256k1_v0_1_0_gej precj[1024]; /* Jacobian versions of prec. */
+rustsecp256k1_v0_1_1_gej precj[1024]; /* Jacobian versions of prec. */
-rustsecp256k1_v0_1_0_gej gbase;
+rustsecp256k1_v0_1_1_gej gbase;
-rustsecp256k1_v0_1_0_gej numsbase;
+rustsecp256k1_v0_1_1_gej numsbase;
gbase = gj; /* 16^j * G */
numsbase = nums_gej; /* 2^j * nums. */
for (j = 0; j < 64; j++) {
/* Set precj[j*16 .. j*16+15] to (numsbase, numsbase + gbase, ..., numsbase + 15*gbase). */
precj[j*16] = numsbase;
for (i = 1; i < 16; i++) {
-rustsecp256k1_v0_1_0_gej_add_var(&precj[j*16 + i], &precj[j*16 + i - 1], &gbase, NULL);
+rustsecp256k1_v0_1_1_gej_add_var(&precj[j*16 + i], &precj[j*16 + i - 1], &gbase, NULL);
}
/* Multiply gbase by 16. */
for (i = 0; i < 4; i++) {
-rustsecp256k1_v0_1_0_gej_double_var(&gbase, &gbase, NULL);
+rustsecp256k1_v0_1_1_gej_double_var(&gbase, &gbase, NULL);
}
/* Multiply numbase by 2. */
-rustsecp256k1_v0_1_0_gej_double_var(&numsbase, &numsbase, NULL);
+rustsecp256k1_v0_1_1_gej_double_var(&numsbase, &numsbase, NULL);
if (j == 62) {
/* In the last iteration, numsbase is (1 - 2^j) * nums instead. */
-rustsecp256k1_v0_1_0_gej_neg(&numsbase, &numsbase);
+rustsecp256k1_v0_1_1_gej_neg(&numsbase, &numsbase);
-rustsecp256k1_v0_1_0_gej_add_var(&numsbase, &numsbase, &nums_gej, NULL);
+rustsecp256k1_v0_1_1_gej_add_var(&numsbase, &numsbase, &nums_gej, NULL);
}
}
-rustsecp256k1_v0_1_0_ge_set_all_gej_var(prec, precj, 1024);
+rustsecp256k1_v0_1_1_ge_set_all_gej_var(prec, precj, 1024);
}
for (j = 0; j < 64; j++) {
for (i = 0; i < 16; i++) {
-rustsecp256k1_v0_1_0_ge_to_storage(&(*ctx->prec)[j][i], &prec[j*16 + i]);
+rustsecp256k1_v0_1_1_ge_to_storage(&(*ctx->prec)[j][i], &prec[j*16 + i]);
}
}
#else
(void)prealloc;
-ctx->prec = (rustsecp256k1_v0_1_0_ge_storage (*)[64][16])rustsecp256k1_v0_1_0_ecmult_static_context;
+ctx->prec = (rustsecp256k1_v0_1_1_ge_storage (*)[64][16])rustsecp256k1_v0_1_1_ecmult_static_context;
#endif
-rustsecp256k1_v0_1_0_ecmult_gen_blind(ctx, NULL);
+rustsecp256k1_v0_1_1_ecmult_gen_blind(ctx, NULL);
}

-static int rustsecp256k1_v0_1_0_ecmult_gen_context_is_built(const rustsecp256k1_v0_1_0_ecmult_gen_context* ctx) {
+static int rustsecp256k1_v0_1_1_ecmult_gen_context_is_built(const rustsecp256k1_v0_1_1_ecmult_gen_context* ctx) {
return ctx->prec != NULL;
}

-static void rustsecp256k1_v0_1_0_ecmult_gen_context_finalize_memcpy(rustsecp256k1_v0_1_0_ecmult_gen_context *dst, const rustsecp256k1_v0_1_0_ecmult_gen_context *src) {
+static void rustsecp256k1_v0_1_1_ecmult_gen_context_finalize_memcpy(rustsecp256k1_v0_1_1_ecmult_gen_context *dst, const rustsecp256k1_v0_1_1_ecmult_gen_context *src) {
#ifndef USE_ECMULT_STATIC_PRECOMPUTATION
if (src->prec != NULL) {
/* We cast to void* first to suppress a -Wcast-align warning. */
-dst->prec = (rustsecp256k1_v0_1_0_ge_storage (*)[64][16])(void*)((unsigned char*)dst + ((unsigned char*)src->prec - (unsigned char*)src));
+dst->prec = (rustsecp256k1_v0_1_1_ge_storage (*)[64][16])(void*)((unsigned char*)dst + ((unsigned char*)src->prec - (unsigned char*)src));
}
#else
(void)dst, (void)src;
#endif
}

-static void rustsecp256k1_v0_1_0_ecmult_gen_context_clear(rustsecp256k1_v0_1_0_ecmult_gen_context *ctx) {
+static void rustsecp256k1_v0_1_1_ecmult_gen_context_clear(rustsecp256k1_v0_1_1_ecmult_gen_context *ctx) {
-rustsecp256k1_v0_1_0_scalar_clear(&ctx->blind);
+rustsecp256k1_v0_1_1_scalar_clear(&ctx->blind);
-rustsecp256k1_v0_1_0_gej_clear(&ctx->initial);
+rustsecp256k1_v0_1_1_gej_clear(&ctx->initial);
ctx->prec = NULL;
}

-static void rustsecp256k1_v0_1_0_ecmult_gen(const rustsecp256k1_v0_1_0_ecmult_gen_context *ctx, rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_scalar *gn) {
+static void rustsecp256k1_v0_1_1_ecmult_gen(const rustsecp256k1_v0_1_1_ecmult_gen_context *ctx, rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_scalar *gn) {
-rustsecp256k1_v0_1_0_ge add;
+rustsecp256k1_v0_1_1_ge add;
-rustsecp256k1_v0_1_0_ge_storage adds;
+rustsecp256k1_v0_1_1_ge_storage adds;
-rustsecp256k1_v0_1_0_scalar gnb;
+rustsecp256k1_v0_1_1_scalar gnb;
int bits;
int i, j;
memset(&adds, 0, sizeof(adds));
*r = ctx->initial;
/* Blind scalar/point multiplication by computing (n-b)G + bG instead of nG. */
-rustsecp256k1_v0_1_0_scalar_add(&gnb, gn, &ctx->blind);
+rustsecp256k1_v0_1_1_scalar_add(&gnb, gn, &ctx->blind);
add.infinity = 0;
for (j = 0; j < 64; j++) {
-bits = rustsecp256k1_v0_1_0_scalar_get_bits(&gnb, j * 4, 4);
+bits = rustsecp256k1_v0_1_1_scalar_get_bits(&gnb, j * 4, 4);
for (i = 0; i < 16; i++) {
/** This uses a conditional move to avoid any secret data in array indexes.
 * _Any_ use of secret indexes has been demonstrated to result in timing
@@ -146,33 +146,33 @@ static void rustsecp256k1_v0_1_0_ecmult_gen(const rustsecp256k1_v0_1_0_ecmult_ge
 * by Dag Arne Osvik, Adi Shamir, and Eran Tromer
 * (http://www.tau.ac.il/~tromer/papers/cache.pdf)
 */
-rustsecp256k1_v0_1_0_ge_storage_cmov(&adds, &(*ctx->prec)[j][i], i == bits);
+rustsecp256k1_v0_1_1_ge_storage_cmov(&adds, &(*ctx->prec)[j][i], i == bits);
}
-rustsecp256k1_v0_1_0_ge_from_storage(&add, &adds);
+rustsecp256k1_v0_1_1_ge_from_storage(&add, &adds);
-rustsecp256k1_v0_1_0_gej_add_ge(r, r, &add);
+rustsecp256k1_v0_1_1_gej_add_ge(r, r, &add);
}
bits = 0;
-rustsecp256k1_v0_1_0_ge_clear(&add);
+rustsecp256k1_v0_1_1_ge_clear(&add);
-rustsecp256k1_v0_1_0_scalar_clear(&gnb);
+rustsecp256k1_v0_1_1_scalar_clear(&gnb);
}

-/* Setup blinding values for rustsecp256k1_v0_1_0_ecmult_gen. */
+/* Setup blinding values for rustsecp256k1_v0_1_1_ecmult_gen. */
-static void rustsecp256k1_v0_1_0_ecmult_gen_blind(rustsecp256k1_v0_1_0_ecmult_gen_context *ctx, const unsigned char *seed32) {
+static void rustsecp256k1_v0_1_1_ecmult_gen_blind(rustsecp256k1_v0_1_1_ecmult_gen_context *ctx, const unsigned char *seed32) {
-rustsecp256k1_v0_1_0_scalar b;
+rustsecp256k1_v0_1_1_scalar b;
-rustsecp256k1_v0_1_0_gej gb;
+rustsecp256k1_v0_1_1_gej gb;
-rustsecp256k1_v0_1_0_fe s;
+rustsecp256k1_v0_1_1_fe s;
unsigned char nonce32[32];
-rustsecp256k1_v0_1_0_rfc6979_hmac_sha256 rng;
+rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 rng;
int retry;
unsigned char keydata[64] = {0};
if (seed32 == NULL) {
/* When seed is NULL, reset the initial point and blinding value. */
-rustsecp256k1_v0_1_0_gej_set_ge(&ctx->initial, &rustsecp256k1_v0_1_0_ge_const_g);
+rustsecp256k1_v0_1_1_gej_set_ge(&ctx->initial, &rustsecp256k1_v0_1_1_ge_const_g);
-rustsecp256k1_v0_1_0_gej_neg(&ctx->initial, &ctx->initial);
+rustsecp256k1_v0_1_1_gej_neg(&ctx->initial, &ctx->initial);
-rustsecp256k1_v0_1_0_scalar_set_int(&ctx->blind, 1);
+rustsecp256k1_v0_1_1_scalar_set_int(&ctx->blind, 1);
}
/* The prior blinding value (if not reset) is chained forward by including it in the hash. */
-rustsecp256k1_v0_1_0_scalar_get_b32(nonce32, &ctx->blind);
+rustsecp256k1_v0_1_1_scalar_get_b32(nonce32, &ctx->blind);
/** Using a CSPRNG allows a failure free interface, avoids needing large amounts of random data,
 * and guards against weak or adversarial seeds. This is a simpler and safer interface than
 * asking the caller for blinding values directly and expecting them to retry on failure.
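The inner loop above reads every one of the 16 table entries and keeps the wanted one with a conditional move, so the secret nibble is never used as an array index. A minimal byte-array version of the same scan (illustrative only; the real code moves ge_storage structs, and ct_lookup is a hypothetical name):

#include <stdint.h>
#include <string.h>

static void ct_lookup(uint8_t out[32], const uint8_t table[16][32], unsigned int index) {
    unsigned int i, k;
    memset(out, 0, 32);
    for (i = 0; i < 16; i++) {
        uint8_t mask = (uint8_t)-(uint8_t)(i == index);  /* 0xFF only for the wanted entry */
        for (k = 0; k < 32; k++) {
            out[k] |= table[i][k] & mask;  /* every entry is read, only one survives */
        }
    }
}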
@@ -181,31 +181,31 @@ static void rustsecp256k1_v0_1_0_ecmult_gen_blind(rustsecp256k1_v0_1_0_ecmult_ge
if (seed32 != NULL) {
memcpy(keydata + 32, seed32, 32);
}
-rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_initialize(&rng, keydata, seed32 ? 64 : 32);
+rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_initialize(&rng, keydata, seed32 ? 64 : 32);
memset(keydata, 0, sizeof(keydata));
/* Retry for out of range results to achieve uniformity. */
do {
-rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_generate(&rng, nonce32, 32);
+rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32);
-retry = !rustsecp256k1_v0_1_0_fe_set_b32(&s, nonce32);
+retry = !rustsecp256k1_v0_1_1_fe_set_b32(&s, nonce32);
-retry |= rustsecp256k1_v0_1_0_fe_is_zero(&s);
+retry |= rustsecp256k1_v0_1_1_fe_is_zero(&s);
} while (retry); /* This branch true is cryptographically unreachable. Requires sha256_hmac output > Fp. */
/* Randomize the projection to defend against multiplier sidechannels. */
-rustsecp256k1_v0_1_0_gej_rescale(&ctx->initial, &s);
+rustsecp256k1_v0_1_1_gej_rescale(&ctx->initial, &s);
-rustsecp256k1_v0_1_0_fe_clear(&s);
+rustsecp256k1_v0_1_1_fe_clear(&s);
do {
-rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_generate(&rng, nonce32, 32);
+rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32);
-rustsecp256k1_v0_1_0_scalar_set_b32(&b, nonce32, &retry);
+rustsecp256k1_v0_1_1_scalar_set_b32(&b, nonce32, &retry);
/* A blinding value of 0 works, but would undermine the projection hardening. */
-retry |= rustsecp256k1_v0_1_0_scalar_is_zero(&b);
+retry |= rustsecp256k1_v0_1_1_scalar_is_zero(&b);
} while (retry); /* This branch true is cryptographically unreachable. Requires sha256_hmac output > order. */
-rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_finalize(&rng);
+rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_finalize(&rng);
memset(nonce32, 0, 32);
-rustsecp256k1_v0_1_0_ecmult_gen(ctx, &gb, &b);
+rustsecp256k1_v0_1_1_ecmult_gen(ctx, &gb, &b);
-rustsecp256k1_v0_1_0_scalar_negate(&b, &b);
+rustsecp256k1_v0_1_1_scalar_negate(&b, &b);
ctx->blind = b;
ctx->initial = gb;
-rustsecp256k1_v0_1_0_scalar_clear(&b);
+rustsecp256k1_v0_1_1_scalar_clear(&b);
-rustsecp256k1_v0_1_0_gej_clear(&gb);
+rustsecp256k1_v0_1_1_gej_clear(&gb);
}

#endif /* SECP256K1_ECMULT_GEN_IMPL_H */

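With the blinding set up above, ecmult_gen never multiplies the raw secret against the fixed table: it runs the table walk on (n + blind) and starts the accumulator at initial = -blind*G, which telescopes back to n*G. The arithmetic, with plain integers standing in for scalars and points and a hypothetical function name (illustrative only):

static long blinded_mult(long n, long g, long blind) {
    long initial = -blind * g;  /* corresponds to ctx->initial */
    long gnb = n + blind;       /* corresponds to scalar_add(&gnb, gn, &ctx->blind) */
    return initial + gnb * g;   /* (-blind + n + blind) * g == n * g */
}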
File diff suppressed because it is too large
@@ -33,100 +33,100 @@
#include "util.h"

/** Normalize a field element. */
-static void rustsecp256k1_v0_1_0_fe_normalize(rustsecp256k1_v0_1_0_fe *r);
+static void rustsecp256k1_v0_1_1_fe_normalize(rustsecp256k1_v0_1_1_fe *r);

/** Weakly normalize a field element: reduce it magnitude to 1, but don't fully normalize. */
-static void rustsecp256k1_v0_1_0_fe_normalize_weak(rustsecp256k1_v0_1_0_fe *r);
+static void rustsecp256k1_v0_1_1_fe_normalize_weak(rustsecp256k1_v0_1_1_fe *r);

/** Normalize a field element, without constant-time guarantee. */
-static void rustsecp256k1_v0_1_0_fe_normalize_var(rustsecp256k1_v0_1_0_fe *r);
+static void rustsecp256k1_v0_1_1_fe_normalize_var(rustsecp256k1_v0_1_1_fe *r);

/** Verify whether a field element represents zero i.e. would normalize to a zero value. The field
 * implementation may optionally normalize the input, but this should not be relied upon. */
-static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero(rustsecp256k1_v0_1_0_fe *r);
+static int rustsecp256k1_v0_1_1_fe_normalizes_to_zero(rustsecp256k1_v0_1_1_fe *r);

/** Verify whether a field element represents zero i.e. would normalize to a zero value. The field
 * implementation may optionally normalize the input, but this should not be relied upon. */
-static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(rustsecp256k1_v0_1_0_fe *r);
+static int rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(rustsecp256k1_v0_1_1_fe *r);

/** Set a field element equal to a small integer. Resulting field element is normalized. */
-static void rustsecp256k1_v0_1_0_fe_set_int(rustsecp256k1_v0_1_0_fe *r, int a);
+static void rustsecp256k1_v0_1_1_fe_set_int(rustsecp256k1_v0_1_1_fe *r, int a);

/** Sets a field element equal to zero, initializing all fields. */
-static void rustsecp256k1_v0_1_0_fe_clear(rustsecp256k1_v0_1_0_fe *a);
+static void rustsecp256k1_v0_1_1_fe_clear(rustsecp256k1_v0_1_1_fe *a);

/** Verify whether a field element is zero. Requires the input to be normalized. */
-static int rustsecp256k1_v0_1_0_fe_is_zero(const rustsecp256k1_v0_1_0_fe *a);
+static int rustsecp256k1_v0_1_1_fe_is_zero(const rustsecp256k1_v0_1_1_fe *a);

/** Check the "oddness" of a field element. Requires the input to be normalized. */
-static int rustsecp256k1_v0_1_0_fe_is_odd(const rustsecp256k1_v0_1_0_fe *a);
+static int rustsecp256k1_v0_1_1_fe_is_odd(const rustsecp256k1_v0_1_1_fe *a);

/** Compare two field elements. Requires magnitude-1 inputs. */
-static int rustsecp256k1_v0_1_0_fe_equal(const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe *b);
+static int rustsecp256k1_v0_1_1_fe_equal(const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe *b);

-/** Same as rustsecp256k1_v0_1_0_fe_equal, but may be variable time. */
+/** Same as rustsecp256k1_v0_1_1_fe_equal, but may be variable time. */
-static int rustsecp256k1_v0_1_0_fe_equal_var(const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe *b);
+static int rustsecp256k1_v0_1_1_fe_equal_var(const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe *b);

/** Compare two field elements. Requires both inputs to be normalized */
-static int rustsecp256k1_v0_1_0_fe_cmp_var(const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe *b);
+static int rustsecp256k1_v0_1_1_fe_cmp_var(const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe *b);

/** Set a field element equal to 32-byte big endian value. If successful, the resulting field element is normalized. */
-static int rustsecp256k1_v0_1_0_fe_set_b32(rustsecp256k1_v0_1_0_fe *r, const unsigned char *a);
+static int rustsecp256k1_v0_1_1_fe_set_b32(rustsecp256k1_v0_1_1_fe *r, const unsigned char *a);

/** Convert a field element to a 32-byte big endian value. Requires the input to be normalized */
-static void rustsecp256k1_v0_1_0_fe_get_b32(unsigned char *r, const rustsecp256k1_v0_1_0_fe *a);
+static void rustsecp256k1_v0_1_1_fe_get_b32(unsigned char *r, const rustsecp256k1_v0_1_1_fe *a);

/** Set a field element equal to the additive inverse of another. Takes a maximum magnitude of the input
 * as an argument. The magnitude of the output is one higher. */
-static void rustsecp256k1_v0_1_0_fe_negate(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, int m);
+static void rustsecp256k1_v0_1_1_fe_negate(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, int m);

/** Multiplies the passed field element with a small integer constant. Multiplies the magnitude by that
 * small integer. */
-static void rustsecp256k1_v0_1_0_fe_mul_int(rustsecp256k1_v0_1_0_fe *r, int a);
+static void rustsecp256k1_v0_1_1_fe_mul_int(rustsecp256k1_v0_1_1_fe *r, int a);

/** Adds a field element to another. The result has the sum of the inputs' magnitudes as magnitude. */
-static void rustsecp256k1_v0_1_0_fe_add(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a);
+static void rustsecp256k1_v0_1_1_fe_add(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a);

/** Sets a field element to be the product of two others. Requires the inputs' magnitudes to be at most 8.
 * The output magnitude is 1 (but not guaranteed to be normalized). */
-static void rustsecp256k1_v0_1_0_fe_mul(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe * SECP256K1_RESTRICT b);
+static void rustsecp256k1_v0_1_1_fe_mul(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe * SECP256K1_RESTRICT b);

/** Sets a field element to be the square of another. Requires the input's magnitude to be at most 8.
 * The output magnitude is 1 (but not guaranteed to be normalized). */
-static void rustsecp256k1_v0_1_0_fe_sqr(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a);
+static void rustsecp256k1_v0_1_1_fe_sqr(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a);

/** If a has a square root, it is computed in r and 1 is returned. If a does not
 * have a square root, the root of its negation is computed and 0 is returned.
 * The input's magnitude can be at most 8. The output magnitude is 1 (but not
 * guaranteed to be normalized). The result in r will always be a square
 * itself. */
-static int rustsecp256k1_v0_1_0_fe_sqrt(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a);
+static int rustsecp256k1_v0_1_1_fe_sqrt(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a);

/** Checks whether a field element is a quadratic residue. */
-static int rustsecp256k1_v0_1_0_fe_is_quad_var(const rustsecp256k1_v0_1_0_fe *a);
+static int rustsecp256k1_v0_1_1_fe_is_quad_var(const rustsecp256k1_v0_1_1_fe *a);

/** Sets a field element to be the (modular) inverse of another. Requires the input's magnitude to be
 * at most 8. The output magnitude is 1 (but not guaranteed to be normalized). */
-static void rustsecp256k1_v0_1_0_fe_inv(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a);
+static void rustsecp256k1_v0_1_1_fe_inv(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a);

-/** Potentially faster version of rustsecp256k1_v0_1_0_fe_inv, without constant-time guarantee. */
+/** Potentially faster version of rustsecp256k1_v0_1_1_fe_inv, without constant-time guarantee. */
-static void rustsecp256k1_v0_1_0_fe_inv_var(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a);
+static void rustsecp256k1_v0_1_1_fe_inv_var(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a);

/** Calculate the (modular) inverses of a batch of field elements. Requires the inputs' magnitudes to be
 * at most 8. The output magnitudes are 1 (but not guaranteed to be normalized). The inputs and
 * outputs must not overlap in memory. */
-static void rustsecp256k1_v0_1_0_fe_inv_all_var(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, size_t len);
+static void rustsecp256k1_v0_1_1_fe_inv_all_var(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, size_t len);

/** Convert a field element to the storage type. */
-static void rustsecp256k1_v0_1_0_fe_to_storage(rustsecp256k1_v0_1_0_fe_storage *r, const rustsecp256k1_v0_1_0_fe *a);
+static void rustsecp256k1_v0_1_1_fe_to_storage(rustsecp256k1_v0_1_1_fe_storage *r, const rustsecp256k1_v0_1_1_fe *a);

/** Convert a field element back from the storage type. */
-static void rustsecp256k1_v0_1_0_fe_from_storage(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe_storage *a);
+static void rustsecp256k1_v0_1_1_fe_from_storage(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe_storage *a);

/** If flag is true, set *r equal to *a; otherwise leave it. Constant-time. */
-static void rustsecp256k1_v0_1_0_fe_storage_cmov(rustsecp256k1_v0_1_0_fe_storage *r, const rustsecp256k1_v0_1_0_fe_storage *a, int flag);
+static void rustsecp256k1_v0_1_1_fe_storage_cmov(rustsecp256k1_v0_1_1_fe_storage *r, const rustsecp256k1_v0_1_1_fe_storage *a, int flag);

/** If flag is true, set *r equal to *a; otherwise leave it. Constant-time. */
-static void rustsecp256k1_v0_1_0_fe_cmov(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, int flag);
+static void rustsecp256k1_v0_1_1_fe_cmov(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, int flag);

#endif /* SECP256K1_FIELD_H */

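The magnitude rules documented in these declarations compose as follows: add sums the magnitudes, negate adds one, mul and sqr require inputs of magnitude at most 8 and return magnitude 1, and normalize resets the element to a canonical magnitude-1 value. A usage sketch (illustrative only; the rustsecp256k1_v0_1_1_ prefixes are dropped for brevity, so the names as written are hypothetical shorthand):

fe a, b, c;
fe_set_int(&a, 5);   /* magnitude 1, normalized */
fe_set_int(&b, 7);   /* magnitude 1, normalized */
c = a;
fe_add(&c, &b);      /* magnitude 2: still a valid fe_mul input (limit is 8) */
fe_mul(&c, &c, &a);  /* output magnitude 1, but not normalized */
fe_normalize(&c);    /* canonical form: now safe for fe_is_zero / fe_is_odd */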
@@ -18,7 +18,7 @@ typedef struct {
int magnitude;
int normalized;
#endif
-} rustsecp256k1_v0_1_0_fe;
+} rustsecp256k1_v0_1_1_fe;

/* Unpacks a constant into a overlapping multi-limbed FE element. */
#define SECP256K1_FE_CONST_INNER(d7, d6, d5, d4, d3, d2, d1, d0) { \
@@ -42,7 +42,7 @@ typedef struct {

typedef struct {
uint32_t n[8];
-} rustsecp256k1_v0_1_0_fe_storage;
+} rustsecp256k1_v0_1_1_fe_storage;

#define SECP256K1_FE_STORAGE_CONST(d7, d6, d5, d4, d3, d2, d1, d0) {{ (d0), (d1), (d2), (d3), (d4), (d5), (d6), (d7) }}
#define SECP256K1_FE_STORAGE_CONST_GET(d) d.n[7], d.n[6], d.n[5], d.n[4],d.n[3], d.n[2], d.n[1], d.n[0]
@@ -11,7 +11,7 @@
#include "field.h"

#ifdef VERIFY
-static void rustsecp256k1_v0_1_0_fe_verify(const rustsecp256k1_v0_1_0_fe *a) {
+static void rustsecp256k1_v0_1_1_fe_verify(const rustsecp256k1_v0_1_1_fe *a) {
const uint32_t *d = a->n;
int m = a->normalized ? 1 : 2 * a->magnitude, r = 1;
r &= (d[0] <= 0x3FFFFFFUL * m);
@@ -39,7 +39,7 @@ static void rustsecp256k1_v0_1_0_fe_verify(const rustsecp256k1_v0_1_0_fe *a) {
}
#endif

-static void rustsecp256k1_v0_1_0_fe_normalize(rustsecp256k1_v0_1_0_fe *r) {
+static void rustsecp256k1_v0_1_1_fe_normalize(rustsecp256k1_v0_1_1_fe *r) {
uint32_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4],
t5 = r->n[5], t6 = r->n[6], t7 = r->n[7], t8 = r->n[8], t9 = r->n[9];

@@ -90,11 +90,11 @@ static void rustsecp256k1_v0_1_0_fe_normalize(rustsecp256k1_v0_1_0_fe *r) {
#ifdef VERIFY
r->magnitude = 1;
r->normalized = 1;
-rustsecp256k1_v0_1_0_fe_verify(r);
+rustsecp256k1_v0_1_1_fe_verify(r);
#endif
}

-static void rustsecp256k1_v0_1_0_fe_normalize_weak(rustsecp256k1_v0_1_0_fe *r) {
+static void rustsecp256k1_v0_1_1_fe_normalize_weak(rustsecp256k1_v0_1_1_fe *r) {
uint32_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4],
t5 = r->n[5], t6 = r->n[6], t7 = r->n[7], t8 = r->n[8], t9 = r->n[9];

@@ -121,11 +121,11 @@ static void rustsecp256k1_v0_1_0_fe_normalize_weak(rustsecp256k1_v0_1_0_fe *r) {

#ifdef VERIFY
r->magnitude = 1;
-rustsecp256k1_v0_1_0_fe_verify(r);
+rustsecp256k1_v0_1_1_fe_verify(r);
#endif
}

-static void rustsecp256k1_v0_1_0_fe_normalize_var(rustsecp256k1_v0_1_0_fe *r) {
+static void rustsecp256k1_v0_1_1_fe_normalize_var(rustsecp256k1_v0_1_1_fe *r) {
uint32_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4],
t5 = r->n[5], t6 = r->n[6], t7 = r->n[7], t8 = r->n[8], t9 = r->n[9];

@@ -177,11 +177,11 @@ static void rustsecp256k1_v0_1_0_fe_normalize_var(rustsecp256k1_v0_1_0_fe *r) {
#ifdef VERIFY
r->magnitude = 1;
r->normalized = 1;
-rustsecp256k1_v0_1_0_fe_verify(r);
+rustsecp256k1_v0_1_1_fe_verify(r);
#endif
}

-static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero(rustsecp256k1_v0_1_0_fe *r) {
+static int rustsecp256k1_v0_1_1_fe_normalizes_to_zero(rustsecp256k1_v0_1_1_fe *r) {
uint32_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4],
t5 = r->n[5], t6 = r->n[6], t7 = r->n[7], t8 = r->n[8], t9 = r->n[9];

@@ -210,7 +210,7 @@ static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero(rustsecp256k1_v0_1_0_fe *r
return (z0 == 0) | (z1 == 0x3FFFFFFUL);
}

-static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(rustsecp256k1_v0_1_0_fe *r) {
+static int rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(rustsecp256k1_v0_1_1_fe *r) {
uint32_t t0, t1, t2, t3, t4, t5, t6, t7, t8, t9;
uint32_t z0, z1;
uint32_t x;
@@ -262,34 +262,34 @@ static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(rustsecp256k1_v0_1_0_f
return (z0 == 0) | (z1 == 0x3FFFFFFUL);
}

-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_set_int(rustsecp256k1_v0_1_0_fe *r, int a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_set_int(rustsecp256k1_v0_1_1_fe *r, int a) {
r->n[0] = a;
r->n[1] = r->n[2] = r->n[3] = r->n[4] = r->n[5] = r->n[6] = r->n[7] = r->n[8] = r->n[9] = 0;
#ifdef VERIFY
r->magnitude = 1;
r->normalized = 1;
-rustsecp256k1_v0_1_0_fe_verify(r);
+rustsecp256k1_v0_1_1_fe_verify(r);
#endif
}

-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_fe_is_zero(const rustsecp256k1_v0_1_0_fe *a) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_fe_is_zero(const rustsecp256k1_v0_1_1_fe *a) {
const uint32_t *t = a->n;
#ifdef VERIFY
VERIFY_CHECK(a->normalized);
-rustsecp256k1_v0_1_0_fe_verify(a);
+rustsecp256k1_v0_1_1_fe_verify(a);
#endif
return (t[0] | t[1] | t[2] | t[3] | t[4] | t[5] | t[6] | t[7] | t[8] | t[9]) == 0;
}

-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_fe_is_odd(const rustsecp256k1_v0_1_0_fe *a) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_fe_is_odd(const rustsecp256k1_v0_1_1_fe *a) {
#ifdef VERIFY
VERIFY_CHECK(a->normalized);
-rustsecp256k1_v0_1_0_fe_verify(a);
+rustsecp256k1_v0_1_1_fe_verify(a);
#endif
return a->n[0] & 1;
}

-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_clear(rustsecp256k1_v0_1_0_fe *a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_clear(rustsecp256k1_v0_1_1_fe *a) {
int i;
#ifdef VERIFY
a->magnitude = 0;
@@ -300,13 +300,13 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_clear(rustsecp256k1_v0_1_0_
}
}

-static int rustsecp256k1_v0_1_0_fe_cmp_var(const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe *b) {
+static int rustsecp256k1_v0_1_1_fe_cmp_var(const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe *b) {
int i;
#ifdef VERIFY
VERIFY_CHECK(a->normalized);
VERIFY_CHECK(b->normalized);
-rustsecp256k1_v0_1_0_fe_verify(a);
+rustsecp256k1_v0_1_1_fe_verify(a);
-rustsecp256k1_v0_1_0_fe_verify(b);
+rustsecp256k1_v0_1_1_fe_verify(b);
#endif
for (i = 9; i >= 0; i--) {
if (a->n[i] > b->n[i]) {
@@ -319,7 +319,7 @@ static int rustsecp256k1_v0_1_0_fe_cmp_var(const rustsecp256k1_v0_1_0_fe *a, con
return 0;
}

-static int rustsecp256k1_v0_1_0_fe_set_b32(rustsecp256k1_v0_1_0_fe *r, const unsigned char *a) {
+static int rustsecp256k1_v0_1_1_fe_set_b32(rustsecp256k1_v0_1_1_fe *r, const unsigned char *a) {
r->n[0] = (uint32_t)a[31] | ((uint32_t)a[30] << 8) | ((uint32_t)a[29] << 16) | ((uint32_t)(a[28] & 0x3) << 24);
r->n[1] = (uint32_t)((a[28] >> 2) & 0x3f) | ((uint32_t)a[27] << 6) | ((uint32_t)a[26] << 14) | ((uint32_t)(a[25] & 0xf) << 22);
r->n[2] = (uint32_t)((a[25] >> 4) & 0xf) | ((uint32_t)a[24] << 4) | ((uint32_t)a[23] << 12) | ((uint32_t)(a[22] & 0x3f) << 20);
@@ -337,16 +337,16 @@ static int rustsecp256k1_v0_1_0_fe_set_b32(rustsecp256k1_v0_1_0_fe *r, const uns
#ifdef VERIFY
r->magnitude = 1;
r->normalized = 1;
-rustsecp256k1_v0_1_0_fe_verify(r);
+rustsecp256k1_v0_1_1_fe_verify(r);
#endif
return 1;
}

/** Convert a field element to a 32-byte big endian value. Requires the input to be normalized */
-static void rustsecp256k1_v0_1_0_fe_get_b32(unsigned char *r, const rustsecp256k1_v0_1_0_fe *a) {
+static void rustsecp256k1_v0_1_1_fe_get_b32(unsigned char *r, const rustsecp256k1_v0_1_1_fe *a) {
#ifdef VERIFY
VERIFY_CHECK(a->normalized);
-rustsecp256k1_v0_1_0_fe_verify(a);
+rustsecp256k1_v0_1_1_fe_verify(a);
#endif
r[0] = (a->n[9] >> 14) & 0xff;
r[1] = (a->n[9] >> 6) & 0xff;
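fe_set_b32 above packs the 32 big-endian input bytes into ten limbs of 26 bits each (the top limb keeps the remaining 22 bits), which is what the shift-and-mask expressions encode. An equivalent bit-by-bit formulation, using a hypothetical helper name (illustrative only, not from the library sources):

#include <stdint.h>

static uint32_t limb(const unsigned char b[32], int k) {
    uint32_t v = 0;
    int bit;
    for (bit = 0; bit < 26; bit++) {
        int pos = 26 * k + bit;  /* absolute bit position, least significant bit first */
        if (pos < 256) {
            v |= (uint32_t)((b[31 - pos / 8] >> (pos % 8)) & 1) << bit;
        }
    }
    return v;  /* matches r->n[k] for k = 0..9 */
}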
@@ -382,10 +382,10 @@ static void rustsecp256k1_v0_1_0_fe_get_b32(unsigned char *r, const rustsecp256k
r[31] = a->n[0] & 0xff;
}

-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_negate(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, int m) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_negate(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, int m) {
#ifdef VERIFY
VERIFY_CHECK(a->magnitude <= m);
-rustsecp256k1_v0_1_0_fe_verify(a);
+rustsecp256k1_v0_1_1_fe_verify(a);
#endif
r->n[0] = 0x3FFFC2FUL * 2 * (m + 1) - a->n[0];
r->n[1] = 0x3FFFFBFUL * 2 * (m + 1) - a->n[1];
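fe_negate above subtracts each limb of a from a multiple of the corresponding limb of the prime: because every limb of a is bounded by its magnitude m, adding 2*(m+1) prime-sized terms first keeps each per-limb subtraction non-negative while staying congruent to -a modulo p, at the cost of magnitude m+1. The small-number analogue (illustrative only, hypothetical helper):

static unsigned long neg_mod(unsigned long a, unsigned long p, unsigned long m) {
    /* valid whenever a <= m * p, which is what the magnitude bound guarantees */
    return (m + 1) * p - a;  /* non-negative and congruent to -a modulo p */
}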
@@ -400,11 +400,11 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_negate(rustsecp256k1_v0_1_0
#ifdef VERIFY
r->magnitude = m + 1;
r->normalized = 0;
-rustsecp256k1_v0_1_0_fe_verify(r);
+rustsecp256k1_v0_1_1_fe_verify(r);
#endif
}

-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_mul_int(rustsecp256k1_v0_1_0_fe *r, int a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_mul_int(rustsecp256k1_v0_1_1_fe *r, int a) {
r->n[0] *= a;
r->n[1] *= a;
r->n[2] *= a;
@@ -418,13 +418,13 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_mul_int(rustsecp256k1_v0_1_
#ifdef VERIFY
r->magnitude *= a;
r->normalized = 0;
-rustsecp256k1_v0_1_0_fe_verify(r);
+rustsecp256k1_v0_1_1_fe_verify(r);
#endif
}

-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_add(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_add(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a) {
#ifdef VERIFY
-rustsecp256k1_v0_1_0_fe_verify(a);
+rustsecp256k1_v0_1_1_fe_verify(a);
#endif
r->n[0] += a->n[0];
r->n[1] += a->n[1];
@ -439,15 +439,15 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_add(rustsecp256k1_v0_1_0_fe
|
||||||
#ifdef VERIFY
|
#ifdef VERIFY
|
||||||
r->magnitude += a->magnitude;
|
r->magnitude += a->magnitude;
|
||||||
r->normalized = 0;
|
r->normalized = 0;
|
||||||
rustsecp256k1_v0_1_0_fe_verify(r);
|
rustsecp256k1_v0_1_1_fe_verify(r);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
#if defined(USE_EXTERNAL_ASM)
|
#if defined(USE_EXTERNAL_ASM)
|
||||||
|
|
||||||
/* External assembler implementation */
|
/* External assembler implementation */
|
||||||
void rustsecp256k1_v0_1_0_fe_mul_inner(uint32_t *r, const uint32_t *a, const uint32_t * SECP256K1_RESTRICT b);
|
void rustsecp256k1_v0_1_1_fe_mul_inner(uint32_t *r, const uint32_t *a, const uint32_t * SECP256K1_RESTRICT b);
|
||||||
void rustsecp256k1_v0_1_0_fe_sqr_inner(uint32_t *r, const uint32_t *a);
|
void rustsecp256k1_v0_1_1_fe_sqr_inner(uint32_t *r, const uint32_t *a);
|
||||||
|
|
||||||
#else
|
#else
|
||||||
|
|
||||||
|
@ -457,7 +457,7 @@ void rustsecp256k1_v0_1_0_fe_sqr_inner(uint32_t *r, const uint32_t *a);
|
||||||
#define VERIFY_BITS(x, n) do { } while(0)
|
#define VERIFY_BITS(x, n) do { } while(0)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_mul_inner(uint32_t *r, const uint32_t *a, const uint32_t * SECP256K1_RESTRICT b) {
|
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_mul_inner(uint32_t *r, const uint32_t *a, const uint32_t * SECP256K1_RESTRICT b) {
|
||||||
uint64_t c, d;
|
uint64_t c, d;
|
||||||
uint64_t u0, u1, u2, u3, u4, u5, u6, u7, u8;
|
uint64_t u0, u1, u2, u3, u4, u5, u6, u7, u8;
|
||||||
uint32_t t9, t1, t0, t2, t3, t4, t5, t6, t7;
|
uint32_t t9, t1, t0, t2, t3, t4, t5, t6, t7;
|
||||||
|
@ -787,7 +787,7 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_mul_inner(uint32_t *r, cons
|
||||||
/* [r9 r8 r7 r6 r5 r4 r3 r2 r1 r0] = [p18 p17 p16 p15 p14 p13 p12 p11 p10 p9 p8 p7 p6 p5 p4 p3 p2 p1 p0] */
|
/* [r9 r8 r7 r6 r5 r4 r3 r2 r1 r0] = [p18 p17 p16 p15 p14 p13 p12 p11 p10 p9 p8 p7 p6 p5 p4 p3 p2 p1 p0] */
|
||||||
}
|
}
|
||||||
|
|
||||||
SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_sqr_inner(uint32_t *r, const uint32_t *a) {
|
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_sqr_inner(uint32_t *r, const uint32_t *a) {
|
||||||
uint64_t c, d;
|
uint64_t c, d;
|
||||||
uint64_t u0, u1, u2, u3, u4, u5, u6, u7, u8;
|
uint64_t u0, u1, u2, u3, u4, u5, u6, u7, u8;
|
||||||
uint32_t t9, t0, t1, t2, t3, t4, t5, t6, t7;
|
uint32_t t9, t0, t1, t2, t3, t4, t5, t6, t7;
|
||||||
|
@ -1062,37 +1062,37 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_sqr_inner(uint32_t *r, cons
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_fe_mul(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe * SECP256K1_RESTRICT b) {
|
static void rustsecp256k1_v0_1_1_fe_mul(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe * SECP256K1_RESTRICT b) {
|
||||||
#ifdef VERIFY
|
#ifdef VERIFY
|
||||||
VERIFY_CHECK(a->magnitude <= 8);
|
VERIFY_CHECK(a->magnitude <= 8);
|
||||||
VERIFY_CHECK(b->magnitude <= 8);
|
VERIFY_CHECK(b->magnitude <= 8);
|
||||||
rustsecp256k1_v0_1_0_fe_verify(a);
|
rustsecp256k1_v0_1_1_fe_verify(a);
|
||||||
rustsecp256k1_v0_1_0_fe_verify(b);
|
rustsecp256k1_v0_1_1_fe_verify(b);
|
||||||
VERIFY_CHECK(r != b);
|
VERIFY_CHECK(r != b);
|
||||||
VERIFY_CHECK(a != b);
|
VERIFY_CHECK(a != b);
|
||||||
#endif
|
#endif
|
||||||
rustsecp256k1_v0_1_0_fe_mul_inner(r->n, a->n, b->n);
|
rustsecp256k1_v0_1_1_fe_mul_inner(r->n, a->n, b->n);
|
||||||
#ifdef VERIFY
|
#ifdef VERIFY
|
||||||
r->magnitude = 1;
|
r->magnitude = 1;
|
||||||
r->normalized = 0;
|
r->normalized = 0;
|
||||||
rustsecp256k1_v0_1_0_fe_verify(r);
|
rustsecp256k1_v0_1_1_fe_verify(r);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_fe_sqr(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a) {
|
static void rustsecp256k1_v0_1_1_fe_sqr(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a) {
|
||||||
#ifdef VERIFY
|
#ifdef VERIFY
|
||||||
VERIFY_CHECK(a->magnitude <= 8);
|
VERIFY_CHECK(a->magnitude <= 8);
|
||||||
rustsecp256k1_v0_1_0_fe_verify(a);
|
rustsecp256k1_v0_1_1_fe_verify(a);
|
||||||
#endif
|
#endif
|
||||||
rustsecp256k1_v0_1_0_fe_sqr_inner(r->n, a->n);
|
rustsecp256k1_v0_1_1_fe_sqr_inner(r->n, a->n);
|
||||||
#ifdef VERIFY
|
#ifdef VERIFY
|
||||||
r->magnitude = 1;
|
r->magnitude = 1;
|
||||||
r->normalized = 0;
|
r->normalized = 0;
|
||||||
rustsecp256k1_v0_1_0_fe_verify(r);
|
rustsecp256k1_v0_1_1_fe_verify(r);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_cmov(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, int flag) {
|
static SECP256K1_INLINE void rustsecp256k1_v0_1_1_fe_cmov(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, int flag) {
|
||||||
uint32_t mask0, mask1;
|
uint32_t mask0, mask1;
|
||||||
mask0 = flag + ~((uint32_t)0);
|
mask0 = flag + ~((uint32_t)0);
|
||||||
mask1 = ~mask0;
|
mask1 = ~mask0;
|
||||||
|
@ -1114,7 +1114,7 @@ static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_cmov(rustsecp256k1_v0_1_0_f
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_storage_cmov(rustsecp256k1_v0_1_0_fe_storage *r, const rustsecp256k1_v0_1_0_fe_storage *a, int flag) {
|
static SECP256K1_INLINE void rustsecp256k1_v0_1_1_fe_storage_cmov(rustsecp256k1_v0_1_1_fe_storage *r, const rustsecp256k1_v0_1_1_fe_storage *a, int flag) {
|
||||||
uint32_t mask0, mask1;
|
uint32_t mask0, mask1;
|
||||||
mask0 = flag + ~((uint32_t)0);
|
mask0 = flag + ~((uint32_t)0);
|
||||||
mask1 = ~mask0;
|
mask1 = ~mask0;
|
||||||
|
@ -1128,7 +1128,7 @@ static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_storage_cmov(rustsecp256k1_
|
||||||
r->n[7] = (r->n[7] & mask0) | (a->n[7] & mask1);
|
r->n[7] = (r->n[7] & mask0) | (a->n[7] & mask1);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_fe_to_storage(rustsecp256k1_v0_1_0_fe_storage *r, const rustsecp256k1_v0_1_0_fe *a) {
|
static void rustsecp256k1_v0_1_1_fe_to_storage(rustsecp256k1_v0_1_1_fe_storage *r, const rustsecp256k1_v0_1_1_fe *a) {
|
||||||
#ifdef VERIFY
|
#ifdef VERIFY
|
||||||
VERIFY_CHECK(a->normalized);
|
VERIFY_CHECK(a->normalized);
|
||||||
#endif
|
#endif
|
||||||
|
@ -1142,7 +1142,7 @@ static void rustsecp256k1_v0_1_0_fe_to_storage(rustsecp256k1_v0_1_0_fe_storage *
|
||||||
r->n[7] = a->n[8] >> 16 | a->n[9] << 10;
|
r->n[7] = a->n[8] >> 16 | a->n[9] << 10;
|
||||||
}
|
}
|
||||||
|
|
||||||
static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_from_storage(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe_storage *a) {
|
static SECP256K1_INLINE void rustsecp256k1_v0_1_1_fe_from_storage(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe_storage *a) {
|
||||||
r->n[0] = a->n[0] & 0x3FFFFFFUL;
|
r->n[0] = a->n[0] & 0x3FFFFFFUL;
|
||||||
r->n[1] = a->n[0] >> 26 | ((a->n[1] << 6) & 0x3FFFFFFUL);
|
r->n[1] = a->n[0] >> 26 | ((a->n[1] << 6) & 0x3FFFFFFUL);
|
||||||
r->n[2] = a->n[1] >> 20 | ((a->n[2] << 12) & 0x3FFFFFFUL);
|
r->n[2] = a->n[1] >> 20 | ((a->n[2] << 12) & 0x3FFFFFFUL);
|
||||||
|
|
|
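The fe_cmov and fe_storage_cmov hunks above all rely on the same branch-free mask idiom (mask0 = flag + ~0; mask1 = ~mask0;), so that neither the control flow nor the memory access pattern depends on the flag. A minimal self-contained sketch of that idiom, not taken from the library itself, assuming only that flag is 0 or 1:

#include <stddef.h>
#include <stdint.h>

/* Branch-free conditional move over n limbs: if flag == 0, mask0 is all ones
 * and r keeps its limbs; if flag == 1, mask0 is zero and r takes a's limbs. */
static void cmov_limbs(uint32_t *r, const uint32_t *a, size_t n, int flag) {
    uint32_t mask0 = flag + ~((uint32_t)0); /* 0xFFFFFFFF when flag == 0, 0 when flag == 1 */
    uint32_t mask1 = ~mask0;
    size_t i;
    for (i = 0; i < n; i++) {
        r[i] = (r[i] & mask0) | (a[i] & mask1);
    }
}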
@ -18,7 +18,7 @@ typedef struct {
 int magnitude;
 int normalized;
 #endif
-} rustsecp256k1_v0_1_0_fe;
+} rustsecp256k1_v0_1_1_fe;
 
 /* Unpacks a constant into a overlapping multi-limbed FE element. */
 #define SECP256K1_FE_CONST_INNER(d7, d6, d5, d4, d3, d2, d1, d0) { \
@ -37,7 +37,7 @@ typedef struct {
 uint64_t n[4];
-} rustsecp256k1_v0_1_0_fe_storage;
+} rustsecp256k1_v0_1_1_fe_storage;
 
 #define SECP256K1_FE_STORAGE_CONST(d7, d6, d5, d4, d3, d2, d1, d0) {{ \
 (d0) | (((uint64_t)(d1)) << 32), \
@ -14,7 +14,7 @@
 #ifndef SECP256K1_FIELD_INNER5X52_IMPL_H
 #define SECP256K1_FIELD_INNER5X52_IMPL_H
 
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
 /**
  * Registers: rdx:rax = multiplication accumulator
@ -284,7 +284,7 @@ __asm__ __volatile__(
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
@ -29,7 +29,7 @@
-static void rustsecp256k1_v0_1_0_fe_verify(const rustsecp256k1_v0_1_0_fe *a) {
+static void rustsecp256k1_v0_1_1_fe_verify(const rustsecp256k1_v0_1_1_fe *a) {
 /* secp256k1 'p' value defined in "Standards for Efficient Cryptography" (SEC2) 2.7.1. */
@ -50,7 +50,7 @@ static void rustsecp256k1_v0_1_0_fe_verify(const rustsecp256k1_v0_1_0_fe *a) {
-static void rustsecp256k1_v0_1_0_fe_normalize(rustsecp256k1_v0_1_0_fe *r) {
+static void rustsecp256k1_v0_1_1_fe_normalize(rustsecp256k1_v0_1_1_fe *r) {
@ -89,11 +89,11 @@ static void rustsecp256k1_v0_1_0_fe_normalize(rustsecp256k1_v0_1_0_fe *r) {
-static void rustsecp256k1_v0_1_0_fe_normalize_weak(rustsecp256k1_v0_1_0_fe *r) {
+static void rustsecp256k1_v0_1_1_fe_normalize_weak(rustsecp256k1_v0_1_1_fe *r) {
@ -113,11 +113,11 @@ static void rustsecp256k1_v0_1_0_fe_normalize_weak(rustsecp256k1_v0_1_0_fe *r) {
-static void rustsecp256k1_v0_1_0_fe_normalize_var(rustsecp256k1_v0_1_0_fe *r) {
+static void rustsecp256k1_v0_1_1_fe_normalize_var(rustsecp256k1_v0_1_1_fe *r) {
@ -157,11 +157,11 @@ static void rustsecp256k1_v0_1_0_fe_normalize_var(rustsecp256k1_v0_1_0_fe *r) {
-static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero(rustsecp256k1_v0_1_0_fe *r) {
+static int rustsecp256k1_v0_1_1_fe_normalizes_to_zero(rustsecp256k1_v0_1_1_fe *r) {
 /* z0 tracks a possible raw value of 0, z1 tracks a possible raw value of P */
@ -184,7 +184,7 @@ static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero(rustsecp256k1_v0_1_0_fe *r
-static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(rustsecp256k1_v0_1_0_fe *r) {
+static int rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(rustsecp256k1_v0_1_1_fe *r) {
@ -225,34 +225,34 @@ static int rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(rustsecp256k1_v0_1_0_f
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_set_int(rustsecp256k1_v0_1_0_fe *r, int a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_set_int(rustsecp256k1_v0_1_1_fe *r, int a) {
-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_fe_is_zero(const rustsecp256k1_v0_1_0_fe *a) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_fe_is_zero(const rustsecp256k1_v0_1_1_fe *a) {
-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_fe_is_odd(const rustsecp256k1_v0_1_0_fe *a) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_fe_is_odd(const rustsecp256k1_v0_1_1_fe *a) {
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_clear(rustsecp256k1_v0_1_0_fe *a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_clear(rustsecp256k1_v0_1_1_fe *a) {
@ -263,13 +263,13 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_clear(rustsecp256k1_v0_1_0_
-static int rustsecp256k1_v0_1_0_fe_cmp_var(const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe *b) {
+static int rustsecp256k1_v0_1_1_fe_cmp_var(const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe *b) {
@ -282,7 +282,7 @@ static int rustsecp256k1_v0_1_0_fe_cmp_var(const rustsecp256k1_v0_1_0_fe *a, con
-static int rustsecp256k1_v0_1_0_fe_set_b32(rustsecp256k1_v0_1_0_fe *r, const unsigned char *a) {
+static int rustsecp256k1_v0_1_1_fe_set_b32(rustsecp256k1_v0_1_1_fe *r, const unsigned char *a) {
@ -323,16 +323,16 @@ static int rustsecp256k1_v0_1_0_fe_set_b32(rustsecp256k1_v0_1_0_fe *r, const uns
 /** Convert a field element to a 32-byte big endian value. Requires the input to be normalized */
-static void rustsecp256k1_v0_1_0_fe_get_b32(unsigned char *r, const rustsecp256k1_v0_1_0_fe *a) {
+static void rustsecp256k1_v0_1_1_fe_get_b32(unsigned char *r, const rustsecp256k1_v0_1_1_fe *a) {
@ -368,10 +368,10 @@ static void rustsecp256k1_v0_1_0_fe_get_b32(unsigned char *r, const rustsecp256k
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_negate(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, int m) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_negate(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, int m) {
 r->n[0] = 0xFFFFEFFFFFC2FULL * 2 * (m + 1) - a->n[0];
 r->n[1] = 0xFFFFFFFFFFFFFULL * 2 * (m + 1) - a->n[1];
@ -381,11 +381,11 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_negate(rustsecp256k1_v0_1_0
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_mul_int(rustsecp256k1_v0_1_0_fe *r, int a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_mul_int(rustsecp256k1_v0_1_1_fe *r, int a) {
@ -394,13 +394,13 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_mul_int(rustsecp256k1_v0_1_
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_add(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_add(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a) {
@ -410,41 +410,41 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_add(rustsecp256k1_v0_1_0_fe
-static void rustsecp256k1_v0_1_0_fe_mul(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe * SECP256K1_RESTRICT b) {
+static void rustsecp256k1_v0_1_1_fe_mul(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe * SECP256K1_RESTRICT b) {
-static void rustsecp256k1_v0_1_0_fe_sqr(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a) {
+static void rustsecp256k1_v0_1_1_fe_sqr(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a) {
-static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_cmov(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, int flag) {
+static SECP256K1_INLINE void rustsecp256k1_v0_1_1_fe_cmov(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, int flag) {
@ -461,7 +461,7 @@ static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_cmov(rustsecp256k1_v0_1_0_f
-static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_storage_cmov(rustsecp256k1_v0_1_0_fe_storage *r, const rustsecp256k1_v0_1_0_fe_storage *a, int flag) {
+static SECP256K1_INLINE void rustsecp256k1_v0_1_1_fe_storage_cmov(rustsecp256k1_v0_1_1_fe_storage *r, const rustsecp256k1_v0_1_1_fe_storage *a, int flag) {
@ -471,7 +471,7 @@ static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_storage_cmov(rustsecp256k1_
-static void rustsecp256k1_v0_1_0_fe_to_storage(rustsecp256k1_v0_1_0_fe_storage *r, const rustsecp256k1_v0_1_0_fe *a) {
+static void rustsecp256k1_v0_1_1_fe_to_storage(rustsecp256k1_v0_1_1_fe_storage *r, const rustsecp256k1_v0_1_1_fe *a) {
@ -481,7 +481,7 @@ static void rustsecp256k1_v0_1_0_fe_to_storage(rustsecp256k1_v0_1_0_fe_storage *
-static SECP256K1_INLINE void rustsecp256k1_v0_1_0_fe_from_storage(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe_storage *a) {
+static SECP256K1_INLINE void rustsecp256k1_v0_1_1_fe_from_storage(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe_storage *a) {
 r->n[0] = a->n[0] & 0xFFFFFFFFFFFFFULL;
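The fe_negate hunk above shows the negation trick used by this field code: rather than reducing the input first, each limb is subtracted from the matching limb of 2*(m+1)*p, where m is the caller-supplied bound on the input's magnitude, so no per-limb subtraction can underflow and the result's magnitude becomes m+1. A sketch of the idea, using only the two limb constants that appear in the diff (the remaining limbs are handled the same way; this is an illustration, not the full library routine):

#include <stdint.h>

/* Negate a 5x52-limb field element of magnitude at most m by subtracting it
 * limbwise from 2*(m+1)*p; the result is congruent to -a mod p and is not
 * normalized, matching the VERIFY bookkeeping shown in the hunk above. */
static void fe_negate_sketch(uint64_t r[5], const uint64_t a[5], int m) {
    r[0] = 0xFFFFEFFFFFC2FULL * 2 * (m + 1) - a[0];   /* low 52 bits of p */
    r[1] = 0xFFFFFFFFFFFFFULL * 2 * (m + 1) - a[1];   /* next 52 bits of p */
    /* limbs 2..4 are treated analogously with the corresponding limbs of p */
}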
@ -15,7 +15,7 @@
 #define VERIFY_BITS(x, n) do { } while(0)
 #endif
 
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
 uint128_t c, d;
@ -154,7 +154,7 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_mul_inner(uint64_t *r, cons
 /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
 }
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
@ -22,21 +22,21 @@
 #error "Please select field implementation"
 #endif
 
-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_fe_equal(const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe *b) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_fe_equal(const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe *b) {
-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_fe_equal_var(const rustsecp256k1_v0_1_0_fe *a, const rustsecp256k1_v0_1_0_fe *b) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_fe_equal_var(const rustsecp256k1_v0_1_1_fe *a, const rustsecp256k1_v0_1_1_fe *b) {
-static int rustsecp256k1_v0_1_0_fe_sqrt(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a) {
+static int rustsecp256k1_v0_1_1_fe_sqrt(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a) {
 /** Given that p is congruent to 3 mod 4, we can compute the square root of
  * a mod p as the (p+1)/4'th power of a.
@ -46,7 +46,7 @@ static int rustsecp256k1_v0_1_0_fe_sqrt(rustsecp256k1_v0_1_0_fe *r, const rustse
  * Also because (p+1)/4 is an even number, the computed square root is
  * itself always a square (a ** ((p+1)/4) is the square of a ** ((p+1)/8)).
  */
-rustsecp256k1_v0_1_0_fe x2, x3, x6, x9, x11, x22, x44, x88, x176, x220, x223, t1;
+rustsecp256k1_v0_1_1_fe x2, x3, x6, x9, x11, x22, x44, x88, x176, x220, x223, t1;
 int j;
 
 VERIFY_CHECK(r != a);
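The derivation the comment above leans on is Euler's criterion; spelled out (standard number theory, not specific to this commit), for the field prime p with p congruent to 3 mod 4:

\[
\left(a^{(p+1)/4}\right)^{2} \;=\; a^{(p+1)/2} \;=\; a \cdot a^{(p-1)/2} \;\equiv\; a \cdot \left(\tfrac{a}{p}\right) \pmod{p},
\]

so the candidate root squares back to a exactly when a is a quadratic residue (Legendre symbol 1). That is why the function ends by squaring its result and comparing it against the input instead of trusting the exponentiation unconditionally, and why p congruent to 3 mod 4 is needed for (p+1)/4 to be an integer at all.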
@ -56,88 +56,88 @@ static int rustsecp256k1_v0_1_0_fe_sqrt(rustsecp256k1_v0_1_0_fe *r, const rustse
 * 1, [2], 3, 6, 9, 11, [22], 44, 88, 176, 220, [223]
 */
-rustsecp256k1_v0_1_0_fe_sqr(&x2, a);
+rustsecp256k1_v0_1_1_fe_sqr(&x2, a);
-rustsecp256k1_v0_1_0_fe_mul(&x2, &x2, a);
+rustsecp256k1_v0_1_1_fe_mul(&x2, &x2, a);
 /* The final result is then assembled using a sliding window over the blocks. */
-rustsecp256k1_v0_1_0_fe_sqr(r, &t1);
+rustsecp256k1_v0_1_1_fe_sqr(r, &t1);
 /* Check that a square root was actually calculated */
-rustsecp256k1_v0_1_0_fe_sqr(&t1, r);
+rustsecp256k1_v0_1_1_fe_sqr(&t1, r);
-return rustsecp256k1_v0_1_0_fe_equal(&t1, a);
+return rustsecp256k1_v0_1_1_fe_equal(&t1, a);
 }
-static void rustsecp256k1_v0_1_0_fe_inv(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a) {
+static void rustsecp256k1_v0_1_1_fe_inv(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a) {
-rustsecp256k1_v0_1_0_fe x2, x3, x6, x9, x11, x22, x44, x88, x176, x220, x223, t1;
+rustsecp256k1_v0_1_1_fe x2, x3, x6, x9, x11, x22, x44, x88, x176, x220, x223, t1;
 /** The binary representation of (p - 2) has 5 blocks of 1s, with lengths in
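Each step of the x2/x3/x6/.../x223 ladder renamed in the hunk above uses the same identity: if x_k equals a raised to an exponent of k consecutive 1 bits, then k squarings followed by one multiplication by x_k double the run length,

\[
x_k = a^{2^{k}-1} \quad\Longrightarrow\quad x_{2k} \;=\; \bigl(x_k\bigr)^{2^{k}} \cdot x_k \;=\; a^{2^{2k}-1}.
\]

The closing sequence of squarings interleaved with multiplications by x22, x2 and a then places those runs of 1 bits at the right offsets of the target exponent.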
@ -145,93 +145,93 @@ static void rustsecp256k1_v0_1_0_fe_inv(rustsecp256k1_v0_1_0_fe *r, const rustse
 * [1], [2], 3, 6, 9, 11, [22], 44, 88, 176, 220, [223]
 */
-rustsecp256k1_v0_1_0_fe_sqr(&x2, a);
+rustsecp256k1_v0_1_1_fe_sqr(&x2, a);
-rustsecp256k1_v0_1_0_fe_mul(&x2, &x2, a);
+rustsecp256k1_v0_1_1_fe_mul(&x2, &x2, a);
 /* The final result is then assembled using a sliding window over the blocks. */
-rustsecp256k1_v0_1_0_fe_mul(&t1, &t1, a);
+rustsecp256k1_v0_1_1_fe_mul(&t1, &t1, a);
-rustsecp256k1_v0_1_0_fe_mul(r, a, &t1);
+rustsecp256k1_v0_1_1_fe_mul(r, a, &t1);
 }
-static void rustsecp256k1_v0_1_0_fe_inv_var(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a) {
+static void rustsecp256k1_v0_1_1_fe_inv_var(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a) {
 #if defined(USE_FIELD_INV_BUILTIN)
-rustsecp256k1_v0_1_0_fe_inv(r, a);
+rustsecp256k1_v0_1_1_fe_inv(r, a);
 #elif defined(USE_FIELD_INV_NUM)
-rustsecp256k1_v0_1_0_num n, m;
+rustsecp256k1_v0_1_1_num n, m;
-static const rustsecp256k1_v0_1_0_fe negone = SECP256K1_FE_CONST(
+static const rustsecp256k1_v0_1_1_fe negone = SECP256K1_FE_CONST(
 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL,
 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFEUL, 0xFFFFFC2EUL
 );
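The exponent targeted by fe_inv in the hunk above comes from Fermat's little theorem: for a not divisible by p,

\[
a^{p-1} \equiv 1 \pmod{p} \quad\Longrightarrow\quad a^{-1} \equiv a^{\,p-2} \pmod{p},
\]

and p - 2 is made up of the runs of 1 bits listed in the comment, which is why the same x2 ... x223 blocks built for the square-root ladder can be reused here with a different final assembly.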
@ -244,27 +244,27 @@ static void rustsecp256k1_v0_1_0_fe_inv_var(rustsecp256k1_v0_1_0_fe *r, const ru
 unsigned char b[32];
 int res;
-rustsecp256k1_v0_1_0_fe c = *a;
+rustsecp256k1_v0_1_1_fe c = *a;
 /* Verify the result is the (unique) valid inverse using non-GMP code. */
-CHECK(rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(&c));
+CHECK(rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(&c));
 #error "Please select field inverse implementation"
-static void rustsecp256k1_v0_1_0_fe_inv_all_var(rustsecp256k1_v0_1_0_fe *r, const rustsecp256k1_v0_1_0_fe *a, size_t len) {
+static void rustsecp256k1_v0_1_1_fe_inv_all_var(rustsecp256k1_v0_1_1_fe *r, const rustsecp256k1_v0_1_1_fe *a, size_t len) {
-rustsecp256k1_v0_1_0_fe u;
+rustsecp256k1_v0_1_1_fe u;
@ -276,25 +276,25 @@ static void rustsecp256k1_v0_1_0_fe_inv_all_var(rustsecp256k1_v0_1_0_fe *r, cons
-rustsecp256k1_v0_1_0_fe_mul(&r[i], &r[i - 1], &a[i]);
+rustsecp256k1_v0_1_1_fe_mul(&r[i], &r[i - 1], &a[i]);
-rustsecp256k1_v0_1_0_fe_inv_var(&u, &r[--i]);
+rustsecp256k1_v0_1_1_fe_inv_var(&u, &r[--i]);
-rustsecp256k1_v0_1_0_fe_mul(&r[j], &r[i], &u);
+rustsecp256k1_v0_1_1_fe_mul(&r[j], &r[i], &u);
-rustsecp256k1_v0_1_0_fe_mul(&u, &u, &a[j]);
+rustsecp256k1_v0_1_1_fe_mul(&u, &u, &a[j]);
-static int rustsecp256k1_v0_1_0_fe_is_quad_var(const rustsecp256k1_v0_1_0_fe *a) {
+static int rustsecp256k1_v0_1_1_fe_is_quad_var(const rustsecp256k1_v0_1_1_fe *a) {
 #ifndef USE_NUM_NONE
-rustsecp256k1_v0_1_0_num n;
+rustsecp256k1_v0_1_1_num n;
-rustsecp256k1_v0_1_0_num m;
+rustsecp256k1_v0_1_1_num m;
 /* secp256k1 field prime, value p defined in "Standards for Efficient Cryptography" (SEC2) 2.7.1. */
@ -303,15 +303,15 @@ static int rustsecp256k1_v0_1_0_fe_is_quad_var(const rustsecp256k1_v0_1_0_fe *a)
-rustsecp256k1_v0_1_0_fe c = *a;
+rustsecp256k1_v0_1_1_fe c = *a;
-return rustsecp256k1_v0_1_0_num_jacobi(&n, &m) >= 0;
+return rustsecp256k1_v0_1_1_num_jacobi(&n, &m) >= 0;
 #else
-rustsecp256k1_v0_1_0_fe r;
+rustsecp256k1_v0_1_1_fe r;
-return rustsecp256k1_v0_1_0_fe_sqrt(&r, a);
+return rustsecp256k1_v0_1_1_fe_sqrt(&r, a);
 #endif
 }
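fe_inv_all_var, renamed above, is the classic batch-inversion pattern (Montgomery's trick): one modular inversion plus a linear number of multiplications inverts a whole array. A self-contained sketch of the same two-pass control flow on plain doubles, to make the structure explicit; the field version has the identical shape with fe_mul and fe_inv_var in place of * and /:

#include <stddef.h>

/* Invert len values with a single division: forward pass builds prefix
 * products, one division inverts the full product, backward pass peels the
 * individual inverses back out. */
static void invert_all(double *r, const double *a, size_t len) {
    double u;
    size_t i;
    if (len < 1) return;
    r[0] = a[0];
    i = 0;
    while (++i < len) {
        r[i] = r[i - 1] * a[i];      /* r[i] = a[0] * ... * a[i] */
    }
    u = 1.0 / r[--i];                /* single inversion of the total product */
    while (i > 0) {
        size_t j = i--;
        r[j] = r[i] * u;             /* 1/a[j] = (a[0]*...*a[j-1]) * u */
        u *= a[j];                   /* strip a[j] from the running inverse */
    }
    r[0] = u;
}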
@ -20,13 +20,13 @@ static void default_error_callback_fn(const char* str, void* data) {
 abort();
 }
 
-static const rustsecp256k1_v0_1_0_callback default_error_callback = {
+static const rustsecp256k1_v0_1_1_callback default_error_callback = {
 default_error_callback_fn,
 NULL
 };
 
 int main(int argc, char **argv) {
-rustsecp256k1_v0_1_0_ecmult_gen_context ctx;
+rustsecp256k1_v0_1_1_ecmult_gen_context ctx;
 void *prealloc, *base;
@ -45,12 +45,12 @@ int main(int argc, char **argv) {
 fprintf(fp, "#define SC SECP256K1_GE_STORAGE_CONST\n");
-fprintf(fp, "static const rustsecp256k1_v0_1_0_ge_storage rustsecp256k1_v0_1_0_ecmult_static_context[64][16] = {\n");
+fprintf(fp, "static const rustsecp256k1_v0_1_1_ge_storage rustsecp256k1_v0_1_1_ecmult_static_context[64][16] = {\n");
 base = checked_malloc(&default_error_callback, SECP256K1_ECMULT_GEN_CONTEXT_PREALLOCATED_SIZE);
 prealloc = base;
-rustsecp256k1_v0_1_0_ecmult_gen_context_init(&ctx);
+rustsecp256k1_v0_1_1_ecmult_gen_context_init(&ctx);
-rustsecp256k1_v0_1_0_ecmult_gen_context_build(&ctx, &prealloc);
+rustsecp256k1_v0_1_1_ecmult_gen_context_build(&ctx, &prealloc);
 for(outer = 0; outer != 64; outer++) {
@ -68,7 +68,7 @@ int main(int argc, char **argv) {
 fprintf(fp,"};\n");
-rustsecp256k1_v0_1_0_ecmult_gen_context_clear(&ctx);
+rustsecp256k1_v0_1_1_ecmult_gen_context_clear(&ctx);
 free(base);
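The generator above writes out a 64 x 16 table of group elements, a shape consistent with splitting a 256-bit scalar into 64 four-bit windows so that multiplication by the base point reduces to 64 table lookups and additions at run time. A hypothetical helper (an assumption about that layout, not code from this repository) for extracting window w from a big-endian 32-byte scalar:

#include <stdint.h>

/* Return the w-th 4-bit window of a 256-bit scalar stored big-endian in
 * k32[0..31]; window 0 is the least significant nibble. With a 64x16
 * precomputed table, entry table[w][nibble] would hold (nibble * 16^w) * G. */
static unsigned int window_nibble(const unsigned char k32[32], int w) {
    unsigned char byte = k32[31 - (w / 2)];
    return (w & 1) ? (unsigned int)(byte >> 4) : (unsigned int)(byte & 0x0F);
}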
@ -12,131 +12,131 @@
|
||||||
|
|
||||||
/** A group element of the secp256k1 curve, in affine coordinates. */
|
/** A group element of the secp256k1 curve, in affine coordinates. */
|
||||||
typedef struct {
|
typedef struct {
|
||||||
rustsecp256k1_v0_1_0_fe x;
|
rustsecp256k1_v0_1_1_fe x;
|
||||||
rustsecp256k1_v0_1_0_fe y;
|
rustsecp256k1_v0_1_1_fe y;
|
||||||
int infinity; /* whether this represents the point at infinity */
|
int infinity; /* whether this represents the point at infinity */
|
||||||
} rustsecp256k1_v0_1_0_ge;
|
} rustsecp256k1_v0_1_1_ge;
|
||||||
|
|
||||||
#define SECP256K1_GE_CONST(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) {SECP256K1_FE_CONST((a),(b),(c),(d),(e),(f),(g),(h)), SECP256K1_FE_CONST((i),(j),(k),(l),(m),(n),(o),(p)), 0}
|
#define SECP256K1_GE_CONST(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) {SECP256K1_FE_CONST((a),(b),(c),(d),(e),(f),(g),(h)), SECP256K1_FE_CONST((i),(j),(k),(l),(m),(n),(o),(p)), 0}
|
||||||
#define SECP256K1_GE_CONST_INFINITY {SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), 1}
|
#define SECP256K1_GE_CONST_INFINITY {SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), 1}
|
||||||
|
|
||||||
/** A group element of the secp256k1 curve, in jacobian coordinates. */
|
/** A group element of the secp256k1 curve, in jacobian coordinates. */
|
||||||
typedef struct {
|
typedef struct {
|
||||||
rustsecp256k1_v0_1_0_fe x; /* actual X: x/z^2 */
|
rustsecp256k1_v0_1_1_fe x; /* actual X: x/z^2 */
|
||||||
rustsecp256k1_v0_1_0_fe y; /* actual Y: y/z^3 */
|
rustsecp256k1_v0_1_1_fe y; /* actual Y: y/z^3 */
|
||||||
rustsecp256k1_v0_1_0_fe z;
|
rustsecp256k1_v0_1_1_fe z;
|
||||||
int infinity; /* whether this represents the point at infinity */
|
int infinity; /* whether this represents the point at infinity */
|
||||||
} rustsecp256k1_v0_1_0_gej;
|
} rustsecp256k1_v0_1_1_gej;
|
||||||
|
|
||||||
#define SECP256K1_GEJ_CONST(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) {SECP256K1_FE_CONST((a),(b),(c),(d),(e),(f),(g),(h)), SECP256K1_FE_CONST((i),(j),(k),(l),(m),(n),(o),(p)), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 1), 0}
|
#define SECP256K1_GEJ_CONST(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) {SECP256K1_FE_CONST((a),(b),(c),(d),(e),(f),(g),(h)), SECP256K1_FE_CONST((i),(j),(k),(l),(m),(n),(o),(p)), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 1), 0}
|
||||||
#define SECP256K1_GEJ_CONST_INFINITY {SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), 1}
|
#define SECP256K1_GEJ_CONST_INFINITY {SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), 1}
|
||||||
|
|
||||||
typedef struct {
|
typedef struct {
|
||||||
rustsecp256k1_v0_1_0_fe_storage x;
|
rustsecp256k1_v0_1_1_fe_storage x;
|
||||||
rustsecp256k1_v0_1_0_fe_storage y;
|
rustsecp256k1_v0_1_1_fe_storage y;
|
||||||
} rustsecp256k1_v0_1_0_ge_storage;
|
} rustsecp256k1_v0_1_1_ge_storage;
|
||||||
|
|
||||||
#define SECP256K1_GE_STORAGE_CONST(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) {SECP256K1_FE_STORAGE_CONST((a),(b),(c),(d),(e),(f),(g),(h)), SECP256K1_FE_STORAGE_CONST((i),(j),(k),(l),(m),(n),(o),(p))}
|
#define SECP256K1_GE_STORAGE_CONST(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) {SECP256K1_FE_STORAGE_CONST((a),(b),(c),(d),(e),(f),(g),(h)), SECP256K1_FE_STORAGE_CONST((i),(j),(k),(l),(m),(n),(o),(p))}
|
||||||
|
|
||||||
#define SECP256K1_GE_STORAGE_CONST_GET(t) SECP256K1_FE_STORAGE_CONST_GET(t.x), SECP256K1_FE_STORAGE_CONST_GET(t.y)
|
#define SECP256K1_GE_STORAGE_CONST_GET(t) SECP256K1_FE_STORAGE_CONST_GET(t.x), SECP256K1_FE_STORAGE_CONST_GET(t.y)
|
||||||
|
|
||||||
/** Set a group element equal to the point with given X and Y coordinates */
|
/** Set a group element equal to the point with given X and Y coordinates */
|
||||||
static void rustsecp256k1_v0_1_0_ge_set_xy(rustsecp256k1_v0_1_0_ge *r, const rustsecp256k1_v0_1_0_fe *x, const rustsecp256k1_v0_1_0_fe *y);
|
static void rustsecp256k1_v0_1_1_ge_set_xy(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_fe *x, const rustsecp256k1_v0_1_1_fe *y);
|
||||||
|
|
||||||
/** Set a group element (affine) equal to the point with the given X coordinate
|
/** Set a group element (affine) equal to the point with the given X coordinate
|
||||||
* and a Y coordinate that is a quadratic residue modulo p. The return value
|
* and a Y coordinate that is a quadratic residue modulo p. The return value
|
||||||
* is true iff a coordinate with the given X coordinate exists.
|
* is true iff a coordinate with the given X coordinate exists.
|
||||||
*/
|
*/
|
||||||
static int rustsecp256k1_v0_1_0_ge_set_xquad(rustsecp256k1_v0_1_0_ge *r, const rustsecp256k1_v0_1_0_fe *x);
|
static int rustsecp256k1_v0_1_1_ge_set_xquad(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_fe *x);
|
||||||
|
|
||||||
/** Set a group element (affine) equal to the point with the given X coordinate, and given oddness
|
/** Set a group element (affine) equal to the point with the given X coordinate, and given oddness
|
||||||
* for Y. Return value indicates whether the result is valid. */
|
* for Y. Return value indicates whether the result is valid. */
|
||||||
static int rustsecp256k1_v0_1_0_ge_set_xo_var(rustsecp256k1_v0_1_0_ge *r, const rustsecp256k1_v0_1_0_fe *x, int odd);
|
static int rustsecp256k1_v0_1_1_ge_set_xo_var(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_fe *x, int odd);
|
||||||
|
|
||||||
/** Check whether a group element is the point at infinity. */
static int rustsecp256k1_v0_1_1_ge_is_infinity(const rustsecp256k1_v0_1_1_ge *a);

/** Check whether a group element is valid (i.e., on the curve). */
static int rustsecp256k1_v0_1_1_ge_is_valid_var(const rustsecp256k1_v0_1_1_ge *a);

static void rustsecp256k1_v0_1_1_ge_neg(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_ge *a);

/** Set a group element equal to another which is given in jacobian coordinates */
static void rustsecp256k1_v0_1_1_ge_set_gej(rustsecp256k1_v0_1_1_ge *r, rustsecp256k1_v0_1_1_gej *a);

/** Set a batch of group elements equal to the inputs given in jacobian coordinates */
static void rustsecp256k1_v0_1_1_ge_set_all_gej_var(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_gej *a, size_t len);

/** Bring a batch of inputs given in jacobian coordinates (with known z-ratios) to
 * the same global z "denominator". zr must contain the known z-ratios such
 * that mul(a[i].z, zr[i+1]) == a[i+1].z. zr[0] is ignored. The x and y
 * coordinates of the result are stored in r, the common z coordinate is
 * stored in globalz. */
static void rustsecp256k1_v0_1_1_ge_globalz_set_table_gej(size_t len, rustsecp256k1_v0_1_1_ge *r, rustsecp256k1_v0_1_1_fe *globalz, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_fe *zr);
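/* Reading aid for the zr convention above: since a[i+1].z == a[i].z * zr[i+1],
 * every entry's z coordinate equals a[0].z times the running product of the
 * z-ratios, so one shared denominator (globalz) plus the zr table is enough
 * to describe the whole batch. */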
/** Set a group element (affine) equal to the point at infinity. */
static void rustsecp256k1_v0_1_1_ge_set_infinity(rustsecp256k1_v0_1_1_ge *r);

/** Set a group element (jacobian) equal to the point at infinity. */
static void rustsecp256k1_v0_1_1_gej_set_infinity(rustsecp256k1_v0_1_1_gej *r);

/** Set a group element (jacobian) equal to another which is given in affine coordinates. */
static void rustsecp256k1_v0_1_1_gej_set_ge(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_ge *a);

/** Compare the X coordinate of a group element (jacobian). */
static int rustsecp256k1_v0_1_1_gej_eq_x_var(const rustsecp256k1_v0_1_1_fe *x, const rustsecp256k1_v0_1_1_gej *a);

/** Set r equal to the inverse of a (i.e., mirrored around the X axis) */
static void rustsecp256k1_v0_1_1_gej_neg(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a);

/** Check whether a group element is the point at infinity. */
static int rustsecp256k1_v0_1_1_gej_is_infinity(const rustsecp256k1_v0_1_1_gej *a);

/** Check whether a group element's y coordinate is a quadratic residue. */
static int rustsecp256k1_v0_1_1_gej_has_quad_y_var(const rustsecp256k1_v0_1_1_gej *a);

/** Set r equal to the double of a. If rzr is not-NULL, r->z = a->z * *rzr (where infinity means an implicit z = 0).
 * a may not be zero. Constant time. */
static void rustsecp256k1_v0_1_1_gej_double_nonzero(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, rustsecp256k1_v0_1_1_fe *rzr);

/** Set r equal to the double of a. If rzr is not-NULL, r->z = a->z * *rzr (where infinity means an implicit z = 0). */
static void rustsecp256k1_v0_1_1_gej_double_var(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, rustsecp256k1_v0_1_1_fe *rzr);

/** Set r equal to the sum of a and b. If rzr is non-NULL, r->z = a->z * *rzr (a cannot be infinity in that case). */
static void rustsecp256k1_v0_1_1_gej_add_var(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_gej *b, rustsecp256k1_v0_1_1_fe *rzr);

/** Set r equal to the sum of a and b (with b given in affine coordinates, and not infinity). */
static void rustsecp256k1_v0_1_1_gej_add_ge(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_ge *b);

/** Set r equal to the sum of a and b (with b given in affine coordinates). This is more efficient
than rustsecp256k1_v0_1_1_gej_add_var. It is identical to rustsecp256k1_v0_1_1_gej_add_ge but without constant-time
guarantee, and b is allowed to be infinity. If rzr is non-NULL, r->z = a->z * *rzr (a cannot be infinity in that case). */
static void rustsecp256k1_v0_1_1_gej_add_ge_var(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_ge *b, rustsecp256k1_v0_1_1_fe *rzr);

/** Set r equal to the sum of a and b (with the inverse of b's Z coordinate passed as bzinv). */
static void rustsecp256k1_v0_1_1_gej_add_zinv_var(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_ge *b, const rustsecp256k1_v0_1_1_fe *bzinv);
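/* Illustration only (not part of the upstream header): folding an array of
 * affine points into a Jacobian accumulator with the variable-time adder
 * declared above. The helper name is hypothetical; rzr is passed as NULL
 * because the z-ratio output is not needed here. */
static void example_sum_points(rustsecp256k1_v0_1_1_gej *acc, const rustsecp256k1_v0_1_1_ge *pts, size_t n) {
    size_t k;
    rustsecp256k1_v0_1_1_gej_set_infinity(acc);
    for (k = 0; k < n; k++) {
        rustsecp256k1_v0_1_1_gej_add_ge_var(acc, acc, &pts[k], NULL);
    }
}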
#ifdef USE_ENDOMORPHISM
/** Set r to be equal to lambda times a, where lambda is chosen in a way such that this is very fast. */
static void rustsecp256k1_v0_1_1_ge_mul_lambda(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_ge *a);
#endif

/** Clear a rustsecp256k1_v0_1_1_gej to prevent leaking sensitive information. */
static void rustsecp256k1_v0_1_1_gej_clear(rustsecp256k1_v0_1_1_gej *r);

/** Clear a rustsecp256k1_v0_1_1_ge to prevent leaking sensitive information. */
static void rustsecp256k1_v0_1_1_ge_clear(rustsecp256k1_v0_1_1_ge *r);

/** Convert a group element to the storage type. */
static void rustsecp256k1_v0_1_1_ge_to_storage(rustsecp256k1_v0_1_1_ge_storage *r, const rustsecp256k1_v0_1_1_ge *a);

/** Convert a group element back from the storage type. */
static void rustsecp256k1_v0_1_1_ge_from_storage(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_ge_storage *a);

/** If flag is true, set *r equal to *a; otherwise leave it. Constant-time. */
static void rustsecp256k1_v0_1_1_ge_storage_cmov(rustsecp256k1_v0_1_1_ge_storage *r, const rustsecp256k1_v0_1_1_ge_storage *a, int flag);

/** Rescale a jacobian point by b which must be non-zero. Constant-time. */
static void rustsecp256k1_v0_1_1_gej_rescale(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_fe *b);

#endif /* SECP256K1_GROUP_H */
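/* Illustration only: the cmov primitive above is what makes uniform table
 * lookups possible. A sketch of selecting entry `index` from a table while
 * reading every entry, so the memory access pattern does not depend on the
 * index (function and variable names are hypothetical): */
static void example_table_select(rustsecp256k1_v0_1_1_ge_storage *out, const rustsecp256k1_v0_1_1_ge_storage *table, size_t n, size_t index) {
    size_t k;
    *out = table[0];
    for (k = 1; k < n; k++) {
        /* The flag is 1 exactly once; selection happens via cmov, not a branch on the data. */
        rustsecp256k1_v0_1_1_ge_storage_cmov(out, &table[k], k == index);
    }
}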
@ -38,7 +38,7 @@
 */
#if defined(EXHAUSTIVE_TEST_ORDER)
# if EXHAUSTIVE_TEST_ORDER == 199
static const rustsecp256k1_v0_1_1_ge rustsecp256k1_v0_1_1_ge_const_g = SECP256K1_GE_CONST(
    0xFA7CC9A7, 0x0737F2DB, 0xA749DD39, 0x2B4FB069,
    0x3B017A7D, 0xA808C2F1, 0xFB12940C, 0x9EA66C18,
    0x78AC123A, 0x5ED8AEF3, 0x8732BC91, 0x1F3A2868,
@ -47,7 +47,7 @@ static const rustsecp256k1_v0_1_0_ge rustsecp256k1_v0_1_0_ge_const_g = SECP256K1

static const int CURVE_B = 4;
# elif EXHAUSTIVE_TEST_ORDER == 13
static const rustsecp256k1_v0_1_1_ge rustsecp256k1_v0_1_1_ge_const_g = SECP256K1_GE_CONST(
    0xedc60018, 0xa51a786b, 0x2ea91f4d, 0x4c9416c0,
    0x9de54c3b, 0xa1316554, 0x6cf4345c, 0x7277ef15,
    0x54cb1b6b, 0xdc8c1273, 0x087844ea, 0x43f4603e,
@ -61,7 +61,7 @@ static const int CURVE_B = 2;

/** Generator for secp256k1, value 'g' defined in
 * "Standards for Efficient Cryptography" (SEC2) 2.7.1.
 */
static const rustsecp256k1_v0_1_1_ge rustsecp256k1_v0_1_1_ge_const_g = SECP256K1_GE_CONST(
    0x79BE667EUL, 0xF9DCBBACUL, 0x55A06295UL, 0xCE870B07UL,
    0x029BFCDBUL, 0x2DCE28D9UL, 0x59F2815BUL, 0x16F81798UL,
    0x483ADA77UL, 0x26A3C465UL, 0x5DA4FBFCUL, 0x0E1108A8UL,
@ -71,63 +71,63 @@ static const rustsecp256k1_v0_1_0_ge rustsecp256k1_v0_1_0_ge_const_g = SECP256K1

static const int CURVE_B = 7;
#endif
static void rustsecp256k1_v0_1_1_ge_set_gej_zinv(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_fe *zi) {
    rustsecp256k1_v0_1_1_fe zi2;
    rustsecp256k1_v0_1_1_fe zi3;
    rustsecp256k1_v0_1_1_fe_sqr(&zi2, zi);
    rustsecp256k1_v0_1_1_fe_mul(&zi3, &zi2, zi);
    rustsecp256k1_v0_1_1_fe_mul(&r->x, &a->x, &zi2);
    rustsecp256k1_v0_1_1_fe_mul(&r->y, &a->y, &zi3);
    r->infinity = a->infinity;
}
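/* Note: zi is expected to hold the inverse of the point's Jacobian Z, so the
 * two multiplications above are the usual projection x = X*zi^2, y = Y*zi^3,
 * i.e. x = X/Z^2 and y = Y/Z^3. */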
static void rustsecp256k1_v0_1_1_ge_set_xy(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_fe *x, const rustsecp256k1_v0_1_1_fe *y) {
    r->infinity = 0;
    r->x = *x;
    r->y = *y;
}

static int rustsecp256k1_v0_1_1_ge_is_infinity(const rustsecp256k1_v0_1_1_ge *a) {
    return a->infinity;
}

static void rustsecp256k1_v0_1_1_ge_neg(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_ge *a) {
    *r = *a;
    rustsecp256k1_v0_1_1_fe_normalize_weak(&r->y);
    rustsecp256k1_v0_1_1_fe_negate(&r->y, &r->y, 1);
}

static void rustsecp256k1_v0_1_1_ge_set_gej(rustsecp256k1_v0_1_1_ge *r, rustsecp256k1_v0_1_1_gej *a) {
    rustsecp256k1_v0_1_1_fe z2, z3;
    r->infinity = a->infinity;
    rustsecp256k1_v0_1_1_fe_inv(&a->z, &a->z);
    rustsecp256k1_v0_1_1_fe_sqr(&z2, &a->z);
    rustsecp256k1_v0_1_1_fe_mul(&z3, &a->z, &z2);
    rustsecp256k1_v0_1_1_fe_mul(&a->x, &a->x, &z2);
    rustsecp256k1_v0_1_1_fe_mul(&a->y, &a->y, &z3);
    rustsecp256k1_v0_1_1_fe_set_int(&a->z, 1);
    r->x = a->x;
    r->y = a->y;
}
static void rustsecp256k1_v0_1_1_ge_set_gej_var(rustsecp256k1_v0_1_1_ge *r, rustsecp256k1_v0_1_1_gej *a) {
    rustsecp256k1_v0_1_1_fe z2, z3;
    r->infinity = a->infinity;
    if (a->infinity) {
        return;
    }
    rustsecp256k1_v0_1_1_fe_inv_var(&a->z, &a->z);
    rustsecp256k1_v0_1_1_fe_sqr(&z2, &a->z);
    rustsecp256k1_v0_1_1_fe_mul(&z3, &a->z, &z2);
    rustsecp256k1_v0_1_1_fe_mul(&a->x, &a->x, &z2);
    rustsecp256k1_v0_1_1_fe_mul(&a->y, &a->y, &z3);
    rustsecp256k1_v0_1_1_fe_set_int(&a->z, 1);
    r->x = a->x;
    r->y = a->y;
}
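/* As elsewhere in this library, the _var suffix marks a variable-time code
 * path (here rustsecp256k1_v0_1_1_fe_inv_var instead of the constant-time
 * inversion), so this variant is best reserved for points that are not secret. */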
static void rustsecp256k1_v0_1_1_ge_set_all_gej_var(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_gej *a, size_t len) {
    rustsecp256k1_v0_1_1_fe u;
    size_t i;
    size_t last_i = SIZE_MAX;

@ -137,7 +137,7 @@ static void rustsecp256k1_v0_1_0_ge_set_all_gej_var(rustsecp256k1_v0_1_0_ge *r,
            if (last_i == SIZE_MAX) {
                r[i].x = a[i].z;
            } else {
                rustsecp256k1_v0_1_1_fe_mul(&r[i].x, &r[last_i].x, &a[i].z);
            }
            last_i = i;
        }
@ -145,14 +145,14 @@ static void rustsecp256k1_v0_1_0_ge_set_all_gej_var(rustsecp256k1_v0_1_0_ge *r,
    if (last_i == SIZE_MAX) {
        return;
    }
    rustsecp256k1_v0_1_1_fe_inv_var(&u, &r[last_i].x);

    i = last_i;
    while (i > 0) {
        i--;
        if (!a[i].infinity) {
            rustsecp256k1_v0_1_1_fe_mul(&r[last_i].x, &r[i].x, &u);
            rustsecp256k1_v0_1_1_fe_mul(&u, &u, &a[last_i].z);
            last_i = i;
        }
    }
@ -162,21 +162,21 @@ static void rustsecp256k1_v0_1_0_ge_set_all_gej_var(rustsecp256k1_v0_1_0_ge *r,
    for (i = 0; i < len; i++) {
        r[i].infinity = a[i].infinity;
        if (!a[i].infinity) {
            rustsecp256k1_v0_1_1_ge_set_gej_zinv(&r[i], &a[i], &r[i].x);
        }
    }
}
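/* The passes above are Montgomery's batch-inversion trick: the first loop
 * accumulates running products of the non-infinity z values in r[i].x, a
 * single rustsecp256k1_v0_1_1_fe_inv_var inverts the total product, and the
 * backward pass turns that one inverse into an inverse per element, so the
 * whole batch costs one field inversion plus a linear number of multiplications. */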
static void rustsecp256k1_v0_1_1_ge_globalz_set_table_gej(size_t len, rustsecp256k1_v0_1_1_ge *r, rustsecp256k1_v0_1_1_fe *globalz, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_fe *zr) {
    size_t i = len - 1;
    rustsecp256k1_v0_1_1_fe zs;

    if (len > 0) {
        /* The z of the final point gives us the "global Z" for the table. */
        r[i].x = a[i].x;
        r[i].y = a[i].y;
        /* Ensure all y values are in weak normal form for fast negation of points */
        rustsecp256k1_v0_1_1_fe_normalize_weak(&r[i].y);
        *globalz = a[i].z;
        r[i].infinity = 0;
        zs = zr[i];
@ -184,93 +184,93 @@ static void rustsecp256k1_v0_1_0_ge_globalz_set_table_gej(size_t len, rustsecp25
        /* Work our way backwards, using the z-ratios to scale the x/y values. */
        while (i > 0) {
            if (i != len - 1) {
                rustsecp256k1_v0_1_1_fe_mul(&zs, &zs, &zr[i]);
            }
            i--;
            rustsecp256k1_v0_1_1_ge_set_gej_zinv(&r[i], &a[i], &zs);
        }
    }
}

static void rustsecp256k1_v0_1_1_gej_set_infinity(rustsecp256k1_v0_1_1_gej *r) {
    r->infinity = 1;
    rustsecp256k1_v0_1_1_fe_clear(&r->x);
    rustsecp256k1_v0_1_1_fe_clear(&r->y);
    rustsecp256k1_v0_1_1_fe_clear(&r->z);
}

static void rustsecp256k1_v0_1_1_ge_set_infinity(rustsecp256k1_v0_1_1_ge *r) {
    r->infinity = 1;
    rustsecp256k1_v0_1_1_fe_clear(&r->x);
    rustsecp256k1_v0_1_1_fe_clear(&r->y);
}

static void rustsecp256k1_v0_1_1_gej_clear(rustsecp256k1_v0_1_1_gej *r) {
    r->infinity = 0;
    rustsecp256k1_v0_1_1_fe_clear(&r->x);
    rustsecp256k1_v0_1_1_fe_clear(&r->y);
    rustsecp256k1_v0_1_1_fe_clear(&r->z);
}

static void rustsecp256k1_v0_1_1_ge_clear(rustsecp256k1_v0_1_1_ge *r) {
    r->infinity = 0;
    rustsecp256k1_v0_1_1_fe_clear(&r->x);
    rustsecp256k1_v0_1_1_fe_clear(&r->y);
}

static int rustsecp256k1_v0_1_1_ge_set_xquad(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_fe *x) {
    rustsecp256k1_v0_1_1_fe x2, x3, c;
    r->x = *x;
    rustsecp256k1_v0_1_1_fe_sqr(&x2, x);
    rustsecp256k1_v0_1_1_fe_mul(&x3, x, &x2);
    r->infinity = 0;
    rustsecp256k1_v0_1_1_fe_set_int(&c, CURVE_B);
    rustsecp256k1_v0_1_1_fe_add(&c, &x3);
    return rustsecp256k1_v0_1_1_fe_sqrt(&r->y, &c);
}
static int rustsecp256k1_v0_1_1_ge_set_xo_var(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_fe *x, int odd) {
    if (!rustsecp256k1_v0_1_1_ge_set_xquad(r, x)) {
        return 0;
    }
    rustsecp256k1_v0_1_1_fe_normalize_var(&r->y);
    if (rustsecp256k1_v0_1_1_fe_is_odd(&r->y) != odd) {
        rustsecp256k1_v0_1_1_fe_negate(&r->y, &r->y, 1);
    }
    return 1;

}
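/* The parity fix-up above works because the field prime p is odd: y and p - y
 * always have opposite parity, so a single negation is enough to reach the
 * requested oddness. */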
static void rustsecp256k1_v0_1_1_gej_set_ge(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_ge *a) {
    r->infinity = a->infinity;
    r->x = a->x;
    r->y = a->y;
    rustsecp256k1_v0_1_1_fe_set_int(&r->z, 1);
}

static int rustsecp256k1_v0_1_1_gej_eq_x_var(const rustsecp256k1_v0_1_1_fe *x, const rustsecp256k1_v0_1_1_gej *a) {
    rustsecp256k1_v0_1_1_fe r, r2;
    VERIFY_CHECK(!a->infinity);
    rustsecp256k1_v0_1_1_fe_sqr(&r, &a->z); rustsecp256k1_v0_1_1_fe_mul(&r, &r, x);
    r2 = a->x; rustsecp256k1_v0_1_1_fe_normalize_weak(&r2);
    return rustsecp256k1_v0_1_1_fe_equal_var(&r, &r2);
}

static void rustsecp256k1_v0_1_1_gej_neg(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a) {
    r->infinity = a->infinity;
    r->x = a->x;
    r->y = a->y;
    r->z = a->z;
    rustsecp256k1_v0_1_1_fe_normalize_weak(&r->y);
    rustsecp256k1_v0_1_1_fe_negate(&r->y, &r->y, 1);
}

static int rustsecp256k1_v0_1_1_gej_is_infinity(const rustsecp256k1_v0_1_1_gej *a) {
    return a->infinity;
}

static int rustsecp256k1_v0_1_1_gej_is_valid_var(const rustsecp256k1_v0_1_1_gej *a) {
    rustsecp256k1_v0_1_1_fe y2, x3, z2, z6;
    if (a->infinity) {
        return 0;
    }
@ -279,31 +279,31 @@ static int rustsecp256k1_v0_1_0_gej_is_valid_var(const rustsecp256k1_v0_1_0_gej
     * Y^2 / Z^6 = X^3 / Z^6 + 7
     * Y^2 = X^3 + 7*Z^6
     */
    rustsecp256k1_v0_1_1_fe_sqr(&y2, &a->y);
    rustsecp256k1_v0_1_1_fe_sqr(&x3, &a->x); rustsecp256k1_v0_1_1_fe_mul(&x3, &x3, &a->x);
    rustsecp256k1_v0_1_1_fe_sqr(&z2, &a->z);
    rustsecp256k1_v0_1_1_fe_sqr(&z6, &z2); rustsecp256k1_v0_1_1_fe_mul(&z6, &z6, &z2);
    rustsecp256k1_v0_1_1_fe_mul_int(&z6, CURVE_B);
    rustsecp256k1_v0_1_1_fe_add(&x3, &z6);
    rustsecp256k1_v0_1_1_fe_normalize_weak(&x3);
    return rustsecp256k1_v0_1_1_fe_equal_var(&y2, &x3);
}

static int rustsecp256k1_v0_1_1_ge_is_valid_var(const rustsecp256k1_v0_1_1_ge *a) {
    rustsecp256k1_v0_1_1_fe y2, x3, c;
    if (a->infinity) {
        return 0;
    }
    /* y^2 = x^3 + 7 */
    rustsecp256k1_v0_1_1_fe_sqr(&y2, &a->y);
    rustsecp256k1_v0_1_1_fe_sqr(&x3, &a->x); rustsecp256k1_v0_1_1_fe_mul(&x3, &x3, &a->x);
    rustsecp256k1_v0_1_1_fe_set_int(&c, CURVE_B);
    rustsecp256k1_v0_1_1_fe_add(&x3, &c);
    rustsecp256k1_v0_1_1_fe_normalize_weak(&x3);
    return rustsecp256k1_v0_1_1_fe_equal_var(&y2, &x3);
}
static void rustsecp256k1_v0_1_1_gej_double_var(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, rustsecp256k1_v0_1_1_fe *rzr) {
    /* Operations: 3 mul, 4 sqr, 0 normalize, 12 mul_int/add/negate.
     *
     * Note that there is an implementation described at
@ -311,7 +311,7 @@ static void rustsecp256k1_v0_1_0_gej_double_var(rustsecp256k1_v0_1_0_gej *r, con
     * which trades a multiply for a square, but in practice this is actually slower,
     * mainly because it requires more normalizations.
     */
    rustsecp256k1_v0_1_1_fe t1,t2,t3,t4;
    /** For secp256k1, 2Q is infinity if and only if Q is infinity. This is because if 2Q = infinity,
     * Q must equal -Q, or that Q.y == -(Q.y), or Q.y is 0. For a point on y^2 = x^3 + 7 to have
     * y=0, x^3 must be -7 mod p. However, -7 has no cube root mod p.
@ -325,47 +325,47 @@ static void rustsecp256k1_v0_1_0_gej_double_var(rustsecp256k1_v0_1_0_gej *r, con
    r->infinity = a->infinity;
    if (r->infinity) {
        if (rzr != NULL) {
            rustsecp256k1_v0_1_1_fe_set_int(rzr, 1);
        }
        return;
    }

    if (rzr != NULL) {
        *rzr = a->y;
        rustsecp256k1_v0_1_1_fe_normalize_weak(rzr);
        rustsecp256k1_v0_1_1_fe_mul_int(rzr, 2);
    }

    rustsecp256k1_v0_1_1_fe_mul(&r->z, &a->z, &a->y);
    rustsecp256k1_v0_1_1_fe_mul_int(&r->z, 2);       /* Z' = 2*Y*Z (2) */
    rustsecp256k1_v0_1_1_fe_sqr(&t1, &a->x);
    rustsecp256k1_v0_1_1_fe_mul_int(&t1, 3);         /* T1 = 3*X^2 (3) */
    rustsecp256k1_v0_1_1_fe_sqr(&t2, &t1);           /* T2 = 9*X^4 (1) */
    rustsecp256k1_v0_1_1_fe_sqr(&t3, &a->y);
    rustsecp256k1_v0_1_1_fe_mul_int(&t3, 2);         /* T3 = 2*Y^2 (2) */
    rustsecp256k1_v0_1_1_fe_sqr(&t4, &t3);
    rustsecp256k1_v0_1_1_fe_mul_int(&t4, 2);         /* T4 = 8*Y^4 (2) */
    rustsecp256k1_v0_1_1_fe_mul(&t3, &t3, &a->x);    /* T3 = 2*X*Y^2 (1) */
    r->x = t3;
    rustsecp256k1_v0_1_1_fe_mul_int(&r->x, 4);       /* X' = 8*X*Y^2 (4) */
    rustsecp256k1_v0_1_1_fe_negate(&r->x, &r->x, 4); /* X' = -8*X*Y^2 (5) */
    rustsecp256k1_v0_1_1_fe_add(&r->x, &t2);         /* X' = 9*X^4 - 8*X*Y^2 (6) */
    rustsecp256k1_v0_1_1_fe_negate(&t2, &t2, 1);     /* T2 = -9*X^4 (2) */
    rustsecp256k1_v0_1_1_fe_mul_int(&t3, 6);         /* T3 = 12*X*Y^2 (6) */
    rustsecp256k1_v0_1_1_fe_add(&t3, &t2);           /* T3 = 12*X*Y^2 - 9*X^4 (8) */
    rustsecp256k1_v0_1_1_fe_mul(&r->y, &t1, &t3);    /* Y' = 36*X^3*Y^2 - 27*X^6 (1) */
    rustsecp256k1_v0_1_1_fe_negate(&t2, &t4, 2);     /* T2 = -8*Y^4 (3) */
    rustsecp256k1_v0_1_1_fe_add(&r->y, &t2);         /* Y' = 36*X^3*Y^2 - 27*X^6 - 8*Y^4 (4) */
}
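/* The optional rzr output above reports the ratio between the output and
 * input z coordinates (here 2*Y in weak-normal form), matching the header
 * contract: when rzr is non-NULL, r->z == a->z * *rzr. Callers that build
 * tables of points sharing one global Z can feed such ratios to
 * rustsecp256k1_v0_1_1_ge_globalz_set_table_gej. */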
static SECP256K1_INLINE void rustsecp256k1_v0_1_0_gej_double_nonzero(rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_gej *a, rustsecp256k1_v0_1_0_fe *rzr) {
|
static SECP256K1_INLINE void rustsecp256k1_v0_1_1_gej_double_nonzero(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, rustsecp256k1_v0_1_1_fe *rzr) {
|
||||||
VERIFY_CHECK(!rustsecp256k1_v0_1_0_gej_is_infinity(a));
|
VERIFY_CHECK(!rustsecp256k1_v0_1_1_gej_is_infinity(a));
|
||||||
rustsecp256k1_v0_1_0_gej_double_var(r, a, rzr);
|
rustsecp256k1_v0_1_1_gej_double_var(r, a, rzr);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_gej_add_var(rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_gej *a, const rustsecp256k1_v0_1_0_gej *b, rustsecp256k1_v0_1_0_fe *rzr) {
|
static void rustsecp256k1_v0_1_1_gej_add_var(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_gej *b, rustsecp256k1_v0_1_1_fe *rzr) {
|
||||||
/* Operations: 12 mul, 4 sqr, 2 normalize, 12 mul_int/add/negate */
|
/* Operations: 12 mul, 4 sqr, 2 normalize, 12 mul_int/add/negate */
|
||||||
rustsecp256k1_v0_1_0_fe z22, z12, u1, u2, s1, s2, h, i, i2, h2, h3, t;
|
rustsecp256k1_v0_1_1_fe z22, z12, u1, u2, s1, s2, h, i, i2, h2, h3, t;
|
||||||
|
|
||||||
if (a->infinity) {
|
if (a->infinity) {
|
||||||
VERIFY_CHECK(rzr == NULL);
|
VERIFY_CHECK(rzr == NULL);
|
||||||
|
@ -375,112 +375,112 @@ static void rustsecp256k1_v0_1_0_gej_add_var(rustsecp256k1_v0_1_0_gej *r, const
|
||||||
|
|
||||||
if (b->infinity) {
|
if (b->infinity) {
|
||||||
if (rzr != NULL) {
|
if (rzr != NULL) {
|
||||||
rustsecp256k1_v0_1_0_fe_set_int(rzr, 1);
|
rustsecp256k1_v0_1_1_fe_set_int(rzr, 1);
|
||||||
}
|
}
|
||||||
*r = *a;
|
*r = *a;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
r->infinity = 0;
|
r->infinity = 0;
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&z22, &b->z);
|
rustsecp256k1_v0_1_1_fe_sqr(&z22, &b->z);
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&z12, &a->z);
|
rustsecp256k1_v0_1_1_fe_sqr(&z12, &a->z);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&u1, &a->x, &z22);
|
rustsecp256k1_v0_1_1_fe_mul(&u1, &a->x, &z22);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&u2, &b->x, &z12);
|
rustsecp256k1_v0_1_1_fe_mul(&u2, &b->x, &z12);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&s1, &a->y, &z22); rustsecp256k1_v0_1_0_fe_mul(&s1, &s1, &b->z);
|
rustsecp256k1_v0_1_1_fe_mul(&s1, &a->y, &z22); rustsecp256k1_v0_1_1_fe_mul(&s1, &s1, &b->z);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&s2, &b->y, &z12); rustsecp256k1_v0_1_0_fe_mul(&s2, &s2, &a->z);
|
rustsecp256k1_v0_1_1_fe_mul(&s2, &b->y, &z12); rustsecp256k1_v0_1_1_fe_mul(&s2, &s2, &a->z);
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&h, &u1, 1); rustsecp256k1_v0_1_0_fe_add(&h, &u2);
|
rustsecp256k1_v0_1_1_fe_negate(&h, &u1, 1); rustsecp256k1_v0_1_1_fe_add(&h, &u2);
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&i, &s1, 1); rustsecp256k1_v0_1_0_fe_add(&i, &s2);
|
rustsecp256k1_v0_1_1_fe_negate(&i, &s1, 1); rustsecp256k1_v0_1_1_fe_add(&i, &s2);
|
||||||
if (rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(&h)) {
|
if (rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(&h)) {
|
||||||
if (rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(&i)) {
|
if (rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(&i)) {
|
||||||
rustsecp256k1_v0_1_0_gej_double_var(r, a, rzr);
|
rustsecp256k1_v0_1_1_gej_double_var(r, a, rzr);
|
||||||
} else {
|
} else {
|
||||||
if (rzr != NULL) {
|
if (rzr != NULL) {
|
||||||
rustsecp256k1_v0_1_0_fe_set_int(rzr, 0);
|
rustsecp256k1_v0_1_1_fe_set_int(rzr, 0);
|
||||||
}
|
}
|
||||||
r->infinity = 1;
|
r->infinity = 1;
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&i2, &i);
|
rustsecp256k1_v0_1_1_fe_sqr(&i2, &i);
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&h2, &h);
|
rustsecp256k1_v0_1_1_fe_sqr(&h2, &h);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&h3, &h, &h2);
|
rustsecp256k1_v0_1_1_fe_mul(&h3, &h, &h2);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&h, &h, &b->z);
|
rustsecp256k1_v0_1_1_fe_mul(&h, &h, &b->z);
|
||||||
if (rzr != NULL) {
|
if (rzr != NULL) {
|
||||||
*rzr = h;
|
*rzr = h;
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&r->z, &a->z, &h);
|
rustsecp256k1_v0_1_1_fe_mul(&r->z, &a->z, &h);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&t, &u1, &h2);
|
rustsecp256k1_v0_1_1_fe_mul(&t, &u1, &h2);
|
||||||
r->x = t; rustsecp256k1_v0_1_0_fe_mul_int(&r->x, 2); rustsecp256k1_v0_1_0_fe_add(&r->x, &h3); rustsecp256k1_v0_1_0_fe_negate(&r->x, &r->x, 3); rustsecp256k1_v0_1_0_fe_add(&r->x, &i2);
|
r->x = t; rustsecp256k1_v0_1_1_fe_mul_int(&r->x, 2); rustsecp256k1_v0_1_1_fe_add(&r->x, &h3); rustsecp256k1_v0_1_1_fe_negate(&r->x, &r->x, 3); rustsecp256k1_v0_1_1_fe_add(&r->x, &i2);
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&r->y, &r->x, 5); rustsecp256k1_v0_1_0_fe_add(&r->y, &t); rustsecp256k1_v0_1_0_fe_mul(&r->y, &r->y, &i);
|
rustsecp256k1_v0_1_1_fe_negate(&r->y, &r->x, 5); rustsecp256k1_v0_1_1_fe_add(&r->y, &t); rustsecp256k1_v0_1_1_fe_mul(&r->y, &r->y, &i);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&h3, &h3, &s1); rustsecp256k1_v0_1_0_fe_negate(&h3, &h3, 1);
|
rustsecp256k1_v0_1_1_fe_mul(&h3, &h3, &s1); rustsecp256k1_v0_1_1_fe_negate(&h3, &h3, 1);
|
||||||
rustsecp256k1_v0_1_0_fe_add(&r->y, &h3);
|
rustsecp256k1_v0_1_1_fe_add(&r->y, &h3);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_gej_add_ge_var(rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_gej *a, const rustsecp256k1_v0_1_0_ge *b, rustsecp256k1_v0_1_0_fe *rzr) {
|
static void rustsecp256k1_v0_1_1_gej_add_ge_var(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_ge *b, rustsecp256k1_v0_1_1_fe *rzr) {
|
||||||
/* 8 mul, 3 sqr, 4 normalize, 12 mul_int/add/negate */
|
/* 8 mul, 3 sqr, 4 normalize, 12 mul_int/add/negate */
|
||||||
rustsecp256k1_v0_1_0_fe z12, u1, u2, s1, s2, h, i, i2, h2, h3, t;
|
rustsecp256k1_v0_1_1_fe z12, u1, u2, s1, s2, h, i, i2, h2, h3, t;
|
||||||
if (a->infinity) {
|
if (a->infinity) {
|
||||||
VERIFY_CHECK(rzr == NULL);
|
VERIFY_CHECK(rzr == NULL);
|
||||||
rustsecp256k1_v0_1_0_gej_set_ge(r, b);
|
rustsecp256k1_v0_1_1_gej_set_ge(r, b);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (b->infinity) {
|
if (b->infinity) {
|
||||||
if (rzr != NULL) {
|
if (rzr != NULL) {
|
||||||
rustsecp256k1_v0_1_0_fe_set_int(rzr, 1);
|
rustsecp256k1_v0_1_1_fe_set_int(rzr, 1);
|
||||||
}
|
}
|
||||||
*r = *a;
|
*r = *a;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
r->infinity = 0;
|
r->infinity = 0;
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&z12, &a->z);
|
rustsecp256k1_v0_1_1_fe_sqr(&z12, &a->z);
|
||||||
u1 = a->x; rustsecp256k1_v0_1_0_fe_normalize_weak(&u1);
|
u1 = a->x; rustsecp256k1_v0_1_1_fe_normalize_weak(&u1);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&u2, &b->x, &z12);
|
rustsecp256k1_v0_1_1_fe_mul(&u2, &b->x, &z12);
|
||||||
s1 = a->y; rustsecp256k1_v0_1_0_fe_normalize_weak(&s1);
|
s1 = a->y; rustsecp256k1_v0_1_1_fe_normalize_weak(&s1);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&s2, &b->y, &z12); rustsecp256k1_v0_1_0_fe_mul(&s2, &s2, &a->z);
|
rustsecp256k1_v0_1_1_fe_mul(&s2, &b->y, &z12); rustsecp256k1_v0_1_1_fe_mul(&s2, &s2, &a->z);
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&h, &u1, 1); rustsecp256k1_v0_1_0_fe_add(&h, &u2);
|
rustsecp256k1_v0_1_1_fe_negate(&h, &u1, 1); rustsecp256k1_v0_1_1_fe_add(&h, &u2);
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&i, &s1, 1); rustsecp256k1_v0_1_0_fe_add(&i, &s2);
|
rustsecp256k1_v0_1_1_fe_negate(&i, &s1, 1); rustsecp256k1_v0_1_1_fe_add(&i, &s2);
|
||||||
if (rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(&h)) {
|
if (rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(&h)) {
|
||||||
if (rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(&i)) {
|
if (rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(&i)) {
|
||||||
rustsecp256k1_v0_1_0_gej_double_var(r, a, rzr);
|
rustsecp256k1_v0_1_1_gej_double_var(r, a, rzr);
|
||||||
} else {
|
} else {
|
||||||
if (rzr != NULL) {
|
if (rzr != NULL) {
|
||||||
rustsecp256k1_v0_1_0_fe_set_int(rzr, 0);
|
rustsecp256k1_v0_1_1_fe_set_int(rzr, 0);
|
||||||
}
|
}
|
||||||
r->infinity = 1;
|
r->infinity = 1;
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&i2, &i);
|
rustsecp256k1_v0_1_1_fe_sqr(&i2, &i);
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&h2, &h);
|
rustsecp256k1_v0_1_1_fe_sqr(&h2, &h);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&h3, &h, &h2);
|
rustsecp256k1_v0_1_1_fe_mul(&h3, &h, &h2);
|
||||||
if (rzr != NULL) {
|
if (rzr != NULL) {
|
||||||
*rzr = h;
|
*rzr = h;
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&r->z, &a->z, &h);
|
rustsecp256k1_v0_1_1_fe_mul(&r->z, &a->z, &h);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&t, &u1, &h2);
|
rustsecp256k1_v0_1_1_fe_mul(&t, &u1, &h2);
|
||||||
r->x = t; rustsecp256k1_v0_1_0_fe_mul_int(&r->x, 2); rustsecp256k1_v0_1_0_fe_add(&r->x, &h3); rustsecp256k1_v0_1_0_fe_negate(&r->x, &r->x, 3); rustsecp256k1_v0_1_0_fe_add(&r->x, &i2);
|
r->x = t; rustsecp256k1_v0_1_1_fe_mul_int(&r->x, 2); rustsecp256k1_v0_1_1_fe_add(&r->x, &h3); rustsecp256k1_v0_1_1_fe_negate(&r->x, &r->x, 3); rustsecp256k1_v0_1_1_fe_add(&r->x, &i2);
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&r->y, &r->x, 5); rustsecp256k1_v0_1_0_fe_add(&r->y, &t); rustsecp256k1_v0_1_0_fe_mul(&r->y, &r->y, &i);
|
rustsecp256k1_v0_1_1_fe_negate(&r->y, &r->x, 5); rustsecp256k1_v0_1_1_fe_add(&r->y, &t); rustsecp256k1_v0_1_1_fe_mul(&r->y, &r->y, &i);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&h3, &h3, &s1); rustsecp256k1_v0_1_0_fe_negate(&h3, &h3, 1);
|
rustsecp256k1_v0_1_1_fe_mul(&h3, &h3, &s1); rustsecp256k1_v0_1_1_fe_negate(&h3, &h3, 1);
|
||||||
rustsecp256k1_v0_1_0_fe_add(&r->y, &h3);
|
rustsecp256k1_v0_1_1_fe_add(&r->y, &h3);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_gej_add_zinv_var(rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_gej *a, const rustsecp256k1_v0_1_0_ge *b, const rustsecp256k1_v0_1_0_fe *bzinv) {
|
static void rustsecp256k1_v0_1_1_gej_add_zinv_var(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_ge *b, const rustsecp256k1_v0_1_1_fe *bzinv) {
|
||||||
/* 9 mul, 3 sqr, 4 normalize, 12 mul_int/add/negate */
|
/* 9 mul, 3 sqr, 4 normalize, 12 mul_int/add/negate */
|
||||||
rustsecp256k1_v0_1_0_fe az, z12, u1, u2, s1, s2, h, i, i2, h2, h3, t;
|
rustsecp256k1_v0_1_1_fe az, z12, u1, u2, s1, s2, h, i, i2, h2, h3, t;
|
||||||
|
|
||||||
if (b->infinity) {
|
if (b->infinity) {
|
||||||
*r = *a;
|
*r = *a;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (a->infinity) {
|
if (a->infinity) {
|
||||||
rustsecp256k1_v0_1_0_fe bzinv2, bzinv3;
|
rustsecp256k1_v0_1_1_fe bzinv2, bzinv3;
|
||||||
r->infinity = b->infinity;
|
r->infinity = b->infinity;
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&bzinv2, bzinv);
|
rustsecp256k1_v0_1_1_fe_sqr(&bzinv2, bzinv);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&bzinv3, &bzinv2, bzinv);
|
rustsecp256k1_v0_1_1_fe_mul(&bzinv3, &bzinv2, bzinv);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&r->x, &b->x, &bzinv2);
|
rustsecp256k1_v0_1_1_fe_mul(&r->x, &b->x, &bzinv2);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&r->y, &b->y, &bzinv3);
|
rustsecp256k1_v0_1_1_fe_mul(&r->y, &b->y, &bzinv3);
|
||||||
rustsecp256k1_v0_1_0_fe_set_int(&r->z, 1);
|
rustsecp256k1_v0_1_1_fe_set_int(&r->z, 1);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
r->infinity = 0;
|
r->infinity = 0;
|
||||||
|
@ -493,40 +493,40 @@ static void rustsecp256k1_v0_1_0_gej_add_zinv_var(rustsecp256k1_v0_1_0_gej *r, c
|
||||||
* The variable az below holds the modified Z coordinate for a, which is used
|
* The variable az below holds the modified Z coordinate for a, which is used
|
||||||
* for the computation of rx and ry, but not for rz.
|
* for the computation of rx and ry, but not for rz.
|
||||||
*/
|
*/
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&az, &a->z, bzinv);
|
rustsecp256k1_v0_1_1_fe_mul(&az, &a->z, bzinv);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&z12, &az);
|
rustsecp256k1_v0_1_1_fe_sqr(&z12, &az);
|
||||||
u1 = a->x; rustsecp256k1_v0_1_0_fe_normalize_weak(&u1);
|
u1 = a->x; rustsecp256k1_v0_1_1_fe_normalize_weak(&u1);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&u2, &b->x, &z12);
|
rustsecp256k1_v0_1_1_fe_mul(&u2, &b->x, &z12);
|
||||||
s1 = a->y; rustsecp256k1_v0_1_0_fe_normalize_weak(&s1);
|
s1 = a->y; rustsecp256k1_v0_1_1_fe_normalize_weak(&s1);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&s2, &b->y, &z12); rustsecp256k1_v0_1_0_fe_mul(&s2, &s2, &az);
|
rustsecp256k1_v0_1_1_fe_mul(&s2, &b->y, &z12); rustsecp256k1_v0_1_1_fe_mul(&s2, &s2, &az);
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&h, &u1, 1); rustsecp256k1_v0_1_0_fe_add(&h, &u2);
|
rustsecp256k1_v0_1_1_fe_negate(&h, &u1, 1); rustsecp256k1_v0_1_1_fe_add(&h, &u2);
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&i, &s1, 1); rustsecp256k1_v0_1_0_fe_add(&i, &s2);
|
rustsecp256k1_v0_1_1_fe_negate(&i, &s1, 1); rustsecp256k1_v0_1_1_fe_add(&i, &s2);
|
||||||
if (rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(&h)) {
|
if (rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(&h)) {
|
||||||
if (rustsecp256k1_v0_1_0_fe_normalizes_to_zero_var(&i)) {
|
if (rustsecp256k1_v0_1_1_fe_normalizes_to_zero_var(&i)) {
|
||||||
rustsecp256k1_v0_1_0_gej_double_var(r, a, NULL);
|
rustsecp256k1_v0_1_1_gej_double_var(r, a, NULL);
|
||||||
} else {
|
} else {
|
||||||
r->infinity = 1;
|
r->infinity = 1;
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&i2, &i);
|
rustsecp256k1_v0_1_1_fe_sqr(&i2, &i);
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&h2, &h);
|
rustsecp256k1_v0_1_1_fe_sqr(&h2, &h);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&h3, &h, &h2);
|
rustsecp256k1_v0_1_1_fe_mul(&h3, &h, &h2);
|
||||||
r->z = a->z; rustsecp256k1_v0_1_0_fe_mul(&r->z, &r->z, &h);
|
r->z = a->z; rustsecp256k1_v0_1_1_fe_mul(&r->z, &r->z, &h);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&t, &u1, &h2);
|
rustsecp256k1_v0_1_1_fe_mul(&t, &u1, &h2);
|
||||||
r->x = t; rustsecp256k1_v0_1_0_fe_mul_int(&r->x, 2); rustsecp256k1_v0_1_0_fe_add(&r->x, &h3); rustsecp256k1_v0_1_0_fe_negate(&r->x, &r->x, 3); rustsecp256k1_v0_1_0_fe_add(&r->x, &i2);
|
r->x = t; rustsecp256k1_v0_1_1_fe_mul_int(&r->x, 2); rustsecp256k1_v0_1_1_fe_add(&r->x, &h3); rustsecp256k1_v0_1_1_fe_negate(&r->x, &r->x, 3); rustsecp256k1_v0_1_1_fe_add(&r->x, &i2);
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&r->y, &r->x, 5); rustsecp256k1_v0_1_0_fe_add(&r->y, &t); rustsecp256k1_v0_1_0_fe_mul(&r->y, &r->y, &i);
|
rustsecp256k1_v0_1_1_fe_negate(&r->y, &r->x, 5); rustsecp256k1_v0_1_1_fe_add(&r->y, &t); rustsecp256k1_v0_1_1_fe_mul(&r->y, &r->y, &i);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&h3, &h3, &s1); rustsecp256k1_v0_1_0_fe_negate(&h3, &h3, 1);
|
rustsecp256k1_v0_1_1_fe_mul(&h3, &h3, &s1); rustsecp256k1_v0_1_1_fe_negate(&h3, &h3, 1);
|
||||||
rustsecp256k1_v0_1_0_fe_add(&r->y, &h3);
|
rustsecp256k1_v0_1_1_fe_add(&r->y, &h3);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_gej_add_ge(rustsecp256k1_v0_1_0_gej *r, const rustsecp256k1_v0_1_0_gej *a, const rustsecp256k1_v0_1_0_ge *b) {
|
static void rustsecp256k1_v0_1_1_gej_add_ge(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_gej *a, const rustsecp256k1_v0_1_1_ge *b) {
|
||||||
/* Operations: 7 mul, 5 sqr, 4 normalize, 21 mul_int/add/negate/cmov */
|
/* Operations: 7 mul, 5 sqr, 4 normalize, 21 mul_int/add/negate/cmov */
|
||||||
static const rustsecp256k1_v0_1_0_fe fe_1 = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 1);
|
static const rustsecp256k1_v0_1_1_fe fe_1 = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 1);
|
||||||
rustsecp256k1_v0_1_0_fe zz, u1, u2, s1, s2, t, tt, m, n, q, rr;
|
rustsecp256k1_v0_1_1_fe zz, u1, u2, s1, s2, t, tt, m, n, q, rr;
|
||||||
rustsecp256k1_v0_1_0_fe m_alt, rr_alt;
|
rustsecp256k1_v0_1_1_fe m_alt, rr_alt;
|
||||||
int infinity, degenerate;
|
int infinity, degenerate;
|
||||||
VERIFY_CHECK(!b->infinity);
|
VERIFY_CHECK(!b->infinity);
|
||||||
VERIFY_CHECK(a->infinity == 0 || a->infinity == 1);
|
VERIFY_CHECK(a->infinity == 0 || a->infinity == 1);
|
||||||
|
@ -581,115 +581,115 @@ static void rustsecp256k1_v0_1_0_gej_add_ge(rustsecp256k1_v0_1_0_gej *r, const r
|
||||||
* so this covers everything.
|
* so this covers everything.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&zz, &a->z); /* z = Z1^2 */
|
rustsecp256k1_v0_1_1_fe_sqr(&zz, &a->z); /* z = Z1^2 */
|
||||||
u1 = a->x; rustsecp256k1_v0_1_0_fe_normalize_weak(&u1); /* u1 = U1 = X1*Z2^2 (1) */
|
u1 = a->x; rustsecp256k1_v0_1_1_fe_normalize_weak(&u1); /* u1 = U1 = X1*Z2^2 (1) */
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&u2, &b->x, &zz); /* u2 = U2 = X2*Z1^2 (1) */
|
rustsecp256k1_v0_1_1_fe_mul(&u2, &b->x, &zz); /* u2 = U2 = X2*Z1^2 (1) */
|
||||||
s1 = a->y; rustsecp256k1_v0_1_0_fe_normalize_weak(&s1); /* s1 = S1 = Y1*Z2^3 (1) */
|
s1 = a->y; rustsecp256k1_v0_1_1_fe_normalize_weak(&s1); /* s1 = S1 = Y1*Z2^3 (1) */
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&s2, &b->y, &zz); /* s2 = Y2*Z1^2 (1) */
|
rustsecp256k1_v0_1_1_fe_mul(&s2, &b->y, &zz); /* s2 = Y2*Z1^2 (1) */
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&s2, &s2, &a->z); /* s2 = S2 = Y2*Z1^3 (1) */
|
rustsecp256k1_v0_1_1_fe_mul(&s2, &s2, &a->z); /* s2 = S2 = Y2*Z1^3 (1) */
|
||||||
t = u1; rustsecp256k1_v0_1_0_fe_add(&t, &u2); /* t = T = U1+U2 (2) */
|
t = u1; rustsecp256k1_v0_1_1_fe_add(&t, &u2); /* t = T = U1+U2 (2) */
|
||||||
m = s1; rustsecp256k1_v0_1_0_fe_add(&m, &s2); /* m = M = S1+S2 (2) */
|
m = s1; rustsecp256k1_v0_1_1_fe_add(&m, &s2); /* m = M = S1+S2 (2) */
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&rr, &t); /* rr = T^2 (1) */
|
rustsecp256k1_v0_1_1_fe_sqr(&rr, &t); /* rr = T^2 (1) */
|
||||||
rustsecp256k1_v0_1_0_fe_negate(&m_alt, &u2, 1); /* Malt = -X2*Z1^2 */
|
rustsecp256k1_v0_1_1_fe_negate(&m_alt, &u2, 1); /* Malt = -X2*Z1^2 */
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&tt, &u1, &m_alt); /* tt = -U1*U2 (2) */
|
rustsecp256k1_v0_1_1_fe_mul(&tt, &u1, &m_alt); /* tt = -U1*U2 (2) */
|
||||||
rustsecp256k1_v0_1_0_fe_add(&rr, &tt); /* rr = R = T^2-U1*U2 (3) */
|
rustsecp256k1_v0_1_1_fe_add(&rr, &tt); /* rr = R = T^2-U1*U2 (3) */
|
||||||
/** If lambda = R/M = 0/0 we have a problem (except in the "trivial"
|
/** If lambda = R/M = 0/0 we have a problem (except in the "trivial"
|
||||||
* case that Z = z1z2 = 0, and this is special-cased later on). */
|
* case that Z = z1z2 = 0, and this is special-cased later on). */
|
||||||
degenerate = rustsecp256k1_v0_1_0_fe_normalizes_to_zero(&m) &
|
degenerate = rustsecp256k1_v0_1_1_fe_normalizes_to_zero(&m) &
|
||||||
rustsecp256k1_v0_1_0_fe_normalizes_to_zero(&rr);
|
rustsecp256k1_v0_1_1_fe_normalizes_to_zero(&rr);
|
||||||
    /* This only occurs when y1 == -y2 and x1^3 == x2^3, but x1 != x2.
     * This means either x1 == beta*x2 or beta*x1 == x2, where beta is
     * a nontrivial cube root of one. In either case, an alternate
     * non-indeterminate expression for lambda is (y1 - y2)/(x1 - x2),
     * so we set R/M equal to this. */
    rr_alt = s1;
    rustsecp256k1_v0_1_1_fe_mul_int(&rr_alt, 2);       /* rr = Y1*Z2^3 - Y2*Z1^3 (2) */
    rustsecp256k1_v0_1_1_fe_add(&m_alt, &u1);          /* Malt = X1*Z2^2 - X2*Z1^2 */

    rustsecp256k1_v0_1_1_fe_cmov(&rr_alt, &rr, !degenerate);
    rustsecp256k1_v0_1_1_fe_cmov(&m_alt, &m, !degenerate);
    /* Now Ralt / Malt = lambda and is guaranteed not to be 0/0.
     * From here on out Ralt and Malt represent the numerator
     * and denominator of lambda; R and M represent the explicit
     * expressions x1^2 + x2^2 + x1x2 and y1 + y2. */
    rustsecp256k1_v0_1_1_fe_sqr(&n, &m_alt);                       /* n = Malt^2 (1) */
    rustsecp256k1_v0_1_1_fe_mul(&q, &n, &t);                       /* q = Q = T*Malt^2 (1) */
    /* These two lines use the observation that either M == Malt or M == 0,
     * so M^3 * Malt is either Malt^4 (which is computed by squaring), or
     * zero (which is "computed" by cmov). So the cost is one squaring
     * versus two multiplications. */
    rustsecp256k1_v0_1_1_fe_sqr(&n, &n);
    rustsecp256k1_v0_1_1_fe_cmov(&n, &m, degenerate);              /* n = M^3 * Malt (2) */
    rustsecp256k1_v0_1_1_fe_sqr(&t, &rr_alt);                      /* t = Ralt^2 (1) */
    rustsecp256k1_v0_1_1_fe_mul(&r->z, &a->z, &m_alt);             /* r->z = Malt*Z (1) */
    infinity = rustsecp256k1_v0_1_1_fe_normalizes_to_zero(&r->z) * (1 - a->infinity);
    rustsecp256k1_v0_1_1_fe_mul_int(&r->z, 2);                     /* r->z = Z3 = 2*Malt*Z (2) */
    rustsecp256k1_v0_1_1_fe_negate(&q, &q, 1);                     /* q = -Q (2) */
    rustsecp256k1_v0_1_1_fe_add(&t, &q);                           /* t = Ralt^2-Q (3) */
    rustsecp256k1_v0_1_1_fe_normalize_weak(&t);
    r->x = t;                                                      /* r->x = Ralt^2-Q (1) */
    rustsecp256k1_v0_1_1_fe_mul_int(&t, 2);                        /* t = 2*x3 (2) */
    rustsecp256k1_v0_1_1_fe_add(&t, &q);                           /* t = 2*x3 - Q: (4) */
    rustsecp256k1_v0_1_1_fe_mul(&t, &t, &rr_alt);                  /* t = Ralt*(2*x3 - Q) (1) */
    rustsecp256k1_v0_1_1_fe_add(&t, &n);                           /* t = Ralt*(2*x3 - Q) + M^3*Malt (3) */
    rustsecp256k1_v0_1_1_fe_negate(&r->y, &t, 3);                  /* r->y = Ralt*(Q - 2x3) - M^3*Malt (4) */
    rustsecp256k1_v0_1_1_fe_normalize_weak(&r->y);
    rustsecp256k1_v0_1_1_fe_mul_int(&r->x, 4);                     /* r->x = X3 = 4*(Ralt^2-Q) */
    rustsecp256k1_v0_1_1_fe_mul_int(&r->y, 4);                     /* r->y = Y3 = 4*Ralt*(Q - 2x3) - 4*M^3*Malt (4) */

    /** In case a->infinity == 1, replace r with (b->x, b->y, 1). */
    rustsecp256k1_v0_1_1_fe_cmov(&r->x, &b->x, a->infinity);
    rustsecp256k1_v0_1_1_fe_cmov(&r->y, &b->y, a->infinity);
    rustsecp256k1_v0_1_1_fe_cmov(&r->z, &fe_1, a->infinity);
    r->infinity = infinity;
}

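As an aside, the alternate slope referred to in the degenerate-case comment above follows directly from the curve equation y^2 = x^3 + 7: with x_1 != x_2,

    \frac{y_1 - y_2}{x_1 - x_2}
      = \frac{y_1^2 - y_2^2}{(x_1 - x_2)(y_1 + y_2)}
      = \frac{x_1^3 - x_2^3}{(x_1 - x_2)(y_1 + y_2)}
      = \frac{x_1^2 + x_1 x_2 + x_2^2}{y_1 + y_2}.

The right-hand side is the R/M form the surrounding code normally tracks; it becomes 0/0 exactly when y_1 + y_2 = 0, while the left-hand side stays well defined, which is why rr_alt/m_alt are substituted in that case.
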
static void rustsecp256k1_v0_1_1_gej_rescale(rustsecp256k1_v0_1_1_gej *r, const rustsecp256k1_v0_1_1_fe *s) {
    /* Operations: 4 mul, 1 sqr */
    rustsecp256k1_v0_1_1_fe zz;
    VERIFY_CHECK(!rustsecp256k1_v0_1_1_fe_is_zero(s));
    rustsecp256k1_v0_1_1_fe_sqr(&zz, s);
    rustsecp256k1_v0_1_1_fe_mul(&r->x, &r->x, &zz);                /* r->x *= s^2 */
    rustsecp256k1_v0_1_1_fe_mul(&r->y, &r->y, &zz);
    rustsecp256k1_v0_1_1_fe_mul(&r->y, &r->y, s);                  /* r->y *= s^3 */
    rustsecp256k1_v0_1_1_fe_mul(&r->z, &r->z, s);                  /* r->z *= s */
}

static void rustsecp256k1_v0_1_1_ge_to_storage(rustsecp256k1_v0_1_1_ge_storage *r, const rustsecp256k1_v0_1_1_ge *a) {
    rustsecp256k1_v0_1_1_fe x, y;
    VERIFY_CHECK(!a->infinity);
    x = a->x;
    rustsecp256k1_v0_1_1_fe_normalize(&x);
    y = a->y;
    rustsecp256k1_v0_1_1_fe_normalize(&y);
    rustsecp256k1_v0_1_1_fe_to_storage(&r->x, &x);
    rustsecp256k1_v0_1_1_fe_to_storage(&r->y, &y);
}

static void rustsecp256k1_v0_1_1_ge_from_storage(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_ge_storage *a) {
    rustsecp256k1_v0_1_1_fe_from_storage(&r->x, &a->x);
    rustsecp256k1_v0_1_1_fe_from_storage(&r->y, &a->y);
    r->infinity = 0;
}

static SECP256K1_INLINE void rustsecp256k1_v0_1_1_ge_storage_cmov(rustsecp256k1_v0_1_1_ge_storage *r, const rustsecp256k1_v0_1_1_ge_storage *a, int flag) {
    rustsecp256k1_v0_1_1_fe_storage_cmov(&r->x, &a->x, flag);
    rustsecp256k1_v0_1_1_fe_storage_cmov(&r->y, &a->y, flag);
}

#ifdef USE_ENDOMORPHISM
static void rustsecp256k1_v0_1_1_ge_mul_lambda(rustsecp256k1_v0_1_1_ge *r, const rustsecp256k1_v0_1_1_ge *a) {
    static const rustsecp256k1_v0_1_1_fe beta = SECP256K1_FE_CONST(
        0x7ae96a2bul, 0x657c0710ul, 0x6e64479eul, 0xac3434e9ul,
        0x9cf04975ul, 0x12f58995ul, 0xc1396c28ul, 0x719501eeul
    );
    *r = *a;
    rustsecp256k1_v0_1_1_fe_mul(&r->x, &r->x, &beta);
}
#endif

static int rustsecp256k1_v0_1_1_gej_has_quad_y_var(const rustsecp256k1_v0_1_1_gej *a) {
    rustsecp256k1_v0_1_1_fe yz;

    if (a->infinity) {
        return 0;

@@ -698,8 +698,8 @@ static int rustsecp256k1_v0_1_0_gej_has_quad_y_var(const rustsecp256k1_v0_1_0_ge
    /* We rely on the fact that the Jacobi symbol of 1 / a->z^3 is the same as
     * that of a->z. Thus a->y / a->z^3 is a quadratic residue iff a->y * a->z
       is */
    rustsecp256k1_v0_1_1_fe_mul(&yz, &a->y, &a->z);
    return rustsecp256k1_v0_1_1_fe_is_quad_var(&yz);
}

#endif /* SECP256K1_GROUP_IMPL_H */
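As an aside on the Jacobi-symbol comment just above: y/z^3 = (y z)/z^4, and z^4 = (z^2)^2 is a nonzero square, so multiplying by it does not change quadratic residuosity. Hence

    \left(\frac{y/z^3}{p}\right) = \left(\frac{y\,z}{p}\right),

which is why the code can test a->y * a->z instead of performing a field inversion.
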
|
@@ -14,28 +14,28 @@ typedef struct {
    uint32_t s[8];
    uint32_t buf[16]; /* In big endian */
    size_t bytes;
} rustsecp256k1_v0_1_1_sha256;

static void rustsecp256k1_v0_1_1_sha256_initialize(rustsecp256k1_v0_1_1_sha256 *hash);
static void rustsecp256k1_v0_1_1_sha256_write(rustsecp256k1_v0_1_1_sha256 *hash, const unsigned char *data, size_t size);
static void rustsecp256k1_v0_1_1_sha256_finalize(rustsecp256k1_v0_1_1_sha256 *hash, unsigned char *out32);

typedef struct {
    rustsecp256k1_v0_1_1_sha256 inner, outer;
} rustsecp256k1_v0_1_1_hmac_sha256;

static void rustsecp256k1_v0_1_1_hmac_sha256_initialize(rustsecp256k1_v0_1_1_hmac_sha256 *hash, const unsigned char *key, size_t size);
static void rustsecp256k1_v0_1_1_hmac_sha256_write(rustsecp256k1_v0_1_1_hmac_sha256 *hash, const unsigned char *data, size_t size);
static void rustsecp256k1_v0_1_1_hmac_sha256_finalize(rustsecp256k1_v0_1_1_hmac_sha256 *hash, unsigned char *out32);

typedef struct {
    unsigned char v[32];
    unsigned char k[32];
    int retry;
} rustsecp256k1_v0_1_1_rfc6979_hmac_sha256;

static void rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_initialize(rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 *rng, const unsigned char *key, size_t keylen);
static void rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_generate(rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 *rng, unsigned char *out, size_t outlen);
static void rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_finalize(rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 *rng);

#endif /* SECP256K1_HASH_H */
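A minimal usage sketch of the streaming interface declared above (illustrative only: the helper name and message are made up, and the snippet assumes it lives in a translation unit that includes this header, since the functions are static):

    static void example_hash_message(unsigned char out32[32]) {
        static const unsigned char msg[] = "example message";
        rustsecp256k1_v0_1_1_sha256 hasher;
        rustsecp256k1_v0_1_1_sha256_initialize(&hasher);
        /* write() may be called any number of times; partial blocks are buffered. */
        rustsecp256k1_v0_1_1_sha256_write(&hasher, msg, sizeof(msg) - 1);
        rustsecp256k1_v0_1_1_sha256_finalize(&hasher, out32);
    }
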
|
@@ -33,7 +33,7 @@
#define BE32(p) ((((p) & 0xFF) << 24) | (((p) & 0xFF00) << 8) | (((p) & 0xFF0000) >> 8) | (((p) & 0xFF000000) >> 24))
#endif

static void rustsecp256k1_v0_1_1_sha256_initialize(rustsecp256k1_v0_1_1_sha256 *hash) {
    hash->s[0] = 0x6a09e667ul;
    hash->s[1] = 0xbb67ae85ul;
    hash->s[2] = 0x3c6ef372ul;

@@ -46,7 +46,7 @@ static void rustsecp256k1_v0_1_0_sha256_initialize(rustsecp256k1_v0_1_0_sha256 *
}

/** Perform one SHA-256 transformation, processing 16 big endian 32-bit words. */
static void rustsecp256k1_v0_1_1_sha256_transform(uint32_t* s, const uint32_t* chunk) {
    uint32_t a = s[0], b = s[1], c = s[2], d = s[3], e = s[4], f = s[5], g = s[6], h = s[7];
    uint32_t w0, w1, w2, w3, w4, w5, w6, w7, w8, w9, w10, w11, w12, w13, w14, w15;

@@ -128,7 +128,7 @@ static void rustsecp256k1_v0_1_0_sha256_transform(uint32_t* s, const uint32_t* c
    s[7] += h;
}

static void rustsecp256k1_v0_1_1_sha256_write(rustsecp256k1_v0_1_1_sha256 *hash, const unsigned char *data, size_t len) {
    size_t bufsize = hash->bytes & 0x3F;
    hash->bytes += len;
    while (bufsize + len >= 64) {

@@ -137,7 +137,7 @@ static void rustsecp256k1_v0_1_0_sha256_write(rustsecp256k1_v0_1_0_sha256 *hash,
        memcpy(((unsigned char*)hash->buf) + bufsize, data, chunk_len);
        data += chunk_len;
        len -= chunk_len;
        rustsecp256k1_v0_1_1_sha256_transform(hash->s, hash->buf);
        bufsize = 0;
    }
    if (len) {

@@ -146,15 +146,15 @@ static void rustsecp256k1_v0_1_0_sha256_write(rustsecp256k1_v0_1_0_sha256 *hash,
    }
}

static void rustsecp256k1_v0_1_1_sha256_finalize(rustsecp256k1_v0_1_1_sha256 *hash, unsigned char *out32) {
    static const unsigned char pad[64] = {0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
    uint32_t sizedesc[2];
    uint32_t out[8];
    int i = 0;
    sizedesc[0] = BE32(hash->bytes >> 29);
    sizedesc[1] = BE32(hash->bytes << 3);
    rustsecp256k1_v0_1_1_sha256_write(hash, pad, 1 + ((119 - (hash->bytes % 64)) % 64));
    rustsecp256k1_v0_1_1_sha256_write(hash, (const unsigned char*)sizedesc, 8);
    for (i = 0; i < 8; i++) {
        out[i] = BE32(hash->s[i]);
        hash->s[i] = 0;

@@ -162,49 +162,49 @@ static void rustsecp256k1_v0_1_0_sha256_finalize(rustsecp256k1_v0_1_0_sha256 *ha
    memcpy(out32, (const unsigned char*)out, 32);
}

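The pad length used in finalize above, 1 + ((119 - (bytes % 64)) % 64), always leaves exactly 8 bytes of room for the big-endian bit-length words, so the final write ends on a 64-byte block boundary. A small standalone check of that arithmetic (illustrative, not part of the vendored sources; the helper name is made up):

    #include <assert.h>
    #include <stddef.h>

    static void check_sha256_pad_lengths(void) {
        size_t bytes;
        for (bytes = 0; bytes < 256; bytes++) {
            size_t padlen = 1 + ((119 - (bytes % 64)) % 64); /* same formula as above */
            assert(padlen >= 1 && padlen <= 64);             /* at least the 0x80 byte */
            assert((bytes + padlen + 8) % 64 == 0);          /* length words fit exactly */
        }
    }
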
static void rustsecp256k1_v0_1_1_hmac_sha256_initialize(rustsecp256k1_v0_1_1_hmac_sha256 *hash, const unsigned char *key, size_t keylen) {
    size_t n;
    unsigned char rkey[64];
    if (keylen <= sizeof(rkey)) {
        memcpy(rkey, key, keylen);
        memset(rkey + keylen, 0, sizeof(rkey) - keylen);
    } else {
        rustsecp256k1_v0_1_1_sha256 sha256;
        rustsecp256k1_v0_1_1_sha256_initialize(&sha256);
        rustsecp256k1_v0_1_1_sha256_write(&sha256, key, keylen);
        rustsecp256k1_v0_1_1_sha256_finalize(&sha256, rkey);
        memset(rkey + 32, 0, 32);
    }

    rustsecp256k1_v0_1_1_sha256_initialize(&hash->outer);
    for (n = 0; n < sizeof(rkey); n++) {
        rkey[n] ^= 0x5c;
    }
    rustsecp256k1_v0_1_1_sha256_write(&hash->outer, rkey, sizeof(rkey));

    rustsecp256k1_v0_1_1_sha256_initialize(&hash->inner);
    for (n = 0; n < sizeof(rkey); n++) {
        rkey[n] ^= 0x5c ^ 0x36;
    }
    rustsecp256k1_v0_1_1_sha256_write(&hash->inner, rkey, sizeof(rkey));
    memset(rkey, 0, sizeof(rkey));
}
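Note that the second loop above XORs with 0x5c ^ 0x36 rather than 0x36: the key bytes already carry the 0x5c opad mask, and x ^ 0x5c ^ (0x5c ^ 0x36) == x ^ 0x36, so the opad-masked key is turned directly into the ipad-masked key without first restoring it. A standalone check of that identity (illustrative only, not part of the vendored sources):

    #include <assert.h>

    static void check_hmac_pad_trick(void) {
        unsigned int x;
        for (x = 0; x < 256; x++) {
            unsigned char opad_byte = (unsigned char)(x ^ 0x5c);
            assert((unsigned char)(opad_byte ^ (0x5c ^ 0x36)) == (unsigned char)(x ^ 0x36));
        }
    }
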
|
static void rustsecp256k1_v0_1_1_hmac_sha256_write(rustsecp256k1_v0_1_1_hmac_sha256 *hash, const unsigned char *data, size_t size) {
    rustsecp256k1_v0_1_1_sha256_write(&hash->inner, data, size);
}

static void rustsecp256k1_v0_1_1_hmac_sha256_finalize(rustsecp256k1_v0_1_1_hmac_sha256 *hash, unsigned char *out32) {
    unsigned char temp[32];
    rustsecp256k1_v0_1_1_sha256_finalize(&hash->inner, temp);
    rustsecp256k1_v0_1_1_sha256_write(&hash->outer, temp, 32);
    memset(temp, 0, 32);
    rustsecp256k1_v0_1_1_sha256_finalize(&hash->outer, out32);
}

static void rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_initialize(rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 *rng, const unsigned char *key, size_t keylen) {
    rustsecp256k1_v0_1_1_hmac_sha256 hmac;
    static const unsigned char zero[1] = {0x00};
    static const unsigned char one[1] = {0x01};

@@ -212,47 +212,47 @@ static void rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_initialize(rustsecp256k1_v0
    memset(rng->k, 0x00, 32); /* RFC6979 3.2.c. */

    /* RFC6979 3.2.d. */
    rustsecp256k1_v0_1_1_hmac_sha256_initialize(&hmac, rng->k, 32);
    rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, rng->v, 32);
    rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, zero, 1);
    rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, key, keylen);
    rustsecp256k1_v0_1_1_hmac_sha256_finalize(&hmac, rng->k);
    rustsecp256k1_v0_1_1_hmac_sha256_initialize(&hmac, rng->k, 32);
    rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, rng->v, 32);
    rustsecp256k1_v0_1_1_hmac_sha256_finalize(&hmac, rng->v);

    /* RFC6979 3.2.f. */
    rustsecp256k1_v0_1_1_hmac_sha256_initialize(&hmac, rng->k, 32);
    rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, rng->v, 32);
    rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, one, 1);
    rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, key, keylen);
    rustsecp256k1_v0_1_1_hmac_sha256_finalize(&hmac, rng->k);
    rustsecp256k1_v0_1_1_hmac_sha256_initialize(&hmac, rng->k, 32);
    rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, rng->v, 32);
    rustsecp256k1_v0_1_1_hmac_sha256_finalize(&hmac, rng->v);
    rng->retry = 0;
}
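A sketch of the drive sequence for this deterministic nonce generator, using only the functions declared in hash.h (the helper name and the 64-byte seed layout are illustrative; how the signing code actually builds the seed is not shown in this diff):

    static void example_rfc6979_nonce(const unsigned char seed64[64], unsigned char nonce32[32]) {
        rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 rng;
        rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_initialize(&rng, seed64, 64);
        /* Callers regenerate until the 32-byte candidate is an acceptable scalar. */
        rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32);
        rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_finalize(&rng);
    }
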
|
static void rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_generate(rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 *rng, unsigned char *out, size_t outlen) {
    /* RFC6979 3.2.h. */
    static const unsigned char zero[1] = {0x00};
    if (rng->retry) {
        rustsecp256k1_v0_1_1_hmac_sha256 hmac;
        rustsecp256k1_v0_1_1_hmac_sha256_initialize(&hmac, rng->k, 32);
        rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, rng->v, 32);
        rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, zero, 1);
        rustsecp256k1_v0_1_1_hmac_sha256_finalize(&hmac, rng->k);
        rustsecp256k1_v0_1_1_hmac_sha256_initialize(&hmac, rng->k, 32);
        rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, rng->v, 32);
        rustsecp256k1_v0_1_1_hmac_sha256_finalize(&hmac, rng->v);
    }

    while (outlen > 0) {
        rustsecp256k1_v0_1_1_hmac_sha256 hmac;
        int now = outlen;
        rustsecp256k1_v0_1_1_hmac_sha256_initialize(&hmac, rng->k, 32);
        rustsecp256k1_v0_1_1_hmac_sha256_write(&hmac, rng->v, 32);
        rustsecp256k1_v0_1_1_hmac_sha256_finalize(&hmac, rng->v);
        if (now > 32) {
            now = 32;
        }

@@ -264,7 +264,7 @@ static void rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_generate(rustsecp256k1_v0_1
    rng->retry = 1;
}

static void rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_finalize(rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 *rng) {
    memset(rng->k, 0, 32);
    memset(rng->v, 0, 32);
    rng->retry = 0;

@@ -69,7 +69,7 @@ public class NativeSecp256k1 {
        r.lock();
        try {
            return rustsecp256k1_v0_1_1_ecdsa_verify(byteBuff, Secp256k1Context.getContext(), signature.length, pub.length) == 1;
        } finally {
            r.unlock();
        }

@@ -101,7 +101,7 @@ public class NativeSecp256k1 {
        r.lock();
        try {
            retByteArray = rustsecp256k1_v0_1_1_ecdsa_sign(byteBuff, Secp256k1Context.getContext());
        } finally {
            r.unlock();
        }

@@ -134,7 +134,7 @@ public class NativeSecp256k1 {
        r.lock();
        try {
            return rustsecp256k1_v0_1_1_ec_seckey_verify(byteBuff,Secp256k1Context.getContext()) == 1;
        } finally {
            r.unlock();
        }

@@ -166,7 +166,7 @@ public class NativeSecp256k1 {
        r.lock();
        try {
            retByteArray = rustsecp256k1_v0_1_1_ec_pubkey_create(byteBuff, Secp256k1Context.getContext());
        } finally {
            r.unlock();
        }

@@ -187,7 +187,7 @@ public class NativeSecp256k1 {
    public static synchronized void cleanup() {
        w.lock();
        try {
            rustsecp256k1_v0_1_1_destroy_context(Secp256k1Context.getContext());
        } finally {
            w.unlock();
        }

@@ -196,7 +196,7 @@ public class NativeSecp256k1 {
    public static long cloneContext() {
        r.lock();
        try {
            return rustsecp256k1_v0_1_1_ctx_clone(Secp256k1Context.getContext());
        } finally { r.unlock(); }
    }

@@ -222,7 +222,7 @@ public class NativeSecp256k1 {
        byte[][] retByteArray;
        r.lock();
        try {
            retByteArray = rustsecp256k1_v0_1_1_privkey_tweak_mul(byteBuff,Secp256k1Context.getContext());
        } finally {
            r.unlock();
        }

@@ -261,7 +261,7 @@ public class NativeSecp256k1 {
        byte[][] retByteArray;
        r.lock();
        try {
            retByteArray = rustsecp256k1_v0_1_1_privkey_tweak_add(byteBuff,Secp256k1Context.getContext());
        } finally {
            r.unlock();
        }

@@ -300,7 +300,7 @@ public class NativeSecp256k1 {
        byte[][] retByteArray;
        r.lock();
        try {
            retByteArray = rustsecp256k1_v0_1_1_pubkey_tweak_add(byteBuff,Secp256k1Context.getContext(), pubkey.length);
        } finally {
            r.unlock();
        }

@@ -339,7 +339,7 @@ public class NativeSecp256k1 {
        byte[][] retByteArray;
        r.lock();
        try {
            retByteArray = rustsecp256k1_v0_1_1_pubkey_tweak_mul(byteBuff,Secp256k1Context.getContext(), pubkey.length);
        } finally {
            r.unlock();
        }

@@ -378,7 +378,7 @@ public class NativeSecp256k1 {
        byte[][] retByteArray;
        r.lock();
        try {
            retByteArray = rustsecp256k1_v0_1_1_ecdh(byteBuff, Secp256k1Context.getContext(), pubkey.length);
        } finally {
            r.unlock();
        }

@@ -411,36 +411,36 @@ public class NativeSecp256k1 {
        w.lock();
        try {
            return rustsecp256k1_v0_1_1_context_randomize(byteBuff, Secp256k1Context.getContext()) == 1;
        } finally {
            w.unlock();
        }
    }

    private static native long rustsecp256k1_v0_1_1_ctx_clone(long context);

    private static native int rustsecp256k1_v0_1_1_context_randomize(ByteBuffer byteBuff, long context);

    private static native byte[][] rustsecp256k1_v0_1_1_privkey_tweak_add(ByteBuffer byteBuff, long context);

    private static native byte[][] rustsecp256k1_v0_1_1_privkey_tweak_mul(ByteBuffer byteBuff, long context);

    private static native byte[][] rustsecp256k1_v0_1_1_pubkey_tweak_add(ByteBuffer byteBuff, long context, int pubLen);

    private static native byte[][] rustsecp256k1_v0_1_1_pubkey_tweak_mul(ByteBuffer byteBuff, long context, int pubLen);

    private static native void rustsecp256k1_v0_1_1_destroy_context(long context);

    private static native int rustsecp256k1_v0_1_1_ecdsa_verify(ByteBuffer byteBuff, long context, int sigLen, int pubLen);

    private static native byte[][] rustsecp256k1_v0_1_1_ecdsa_sign(ByteBuffer byteBuff, long context);

    private static native int rustsecp256k1_v0_1_1_ec_seckey_verify(ByteBuffer byteBuff, long context);

    private static native byte[][] rustsecp256k1_v0_1_1_ec_pubkey_create(ByteBuffer byteBuff, long context);

    private static native byte[][] rustsecp256k1_v0_1_1_ec_pubkey_parse(ByteBuffer byteBuff, long context, int inputLen);

    private static native byte[][] rustsecp256k1_v0_1_1_ecdh(ByteBuffer byteBuff, long context, int inputLen);

}

@@ -29,7 +29,7 @@ public class Secp256k1Context {
        long contextRef = -1;
        try {
            System.loadLibrary("secp256k1");
            contextRef = rustsecp256k1_v0_1_1_init_context();
        } catch (UnsatisfiedLinkError e) {
            System.out.println("UnsatisfiedLinkError: " + e.toString());
            isEnabled = false;

@@ -47,5 +47,5 @@ public class Secp256k1Context {
        return context;
    }

    private static native long rustsecp256k1_v0_1_1_init_context();
}

@@ -7,12 +7,12 @@
#include "include/secp256k1_recovery.h"

SECP256K1_API jlong JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1ctx_1clone
  (JNIEnv* env, jclass classObject, jlong ctx_l)
{
  const rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;

  jlong ctx_clone_l = (uintptr_t) rustsecp256k1_v0_1_1_context_clone(ctx);

  (void)classObject;(void)env;

@@ -20,48 +20,48 @@ SECP256K1_API jlong JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_
}

SECP256K1_API jint JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1context_1randomize
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;

  const unsigned char* seed = (unsigned char*) (*env)->GetDirectBufferAddress(env, byteBufferObject);

  (void)classObject;

  return rustsecp256k1_v0_1_1_context_randomize(ctx, seed);

}

SECP256K1_API void JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1destroy_1context
  (JNIEnv* env, jclass classObject, jlong ctx_l)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;

  rustsecp256k1_v0_1_1_context_destroy(ctx);

  (void)classObject;(void)env;
}

SECP256K1_API jint JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1ecdsa_1verify
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l, jint siglen, jint publen)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;

  unsigned char* data = (unsigned char*) (*env)->GetDirectBufferAddress(env, byteBufferObject);
  const unsigned char* sigdata = { (unsigned char*) (data + 32) };
  const unsigned char* pubdata = { (unsigned char*) (data + siglen + 32) };

  rustsecp256k1_v0_1_1_ecdsa_signature sig;
  rustsecp256k1_v0_1_1_pubkey pubkey;

  int ret = rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigdata, siglen);

  if( ret ) {
    ret = rustsecp256k1_v0_1_1_ec_pubkey_parse(ctx, &pubkey, pubdata, publen);

    if( ret ) {
      ret = rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, data, &pubkey);
    }
  }

@@ -70,10 +70,10 @@ SECP256K1_API jint JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_0
  return ret;
}

SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1ecdsa_1sign
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;
  unsigned char* data = (unsigned char*) (*env)->GetDirectBufferAddress(env, byteBufferObject);
  unsigned char* secKey = (unsigned char*) (data + 32);

@@ -81,15 +81,15 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k
  jbyteArray sigArray, intsByteArray;
  unsigned char intsarray[2];

  rustsecp256k1_v0_1_1_ecdsa_signature sig[72];

  int ret = rustsecp256k1_v0_1_1_ecdsa_sign(ctx, sig, data, secKey, NULL, NULL);

  unsigned char outputSer[72];
  size_t outputLen = 72;

  if( ret ) {
    int ret2 = rustsecp256k1_v0_1_1_ecdsa_signature_serialize_der(ctx,outputSer, &outputLen, sig ); (void)ret2;
  }

  intsarray[0] = outputLen;

@@ -112,36 +112,36 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k
  return retArray;
}

SECP256K1_API jint JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1ec_1seckey_1verify
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;
  unsigned char* secKey = (unsigned char*) (*env)->GetDirectBufferAddress(env, byteBufferObject);

  (void)classObject;

  return rustsecp256k1_v0_1_1_ec_seckey_verify(ctx, secKey);
}

SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1ec_1pubkey_1create
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;
  const unsigned char* secKey = (unsigned char*) (*env)->GetDirectBufferAddress(env, byteBufferObject);

  rustsecp256k1_v0_1_1_pubkey pubkey;

  jobjectArray retArray;
  jbyteArray pubkeyArray, intsByteArray;
  unsigned char intsarray[2];

  int ret = rustsecp256k1_v0_1_1_ec_pubkey_create(ctx, &pubkey, secKey);

  unsigned char outputSer[65];
  size_t outputLen = 65;

  if( ret ) {
    int ret2 = rustsecp256k1_v0_1_1_ec_pubkey_serialize(ctx,outputSer, &outputLen, &pubkey,SECP256K1_EC_UNCOMPRESSED );(void)ret2;
  }

  intsarray[0] = outputLen;

@@ -165,10 +165,10 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k

}

SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1privkey_1tweak_1add
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;
  unsigned char* privkey = (unsigned char*) (*env)->GetDirectBufferAddress(env, byteBufferObject);
  const unsigned char* tweak = (unsigned char*) (privkey + 32);

@@ -178,7 +178,7 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k

  int privkeylen = 32;

  int ret = rustsecp256k1_v0_1_1_ec_privkey_tweak_add(ctx, privkey, tweak);

  intsarray[0] = privkeylen;
  intsarray[1] = ret;

@@ -200,10 +200,10 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k
  return retArray;
}

SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1privkey_1tweak_1mul
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;
  unsigned char* privkey = (unsigned char*) (*env)->GetDirectBufferAddress(env, byteBufferObject);
  const unsigned char* tweak = (unsigned char*) (privkey + 32);

@@ -213,7 +213,7 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k

  int privkeylen = 32;

  int ret = rustsecp256k1_v0_1_1_ec_privkey_tweak_mul(ctx, privkey, tweak);

  intsarray[0] = privkeylen;
  intsarray[1] = ret;

@@ -235,11 +235,11 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k
  return retArray;
}

SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1pubkey_1tweak_1add
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l, jint publen)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;
/* rustsecp256k1_v0_1_1_pubkey* pubkey = (rustsecp256k1_v0_1_1_pubkey*) (*env)->GetDirectBufferAddress(env, byteBufferObject);*/
  unsigned char* pkey = (*env)->GetDirectBufferAddress(env, byteBufferObject);
  const unsigned char* tweak = (unsigned char*) (pkey + publen);

@@ -249,15 +249,15 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k
  unsigned char outputSer[65];
  size_t outputLen = 65;

  rustsecp256k1_v0_1_1_pubkey pubkey;
  int ret = rustsecp256k1_v0_1_1_ec_pubkey_parse(ctx, &pubkey, pkey, publen);

  if( ret ) {
    ret = rustsecp256k1_v0_1_1_ec_pubkey_tweak_add(ctx, &pubkey, tweak);
  }

  if( ret ) {
    int ret2 = rustsecp256k1_v0_1_1_ec_pubkey_serialize(ctx,outputSer, &outputLen, &pubkey,SECP256K1_EC_UNCOMPRESSED );(void)ret2;
  }

  intsarray[0] = outputLen;

@@ -280,10 +280,10 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k
  return retArray;
}

SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1pubkey_1tweak_1mul
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l, jint publen)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;
  unsigned char* pkey = (*env)->GetDirectBufferAddress(env, byteBufferObject);
  const unsigned char* tweak = (unsigned char*) (pkey + publen);

@@ -293,15 +293,15 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k
  unsigned char outputSer[65];
  size_t outputLen = 65;

  rustsecp256k1_v0_1_1_pubkey pubkey;
  int ret = rustsecp256k1_v0_1_1_ec_pubkey_parse(ctx, &pubkey, pkey, publen);

  if ( ret ) {
    ret = rustsecp256k1_v0_1_1_ec_pubkey_tweak_mul(ctx, &pubkey, tweak);
  }

  if( ret ) {
    int ret2 = rustsecp256k1_v0_1_1_ec_pubkey_serialize(ctx,outputSer, &outputLen, &pubkey,SECP256K1_EC_UNCOMPRESSED );(void)ret2;
  }

  intsarray[0] = outputLen;

@@ -324,7 +324,7 @@ SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k
  return retArray;
}

SECP256K1_API jlong JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1ecdsa_1pubkey_1combine
  (JNIEnv * env, jclass classObject, jobject byteBufferObject, jlong ctx_l, jint numkeys)
{
  (void)classObject;(void)env;(void)byteBufferObject;(void)ctx_l;(void)numkeys;

@@ -332,24 +332,24 @@ SECP256K1_API jlong JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_
  return 0;
}

SECP256K1_API jobjectArray JNICALL Java_org_bitcoin_NativeSecp256k1_rustsecp256k1_v0_1_1_1ecdh
  (JNIEnv* env, jclass classObject, jobject byteBufferObject, jlong ctx_l, jint publen)
{
  rustsecp256k1_v0_1_1_context *ctx = (rustsecp256k1_v0_1_1_context*)(uintptr_t)ctx_l;
  const unsigned char* secdata = (*env)->GetDirectBufferAddress(env, byteBufferObject);
  const unsigned char* pubdata = (const unsigned char*) (secdata + 32);

  jobjectArray retArray;
  jbyteArray outArray, intsByteArray;
  unsigned char intsarray[1];
  rustsecp256k1_v0_1_1_pubkey pubkey;
  unsigned char nonce_res[32];
  size_t outputLen = 32;

  int ret = rustsecp256k1_v0_1_1_ec_pubkey_parse(ctx, &pubkey, pubdata, publen);

  if (ret) {
    ret = rustsecp256k1_v0_1_1_ecdh(
      ctx,
      nonce_res,
      &pubkey,

@@ -10,106 +10,106 @@ extern "C" {
[org_bitcoin_NativeSecp256k1 JNI declarations, side-by-side render collapsed: every export is renamed from the rustsecp256k1_v0_1_0 prefix to rustsecp256k1_v0_1_1 in both the "Method:" comment and the Java_org_bitcoin_NativeSecp256k1_... declaration, for ctx_clone, context_randomize, privkey_tweak_add, privkey_tweak_mul, pubkey_tweak_add, pubkey_tweak_mul, destroy_context, ecdsa_verify, ecdsa_sign, ec_seckey_verify, ec_pubkey_create, ec_pubkey_parse and ecdh; the JNI parameter lists and Signature strings are unchanged.]
@@ -3,10 +3,10 @@
 #include "org_bitcoin_Secp256k1Context.h"
 #include "include/secp256k1.h"

-SECP256K1_API jlong JNICALL Java_org_bitcoin_Secp256k1Context_rustsecp256k1_v0_1_0_1init_1context
+SECP256K1_API jlong JNICALL Java_org_bitcoin_Secp256k1Context_rustsecp256k1_v0_1_1_1init_1context
 (JNIEnv* env, jclass classObject)
 {
-rustsecp256k1_v0_1_0_context *ctx = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);
+rustsecp256k1_v0_1_1_context *ctx = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);

 (void)classObject;(void)env;
@@ -10,10 +10,10 @@ extern "C" {
 #endif
 /*
  * Class: org_bitcoin_Secp256k1Context
- * Method: rustsecp256k1_v0_1_0_init_context
+ * Method: rustsecp256k1_v0_1_1_init_context
  * Signature: ()J
  */
-SECP256K1_API jlong JNICALL Java_org_bitcoin_Secp256k1Context_rustsecp256k1_v0_1_0_1init_1context
+SECP256K1_API jlong JNICALL Java_org_bitcoin_Secp256k1Context_rustsecp256k1_v0_1_1_1init_1context
 (JNIEnv *, jclass);

 #ifdef __cplusplus
@@ -1,4 +1,4 @@
-include_HEADERS += include/rustsecp256k1_v0_1_0_ecdh.h
+include_HEADERS += include/rustsecp256k1_v0_1_1_ecdh.h
 noinst_HEADERS += src/modules/ecdh/main_impl.h
 noinst_HEADERS += src/modules/ecdh/tests_impl.h
 if USE_BENCHMARK
@@ -12,55 +12,55 @@

 static int ecdh_hash_function_sha256(unsigned char *output, const unsigned char *x, const unsigned char *y, void *data) {
 unsigned char version = (y[31] & 0x01) | 0x02;
-rustsecp256k1_v0_1_0_sha256 sha;
+rustsecp256k1_v0_1_1_sha256 sha;
 (void)data;

-rustsecp256k1_v0_1_0_sha256_initialize(&sha);
+rustsecp256k1_v0_1_1_sha256_initialize(&sha);
-rustsecp256k1_v0_1_0_sha256_write(&sha, &version, 1);
+rustsecp256k1_v0_1_1_sha256_write(&sha, &version, 1);
-rustsecp256k1_v0_1_0_sha256_write(&sha, x, 32);
+rustsecp256k1_v0_1_1_sha256_write(&sha, x, 32);
-rustsecp256k1_v0_1_0_sha256_finalize(&sha, output);
+rustsecp256k1_v0_1_1_sha256_finalize(&sha, output);

 return 1;
 }

-const rustsecp256k1_v0_1_0_ecdh_hash_function rustsecp256k1_v0_1_0_ecdh_hash_function_sha256 = ecdh_hash_function_sha256;
+const rustsecp256k1_v0_1_1_ecdh_hash_function rustsecp256k1_v0_1_1_ecdh_hash_function_sha256 = ecdh_hash_function_sha256;
-const rustsecp256k1_v0_1_0_ecdh_hash_function rustsecp256k1_v0_1_0_ecdh_hash_function_default = ecdh_hash_function_sha256;
+const rustsecp256k1_v0_1_1_ecdh_hash_function rustsecp256k1_v0_1_1_ecdh_hash_function_default = ecdh_hash_function_sha256;

-int rustsecp256k1_v0_1_0_ecdh(const rustsecp256k1_v0_1_0_context* ctx, unsigned char *output, const rustsecp256k1_v0_1_0_pubkey *point, const unsigned char *scalar, rustsecp256k1_v0_1_0_ecdh_hash_function hashfp, void *data) {
+int rustsecp256k1_v0_1_1_ecdh(const rustsecp256k1_v0_1_1_context* ctx, unsigned char *output, const rustsecp256k1_v0_1_1_pubkey *point, const unsigned char *scalar, rustsecp256k1_v0_1_1_ecdh_hash_function hashfp, void *data) {
 int ret = 0;
 int overflow = 0;
-rustsecp256k1_v0_1_0_gej res;
+rustsecp256k1_v0_1_1_gej res;
-rustsecp256k1_v0_1_0_ge pt;
+rustsecp256k1_v0_1_1_ge pt;
-rustsecp256k1_v0_1_0_scalar s;
+rustsecp256k1_v0_1_1_scalar s;
 VERIFY_CHECK(ctx != NULL);
 ARG_CHECK(output != NULL);
 ARG_CHECK(point != NULL);
 ARG_CHECK(scalar != NULL);
 if (hashfp == NULL) {
-hashfp = rustsecp256k1_v0_1_0_ecdh_hash_function_default;
+hashfp = rustsecp256k1_v0_1_1_ecdh_hash_function_default;
 }

-rustsecp256k1_v0_1_0_pubkey_load(ctx, &pt, point);
+rustsecp256k1_v0_1_1_pubkey_load(ctx, &pt, point);
-rustsecp256k1_v0_1_0_scalar_set_b32(&s, scalar, &overflow);
+rustsecp256k1_v0_1_1_scalar_set_b32(&s, scalar, &overflow);
-if (overflow || rustsecp256k1_v0_1_0_scalar_is_zero(&s)) {
+if (overflow || rustsecp256k1_v0_1_1_scalar_is_zero(&s)) {
 ret = 0;
 } else {
 unsigned char x[32];
 unsigned char y[32];

-rustsecp256k1_v0_1_0_ecmult_const(&res, &pt, &s, 256);
+rustsecp256k1_v0_1_1_ecmult_const(&res, &pt, &s, 256);
-rustsecp256k1_v0_1_0_ge_set_gej(&pt, &res);
+rustsecp256k1_v0_1_1_ge_set_gej(&pt, &res);

 /* Compute a hash of the point */
-rustsecp256k1_v0_1_0_fe_normalize(&pt.x);
+rustsecp256k1_v0_1_1_fe_normalize(&pt.x);
-rustsecp256k1_v0_1_0_fe_normalize(&pt.y);
+rustsecp256k1_v0_1_1_fe_normalize(&pt.y);
-rustsecp256k1_v0_1_0_fe_get_b32(x, &pt.x);
+rustsecp256k1_v0_1_1_fe_get_b32(x, &pt.x);
-rustsecp256k1_v0_1_0_fe_get_b32(y, &pt.y);
+rustsecp256k1_v0_1_1_fe_get_b32(y, &pt.y);

 ret = hashfp(output, x, y, data);
 }

-rustsecp256k1_v0_1_0_scalar_clear(&s);
+rustsecp256k1_v0_1_1_scalar_clear(&s);
 return ret;
 }
@@ -26,69 +26,69 @@ int ecdh_hash_function_custom(unsigned char *output, const unsigned char *x, con
[ECDH module tests, side-by-side render collapsed: in test_ecdh_api and test_ecdh_generator_basepoint every rustsecp256k1_v0_1_0_* type and call (context_create, context_set_error_callback, context_set_illegal_callback, pubkey, sha256, scalar, ec_pubkey_create, ecdh, ec_pubkey_serialize, sha256_initialize, sha256_write, sha256_finalize, scalar_get_b32, context_destroy) is renamed to the rustsecp256k1_v0_1_1_* prefix; the CHECK assertions and test logic are unchanged.]
@@ -104,23 +104,23 @@ void test_bad_scalar(void) {
 };
 unsigned char s_rand[32] = { 0 };
 unsigned char output[32];
-rustsecp256k1_v0_1_0_scalar rand;
+rustsecp256k1_v0_1_1_scalar rand;
-rustsecp256k1_v0_1_0_pubkey point;
+rustsecp256k1_v0_1_1_pubkey point;

 /* Create random point */
 random_scalar_order(&rand);
-rustsecp256k1_v0_1_0_scalar_get_b32(s_rand, &rand);
+rustsecp256k1_v0_1_1_scalar_get_b32(s_rand, &rand);
-CHECK(rustsecp256k1_v0_1_0_ec_pubkey_create(ctx, &point, s_rand) == 1);
+CHECK(rustsecp256k1_v0_1_1_ec_pubkey_create(ctx, &point, s_rand) == 1);

 /* Try to multiply it by bad values */
-CHECK(rustsecp256k1_v0_1_0_ecdh(ctx, output, &point, s_zero, NULL, NULL) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdh(ctx, output, &point, s_zero, NULL, NULL) == 0);
-CHECK(rustsecp256k1_v0_1_0_ecdh(ctx, output, &point, s_overflow, NULL, NULL) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdh(ctx, output, &point, s_overflow, NULL, NULL) == 0);
 /* ...and a good one */
 s_overflow[31] -= 1;
-CHECK(rustsecp256k1_v0_1_0_ecdh(ctx, output, &point, s_overflow, NULL, NULL) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdh(ctx, output, &point, s_overflow, NULL, NULL) == 1);

 /* Hash function failure results in ecdh failure */
-CHECK(rustsecp256k1_v0_1_0_ecdh(ctx, output, &point, s_overflow, ecdh_hash_function_test_fail, NULL) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdh(ctx, output, &point, s_overflow, ecdh_hash_function_test_fail, NULL) == 0);
 }

 void run_ecdh_tests(void) {
@@ -1,4 +1,4 @@
-include_HEADERS += include/rustsecp256k1_v0_1_0_recovery.h
+include_HEADERS += include/rustsecp256k1_v0_1_1_recovery.h
 noinst_HEADERS += src/modules/recovery/main_impl.h
 noinst_HEADERS += src/modules/recovery/tests_impl.h
 if USE_BENCHMARK
@@ -9,34 +9,34 @@
[Recovery module implementation, side-by-side render collapsed: the include of secp256k1_recovery.h is kept, and rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_load, rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_save and the opening of rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact are renamed to the rustsecp256k1_v0_1_1_* prefix, including the scalar type references, the sizeof(scalar) == 32 fast path and its comment, and the scalar_set_b32/scalar_get_b32 calls; the function bodies are otherwise unchanged.]
@@ -45,144 +45,144 @@ int rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(const rustsec
[Recovery module implementation, continued: rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact, _serialize_compact and _convert, rustsecp256k1_v0_1_0_ecdsa_sig_recover, rustsecp256k1_v0_1_0_ecdsa_sign_recoverable and rustsecp256k1_v0_1_0_ecdsa_recover, together with the scalar, field (fe), group (ge/gej), ecmult, nonce_function and pubkey_save/pubkey_load helpers they call, are all renamed to the rustsecp256k1_v0_1_1_* prefix; the ARG_CHECK/VERIFY_CHECK structure and the recovery logic itself are unchanged.]
@@ -25,19 +25,19 @@ static int recovery_test_nonce_function(unsigned char *nonce32, const unsigned c
 }
 /* On the next run, return a valid nonce, but flip a coin as to whether or not to fail signing. */
 memset(nonce32, 1, 32);
-return rustsecp256k1_v0_1_0_rand_bits(1);
+return rustsecp256k1_v0_1_1_rand_bits(1);
 }

 void test_ecdsa_recovery_api(void) {
 /* Setup contexts that just count errors */
-rustsecp256k1_v0_1_0_context *none = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_NONE);
+rustsecp256k1_v0_1_1_context *none = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_NONE);
-rustsecp256k1_v0_1_0_context *sign = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_SIGN);
+rustsecp256k1_v0_1_1_context *sign = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_SIGN);
-rustsecp256k1_v0_1_0_context *vrfy = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_VERIFY);
+rustsecp256k1_v0_1_1_context *vrfy = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_VERIFY);
-rustsecp256k1_v0_1_0_context *both = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);
+rustsecp256k1_v0_1_1_context *both = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);
-rustsecp256k1_v0_1_0_pubkey pubkey;
+rustsecp256k1_v0_1_1_pubkey pubkey;
-rustsecp256k1_v0_1_0_pubkey recpubkey;
+rustsecp256k1_v0_1_1_pubkey recpubkey;
-rustsecp256k1_v0_1_0_ecdsa_signature normal_sig;
+rustsecp256k1_v0_1_1_ecdsa_signature normal_sig;
-rustsecp256k1_v0_1_0_ecdsa_recoverable_signature recsig;
+rustsecp256k1_v0_1_1_ecdsa_recoverable_signature recsig;
 unsigned char privkey[32] = { 1 };
 unsigned char message[32] = { 2 };
 int32_t ecount = 0;
@@ -49,159 +49,159 @@ void test_ecdsa_recovery_api(void) {
[Recovery module tests, side-by-side render collapsed: in the remainder of test_ecdsa_recovery_api and in test_ecdsa_recovery_end_to_end every rustsecp256k1_v0_1_0_* call and type (context_set_error_callback, context_set_illegal_callback, ec_seckey_verify, ec_pubkey_create, ecdsa_sign, ecdsa_sign_recoverable, ecdsa_recover, ecdsa_recoverable_signature_convert, _serialize_compact and _parse_compact, ecdsa_verify, scalar_get_b32, rand_bits, rand_int, context_destroy, pubkey, scalar, ecdsa_signature, ecdsa_recoverable_signature) is renamed to the rustsecp256k1_v0_1_1_* prefix; the CHECK assertions, ecount bookkeeping and test flow are unchanged.]
@@ -225,7 +225,7 @@ void test_ecdsa_recovery_edge_cases(void) {
 0x7D, 0xD7, 0x3E, 0x38, 0x7E, 0xE4, 0xFC, 0x86,
 0x6E, 0x1B, 0xE8, 0xEC, 0xC7, 0xDD, 0x95, 0x57
 };
-rustsecp256k1_v0_1_0_pubkey pubkey;
+rustsecp256k1_v0_1_1_pubkey pubkey;
 /* signature (r,s) = (4,4), which can be recovered with all 4 recids. */
 const unsigned char sigb64[64] = {
 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@@ -237,19 +237,19 @@ void test_ecdsa_recovery_edge_cases(void) {
 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
 };
-rustsecp256k1_v0_1_0_pubkey pubkeyb;
+rustsecp256k1_v0_1_1_pubkey pubkeyb;
-rustsecp256k1_v0_1_0_ecdsa_recoverable_signature rsig;
+rustsecp256k1_v0_1_1_ecdsa_recoverable_signature rsig;
-rustsecp256k1_v0_1_0_ecdsa_signature sig;
+rustsecp256k1_v0_1_1_ecdsa_signature sig;
 int recid;

-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sig64, 0));
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sig64, 0));
-CHECK(!rustsecp256k1_v0_1_0_ecdsa_recover(ctx, &pubkey, &rsig, msg32));
+CHECK(!rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &pubkey, &rsig, msg32));
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sig64, 1));
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sig64, 1));
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recover(ctx, &pubkey, &rsig, msg32));
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &pubkey, &rsig, msg32));
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sig64, 2));
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sig64, 2));
-CHECK(!rustsecp256k1_v0_1_0_ecdsa_recover(ctx, &pubkey, &rsig, msg32));
+CHECK(!rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &pubkey, &rsig, msg32));
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sig64, 3));
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sig64, 3));
-CHECK(!rustsecp256k1_v0_1_0_ecdsa_recover(ctx, &pubkey, &rsig, msg32));
+CHECK(!rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &pubkey, &rsig, msg32));

 for (recid = 0; recid < 4; recid++) {
 int i;
@@ -294,40 +294,40 @@ void test_ecdsa_recovery_edge_cases(void) {
 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E,
 0x8C, 0xD0, 0x36, 0x41, 0x45, 0x02, 0x01, 0x04
 };
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigb64, recid) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigb64, recid) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recover(ctx, &pubkeyb, &rsig, msg32) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &pubkeyb, &rsig, msg32) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbder, sizeof(sigbder)) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbder, sizeof(sigbder)) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 1);
 for (recid2 = 0; recid2 < 4; recid2++) {
-rustsecp256k1_v0_1_0_pubkey pubkey2b;
+rustsecp256k1_v0_1_1_pubkey pubkey2b;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigb64, recid2) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigb64, recid2) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recover(ctx, &pubkey2b, &rsig, msg32) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &pubkey2b, &rsig, msg32) == 1);
 /* Verifying with (order + r,4) should always fail. */
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbderlong, sizeof(sigbderlong)) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbderlong, sizeof(sigbderlong)) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
 }
 /* DER parsing tests. */
 /* Zero length r/s. */
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigcder_zr, sizeof(sigcder_zr)) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigcder_zr, sizeof(sigcder_zr)) == 0);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigcder_zs, sizeof(sigcder_zs)) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigcder_zs, sizeof(sigcder_zs)) == 0);
 /* Leading zeros. */
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbderalt1, sizeof(sigbderalt1)) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbderalt1, sizeof(sigbderalt1)) == 0);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbderalt2, sizeof(sigbderalt2)) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbderalt2, sizeof(sigbderalt2)) == 0);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbderalt3, sizeof(sigbderalt3)) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbderalt3, sizeof(sigbderalt3)) == 0);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbderalt4, sizeof(sigbderalt4)) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbderalt4, sizeof(sigbderalt4)) == 0);
 sigbderalt3[4] = 1;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbderalt3, sizeof(sigbderalt3)) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbderalt3, sizeof(sigbderalt3)) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
 sigbderalt4[7] = 1;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbderalt4, sizeof(sigbderalt4)) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbderalt4, sizeof(sigbderalt4)) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
 /* Damage signature. */
 sigbder[7]++;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbder, sizeof(sigbder)) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbder, sizeof(sigbder)) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
 sigbder[7]--;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbder, 6) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbder, 6) == 0);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbder, sizeof(sigbder) - 1) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbder, sizeof(sigbder) - 1) == 0);
 for(i = 0; i < 8; i++) {
 int c;
 unsigned char orig = sigbder[i];
@@ -337,7 +337,7 @@ void test_ecdsa_recovery_edge_cases(void) {
 continue;
 }
 sigbder[i] = c;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigbder, sizeof(sigbder)) == 0 || rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigbder, sizeof(sigbder)) == 0 || rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pubkeyb) == 0);
 }
 sigbder[i] = orig;
 }
@@ -357,25 +357,25 @@ void test_ecdsa_recovery_edge_cases(void) {
 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
 };
-rustsecp256k1_v0_1_0_pubkey pubkeyc;
+rustsecp256k1_v0_1_1_pubkey pubkeyc;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigc64, 0) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigc64, 0) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recover(ctx, &pubkeyc, &rsig, msg32) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &pubkeyc, &rsig, msg32) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigcder, sizeof(sigcder)) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigcder, sizeof(sigcder)) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pubkeyc) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pubkeyc) == 1);
 sigcder[4] = 0;
 sigc64[31] = 0;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigc64, 0) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigc64, 0) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recover(ctx, &pubkeyb, &rsig, msg32) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &pubkeyb, &rsig, msg32) == 0);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigcder, sizeof(sigcder)) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigcder, sizeof(sigcder)) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pubkeyc) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pubkeyc) == 0);
 sigcder[4] = 1;
 sigcder[7] = 0;
 sigc64[31] = 1;
 sigc64[63] = 0;
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigc64, 0) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact(ctx, &rsig, sigc64, 0) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_recover(ctx, &pubkeyb, &rsig, msg32) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_recover(ctx, &pubkeyb, &rsig, msg32) == 0);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(ctx, &sig, sigcder, sizeof(sigcder)) == 1);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(ctx, &sig, sigcder, sizeof(sigcder)) == 1);
-CHECK(rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pubkeyc) == 0);
+CHECK(rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pubkeyc) == 0);
 }
 }
@@ -20,54 +20,54 @@
 #endif

 /** Copy a number. */
-static void rustsecp256k1_v0_1_0_num_copy(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a);
+static void rustsecp256k1_v0_1_1_num_copy(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a);

 /** Convert a number's absolute value to a binary big-endian string.
 * There must be enough place. */
-static void rustsecp256k1_v0_1_0_num_get_bin(unsigned char *r, unsigned int rlen, const rustsecp256k1_v0_1_0_num *a);
+static void rustsecp256k1_v0_1_1_num_get_bin(unsigned char *r, unsigned int rlen, const rustsecp256k1_v0_1_1_num *a);

 /** Set a number to the value of a binary big-endian string. */
-static void rustsecp256k1_v0_1_0_num_set_bin(rustsecp256k1_v0_1_0_num *r, const unsigned char *a, unsigned int alen);
+static void rustsecp256k1_v0_1_1_num_set_bin(rustsecp256k1_v0_1_1_num *r, const unsigned char *a, unsigned int alen);

 /** Compute a modular inverse. The input must be less than the modulus. */
-static void rustsecp256k1_v0_1_0_num_mod_inverse(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *m);
+static void rustsecp256k1_v0_1_1_num_mod_inverse(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *m);

 /** Compute the jacobi symbol (a|b). b must be positive and odd. */
-static int rustsecp256k1_v0_1_0_num_jacobi(const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b);
+static int rustsecp256k1_v0_1_1_num_jacobi(const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b);

 /** Compare the absolute value of two numbers. */
-static int rustsecp256k1_v0_1_0_num_cmp(const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b);
+static int rustsecp256k1_v0_1_1_num_cmp(const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b);

 /** Test whether two number are equal (including sign). */
-static int rustsecp256k1_v0_1_0_num_eq(const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b);
+static int rustsecp256k1_v0_1_1_num_eq(const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b);

 /** Add two (signed) numbers. */
-static void rustsecp256k1_v0_1_0_num_add(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b);
+static void rustsecp256k1_v0_1_1_num_add(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b);

 /** Subtract two (signed) numbers. */
-static void rustsecp256k1_v0_1_0_num_sub(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b);
+static void rustsecp256k1_v0_1_1_num_sub(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b);

 /** Multiply two (signed) numbers. */
-static void rustsecp256k1_v0_1_0_num_mul(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b);
+static void rustsecp256k1_v0_1_1_num_mul(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b);

 /** Replace a number by its remainder modulo m. M's sign is ignored. The result is a number between 0 and m-1,
 even if r was negative. */
-static void rustsecp256k1_v0_1_0_num_mod(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *m);
+static void rustsecp256k1_v0_1_1_num_mod(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *m);

 /** Right-shift the passed number by bits bits. */
-static void rustsecp256k1_v0_1_0_num_shift(rustsecp256k1_v0_1_0_num *r, int bits);
+static void rustsecp256k1_v0_1_1_num_shift(rustsecp256k1_v0_1_1_num *r, int bits);

 /** Check whether a number is zero. */
-static int rustsecp256k1_v0_1_0_num_is_zero(const rustsecp256k1_v0_1_0_num *a);
+static int rustsecp256k1_v0_1_1_num_is_zero(const rustsecp256k1_v0_1_1_num *a);

 /** Check whether a number is one. */
-static int rustsecp256k1_v0_1_0_num_is_one(const rustsecp256k1_v0_1_0_num *a);
+static int rustsecp256k1_v0_1_1_num_is_one(const rustsecp256k1_v0_1_1_num *a);

 /** Check whether a number is strictly negative. */
-static int rustsecp256k1_v0_1_0_num_is_neg(const rustsecp256k1_v0_1_0_num *a);
+static int rustsecp256k1_v0_1_1_num_is_neg(const rustsecp256k1_v0_1_1_num *a);

 /** Change a number's sign. */
-static void rustsecp256k1_v0_1_0_num_negate(rustsecp256k1_v0_1_0_num *r);
+static void rustsecp256k1_v0_1_1_num_negate(rustsecp256k1_v0_1_1_num *r);

 #endif
@@ -15,6 +15,6 @@ typedef struct {
 mp_limb_t data[2*NUM_LIMBS];
 int neg;
 int limbs;
-} rustsecp256k1_v0_1_0_num;
+} rustsecp256k1_v0_1_1_num;

 #endif /* SECP256K1_NUM_REPR_H */
@@ -15,18 +15,18 @@
 #include "num.h"

 #ifdef VERIFY
-static void rustsecp256k1_v0_1_0_num_sanity(const rustsecp256k1_v0_1_0_num *a) {
+static void rustsecp256k1_v0_1_1_num_sanity(const rustsecp256k1_v0_1_1_num *a) {
 VERIFY_CHECK(a->limbs == 1 || (a->limbs > 1 && a->data[a->limbs-1] != 0));
 }
 #else
-#define rustsecp256k1_v0_1_0_num_sanity(a) do { } while(0)
+#define rustsecp256k1_v0_1_1_num_sanity(a) do { } while(0)
 #endif

-static void rustsecp256k1_v0_1_0_num_copy(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a) {
+static void rustsecp256k1_v0_1_1_num_copy(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a) {
 *r = *a;
 }

-static void rustsecp256k1_v0_1_0_num_get_bin(unsigned char *r, unsigned int rlen, const rustsecp256k1_v0_1_0_num *a) {
+static void rustsecp256k1_v0_1_1_num_get_bin(unsigned char *r, unsigned int rlen, const rustsecp256k1_v0_1_1_num *a) {
 unsigned char tmp[65];
 int len = 0;
 int shift = 0;
@@ -42,7 +42,7 @@ static void rustsecp256k1_v0_1_0_num_get_bin(unsigned char *r, unsigned int rlen
 memset(tmp, 0, sizeof(tmp));
 }

-static void rustsecp256k1_v0_1_0_num_set_bin(rustsecp256k1_v0_1_0_num *r, const unsigned char *a, unsigned int alen) {
+static void rustsecp256k1_v0_1_1_num_set_bin(rustsecp256k1_v0_1_1_num *r, const unsigned char *a, unsigned int alen) {
 int len;
 VERIFY_CHECK(alen > 0);
 VERIFY_CHECK(alen <= 64);
@@ -59,7 +59,7 @@ static void rustsecp256k1_v0_1_0_num_set_bin(rustsecp256k1_v0_1_0_num *r, const
 }
 }

-static void rustsecp256k1_v0_1_0_num_add_abs(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b) {
+static void rustsecp256k1_v0_1_1_num_add_abs(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b) {
 mp_limb_t c = mpn_add(r->data, a->data, a->limbs, b->data, b->limbs);
 r->limbs = a->limbs;
 if (c != 0) {
@@ -68,7 +68,7 @@ static void rustsecp256k1_v0_1_0_num_add_abs(rustsecp256k1_v0_1_0_num *r, const
 }
 }

-static void rustsecp256k1_v0_1_0_num_sub_abs(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b) {
+static void rustsecp256k1_v0_1_1_num_sub_abs(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b) {
 mp_limb_t c = mpn_sub(r->data, a->data, a->limbs, b->data, b->limbs);
 (void)c;
 VERIFY_CHECK(c == 0);
@@ -78,9 +78,9 @@ static void rustsecp256k1_v0_1_0_num_sub_abs(rustsecp256k1_v0_1_0_num *r, const
 }
 }

-static void rustsecp256k1_v0_1_0_num_mod(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *m) {
+static void rustsecp256k1_v0_1_1_num_mod(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *m) {
-rustsecp256k1_v0_1_0_num_sanity(r);
+rustsecp256k1_v0_1_1_num_sanity(r);
-rustsecp256k1_v0_1_0_num_sanity(m);
+rustsecp256k1_v0_1_1_num_sanity(m);

 if (r->limbs >= m->limbs) {
 mp_limb_t t[2*NUM_LIMBS];
@@ -93,20 +93,20 @@ static void rustsecp256k1_v0_1_0_num_mod(rustsecp256k1_v0_1_0_num *r, const rust
 }

 if (r->neg && (r->limbs > 1 || r->data[0] != 0)) {
-rustsecp256k1_v0_1_0_num_sub_abs(r, m, r);
+rustsecp256k1_v0_1_1_num_sub_abs(r, m, r);
 r->neg = 0;
 }
 }

-static void rustsecp256k1_v0_1_0_num_mod_inverse(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *m) {
+static void rustsecp256k1_v0_1_1_num_mod_inverse(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *m) {
 int i;
 mp_limb_t g[NUM_LIMBS+1];
 mp_limb_t u[NUM_LIMBS+1];
 mp_limb_t v[NUM_LIMBS+1];
 mp_size_t sn;
 mp_size_t gn;
-rustsecp256k1_v0_1_0_num_sanity(a);
+rustsecp256k1_v0_1_1_num_sanity(a);
-rustsecp256k1_v0_1_0_num_sanity(m);
+rustsecp256k1_v0_1_1_num_sanity(m);

 /** mpn_gcdext computes: (G,S) = gcdext(U,V), where
 * * G = gcd(U,V)
@@ -144,11 +144,11 @@ static void rustsecp256k1_v0_1_0_num_mod_inverse(rustsecp256k1_v0_1_0_num *r, co
 memset(v, 0, sizeof(v));
 }

-static int rustsecp256k1_v0_1_0_num_jacobi(const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b) {
+static int rustsecp256k1_v0_1_1_num_jacobi(const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b) {
 int ret;
 mpz_t ga, gb;
-rustsecp256k1_v0_1_0_num_sanity(a);
+rustsecp256k1_v0_1_1_num_sanity(a);
-rustsecp256k1_v0_1_0_num_sanity(b);
+rustsecp256k1_v0_1_1_num_sanity(b);
 VERIFY_CHECK(!b->neg && (b->limbs > 0) && (b->data[0] & 1));

 mpz_inits(ga, gb, NULL);
@@ -166,19 +166,19 @@ static int rustsecp256k1_v0_1_0_num_jacobi(const rustsecp256k1_v0_1_0_num *a, co
 return ret;
 }

-static int rustsecp256k1_v0_1_0_num_is_one(const rustsecp256k1_v0_1_0_num *a) {
+static int rustsecp256k1_v0_1_1_num_is_one(const rustsecp256k1_v0_1_1_num *a) {
 return (a->limbs == 1 && a->data[0] == 1);
 }

-static int rustsecp256k1_v0_1_0_num_is_zero(const rustsecp256k1_v0_1_0_num *a) {
+static int rustsecp256k1_v0_1_1_num_is_zero(const rustsecp256k1_v0_1_1_num *a) {
 return (a->limbs == 1 && a->data[0] == 0);
 }

-static int rustsecp256k1_v0_1_0_num_is_neg(const rustsecp256k1_v0_1_0_num *a) {
+static int rustsecp256k1_v0_1_1_num_is_neg(const rustsecp256k1_v0_1_1_num *a) {
 return (a->limbs > 1 || a->data[0] != 0) && a->neg;
 }

-static int rustsecp256k1_v0_1_0_num_cmp(const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b) {
+static int rustsecp256k1_v0_1_1_num_cmp(const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b) {
 if (a->limbs > b->limbs) {
 return 1;
 }
@@ -188,54 +188,54 @@ static int rustsecp256k1_v0_1_0_num_cmp(const rustsecp256k1_v0_1_0_num *a, const
 return mpn_cmp(a->data, b->data, a->limbs);
 }

-static int rustsecp256k1_v0_1_0_num_eq(const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b) {
+static int rustsecp256k1_v0_1_1_num_eq(const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b) {
 if (a->limbs > b->limbs) {
 return 0;
 }
 if (a->limbs < b->limbs) {
 return 0;
 }
-if ((a->neg && !rustsecp256k1_v0_1_0_num_is_zero(a)) != (b->neg && !rustsecp256k1_v0_1_0_num_is_zero(b))) {
+if ((a->neg && !rustsecp256k1_v0_1_1_num_is_zero(a)) != (b->neg && !rustsecp256k1_v0_1_1_num_is_zero(b))) {
 return 0;
 }
 return mpn_cmp(a->data, b->data, a->limbs) == 0;
 }

-static void rustsecp256k1_v0_1_0_num_subadd(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b, int bneg) {
+static void rustsecp256k1_v0_1_1_num_subadd(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b, int bneg) {
 if (!(b->neg ^ bneg ^ a->neg)) { /* a and b have the same sign */
 r->neg = a->neg;
 if (a->limbs >= b->limbs) {
-rustsecp256k1_v0_1_0_num_add_abs(r, a, b);
+rustsecp256k1_v0_1_1_num_add_abs(r, a, b);
 } else {
-rustsecp256k1_v0_1_0_num_add_abs(r, b, a);
+rustsecp256k1_v0_1_1_num_add_abs(r, b, a);
 }
 } else {
-if (rustsecp256k1_v0_1_0_num_cmp(a, b) > 0) {
+if (rustsecp256k1_v0_1_1_num_cmp(a, b) > 0) {
 r->neg = a->neg;
-rustsecp256k1_v0_1_0_num_sub_abs(r, a, b);
+rustsecp256k1_v0_1_1_num_sub_abs(r, a, b);
 } else {
 r->neg = b->neg ^ bneg;
-rustsecp256k1_v0_1_0_num_sub_abs(r, b, a);
+rustsecp256k1_v0_1_1_num_sub_abs(r, b, a);
 }
 }
 }

-static void rustsecp256k1_v0_1_0_num_add(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b) {
+static void rustsecp256k1_v0_1_1_num_add(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b) {
-rustsecp256k1_v0_1_0_num_sanity(a);
+rustsecp256k1_v0_1_1_num_sanity(a);
-rustsecp256k1_v0_1_0_num_sanity(b);
+rustsecp256k1_v0_1_1_num_sanity(b);
-rustsecp256k1_v0_1_0_num_subadd(r, a, b, 0);
+rustsecp256k1_v0_1_1_num_subadd(r, a, b, 0);
 }

-static void rustsecp256k1_v0_1_0_num_sub(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b) {
+static void rustsecp256k1_v0_1_1_num_sub(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b) {
-rustsecp256k1_v0_1_0_num_sanity(a);
+rustsecp256k1_v0_1_1_num_sanity(a);
-rustsecp256k1_v0_1_0_num_sanity(b);
+rustsecp256k1_v0_1_1_num_sanity(b);
-rustsecp256k1_v0_1_0_num_subadd(r, a, b, 1);
+rustsecp256k1_v0_1_1_num_subadd(r, a, b, 1);
 }

-static void rustsecp256k1_v0_1_0_num_mul(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_num *a, const rustsecp256k1_v0_1_0_num *b) {
+static void rustsecp256k1_v0_1_1_num_mul(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_num *a, const rustsecp256k1_v0_1_1_num *b) {
 mp_limb_t tmp[2*NUM_LIMBS+1];
-rustsecp256k1_v0_1_0_num_sanity(a);
+rustsecp256k1_v0_1_1_num_sanity(a);
-rustsecp256k1_v0_1_0_num_sanity(b);
+rustsecp256k1_v0_1_1_num_sanity(b);

 VERIFY_CHECK(a->limbs + b->limbs <= 2*NUM_LIMBS+1);
 if ((a->limbs==1 && a->data[0]==0) || (b->limbs==1 && b->data[0]==0)) {
@@ -259,7 +259,7 @@ static void rustsecp256k1_v0_1_0_num_mul(rustsecp256k1_v0_1_0_num *r, const rust
 memset(tmp, 0, sizeof(tmp));
 }

-static void rustsecp256k1_v0_1_0_num_shift(rustsecp256k1_v0_1_0_num *r, int bits) {
+static void rustsecp256k1_v0_1_1_num_shift(rustsecp256k1_v0_1_1_num *r, int bits) {
 if (bits % GMP_NUMB_BITS) {
 /* Shift within limbs. */
 mpn_rshift(r->data, r->data, r->limbs, bits % GMP_NUMB_BITS);
@@ -281,7 +281,7 @@ static void rustsecp256k1_v0_1_0_num_shift(rustsecp256k1_v0_1_0_num *r, int bits
 }
 }

-static void rustsecp256k1_v0_1_0_num_negate(rustsecp256k1_v0_1_0_num *r) {
+static void rustsecp256k1_v0_1_1_num_negate(rustsecp256k1_v0_1_1_num *r) {
 r->neg ^= 1;
 }
@@ -24,83 +24,83 @@
 #endif

 /** Clear a scalar to prevent the leak of sensitive data. */
-static void rustsecp256k1_v0_1_0_scalar_clear(rustsecp256k1_v0_1_0_scalar *r);
+static void rustsecp256k1_v0_1_1_scalar_clear(rustsecp256k1_v0_1_1_scalar *r);

 /** Access bits from a scalar. All requested bits must belong to the same 32-bit limb. */
-static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count);
+static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count);

 /** Access bits from a scalar. Not constant time. */
-static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits_var(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count);
+static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits_var(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count);

 /** Set a scalar from a big endian byte array. */
-static void rustsecp256k1_v0_1_0_scalar_set_b32(rustsecp256k1_v0_1_0_scalar *r, const unsigned char *bin, int *overflow);
+static void rustsecp256k1_v0_1_1_scalar_set_b32(rustsecp256k1_v0_1_1_scalar *r, const unsigned char *bin, int *overflow);

 /** Set a scalar to an unsigned integer. */
-static void rustsecp256k1_v0_1_0_scalar_set_int(rustsecp256k1_v0_1_0_scalar *r, unsigned int v);
+static void rustsecp256k1_v0_1_1_scalar_set_int(rustsecp256k1_v0_1_1_scalar *r, unsigned int v);

 /** Convert a scalar to a byte array. */
-static void rustsecp256k1_v0_1_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_0_scalar* a);
+static void rustsecp256k1_v0_1_1_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_1_scalar* a);

 /** Add two scalars together (modulo the group order). Returns whether it overflowed. */
-static int rustsecp256k1_v0_1_0_scalar_add(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b);
+static int rustsecp256k1_v0_1_1_scalar_add(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b);

 /** Conditionally add a power of two to a scalar. The result is not allowed to overflow. */
-static void rustsecp256k1_v0_1_0_scalar_cadd_bit(rustsecp256k1_v0_1_0_scalar *r, unsigned int bit, int flag);
+static void rustsecp256k1_v0_1_1_scalar_cadd_bit(rustsecp256k1_v0_1_1_scalar *r, unsigned int bit, int flag);

 /** Multiply two scalars (modulo the group order). */
-static void rustsecp256k1_v0_1_0_scalar_mul(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b);
+static void rustsecp256k1_v0_1_1_scalar_mul(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b);

 /** Shift a scalar right by some amount strictly between 0 and 16, returning
 * the low bits that were shifted off */
-static int rustsecp256k1_v0_1_0_scalar_shr_int(rustsecp256k1_v0_1_0_scalar *r, int n);
+static int rustsecp256k1_v0_1_1_scalar_shr_int(rustsecp256k1_v0_1_1_scalar *r, int n);

 /** Compute the square of a scalar (modulo the group order). */
-static void rustsecp256k1_v0_1_0_scalar_sqr(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a);
+static void rustsecp256k1_v0_1_1_scalar_sqr(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a);

 /** Compute the inverse of a scalar (modulo the group order). */
-static void rustsecp256k1_v0_1_0_scalar_inverse(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a);
+static void rustsecp256k1_v0_1_1_scalar_inverse(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a);

 /** Compute the inverse of a scalar (modulo the group order), without constant-time guarantee. */
-static void rustsecp256k1_v0_1_0_scalar_inverse_var(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a);
+static void rustsecp256k1_v0_1_1_scalar_inverse_var(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a);

 /** Compute the complement of a scalar (modulo the group order). */
-static void rustsecp256k1_v0_1_0_scalar_negate(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a);
+static void rustsecp256k1_v0_1_1_scalar_negate(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a);

 /** Check whether a scalar equals zero. */
-static int rustsecp256k1_v0_1_0_scalar_is_zero(const rustsecp256k1_v0_1_0_scalar *a);
+static int rustsecp256k1_v0_1_1_scalar_is_zero(const rustsecp256k1_v0_1_1_scalar *a);

 /** Check whether a scalar equals one. */
-static int rustsecp256k1_v0_1_0_scalar_is_one(const rustsecp256k1_v0_1_0_scalar *a);
+static int rustsecp256k1_v0_1_1_scalar_is_one(const rustsecp256k1_v0_1_1_scalar *a);

 /** Check whether a scalar, considered as an nonnegative integer, is even. */
-static int rustsecp256k1_v0_1_0_scalar_is_even(const rustsecp256k1_v0_1_0_scalar *a);
+static int rustsecp256k1_v0_1_1_scalar_is_even(const rustsecp256k1_v0_1_1_scalar *a);

 /** Check whether a scalar is higher than the group order divided by 2. */
-static int rustsecp256k1_v0_1_0_scalar_is_high(const rustsecp256k1_v0_1_0_scalar *a);
+static int rustsecp256k1_v0_1_1_scalar_is_high(const rustsecp256k1_v0_1_1_scalar *a);

 /** Conditionally negate a number, in constant time.
 * Returns -1 if the number was negated, 1 otherwise */
-static int rustsecp256k1_v0_1_0_scalar_cond_negate(rustsecp256k1_v0_1_0_scalar *a, int flag);
+static int rustsecp256k1_v0_1_1_scalar_cond_negate(rustsecp256k1_v0_1_1_scalar *a, int flag);

 #ifndef USE_NUM_NONE
 /** Convert a scalar to a number. */
-static void rustsecp256k1_v0_1_0_scalar_get_num(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_scalar *a);
+static void rustsecp256k1_v0_1_1_scalar_get_num(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_scalar *a);

 /** Get the order of the group as a number. */
-static void rustsecp256k1_v0_1_0_scalar_order_get_num(rustsecp256k1_v0_1_0_num *r);
+static void rustsecp256k1_v0_1_1_scalar_order_get_num(rustsecp256k1_v0_1_1_num *r);
 #endif

 /** Compare two scalars. */
-static int rustsecp256k1_v0_1_0_scalar_eq(const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b);
+static int rustsecp256k1_v0_1_1_scalar_eq(const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b);

 #ifdef USE_ENDOMORPHISM
 /** Find r1 and r2 such that r1+r2*2^128 = a. */
-static void rustsecp256k1_v0_1_0_scalar_split_128(rustsecp256k1_v0_1_0_scalar *r1, rustsecp256k1_v0_1_0_scalar *r2, const rustsecp256k1_v0_1_0_scalar *a);
+static void rustsecp256k1_v0_1_1_scalar_split_128(rustsecp256k1_v0_1_1_scalar *r1, rustsecp256k1_v0_1_1_scalar *r2, const rustsecp256k1_v0_1_1_scalar *a);
-/** Find r1 and r2 such that r1+r2*lambda = a, and r1 and r2 are maximum 128 bits long (see rustsecp256k1_v0_1_0_gej_mul_lambda). */
+/** Find r1 and r2 such that r1+r2*lambda = a, and r1 and r2 are maximum 128 bits long (see rustsecp256k1_v0_1_1_gej_mul_lambda). */
-static void rustsecp256k1_v0_1_0_scalar_split_lambda(rustsecp256k1_v0_1_0_scalar *r1, rustsecp256k1_v0_1_0_scalar *r2, const rustsecp256k1_v0_1_0_scalar *a);
+static void rustsecp256k1_v0_1_1_scalar_split_lambda(rustsecp256k1_v0_1_1_scalar *r1, rustsecp256k1_v0_1_1_scalar *r2, const rustsecp256k1_v0_1_1_scalar *a);
 #endif

 /** Multiply a and b (without taking the modulus!), divide by 2**shift, and round to the nearest integer. Shift must be at least 256. */
-static void rustsecp256k1_v0_1_0_scalar_mul_shift_var(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b, unsigned int shift);
+static void rustsecp256k1_v0_1_1_scalar_mul_shift_var(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b, unsigned int shift);

 #endif /* SECP256K1_SCALAR_H */
@@ -12,7 +12,7 @@
 /** A scalar modulo the group order of the secp256k1 curve. */
 typedef struct {
 uint64_t d[4];
-} rustsecp256k1_v0_1_0_scalar;
+} rustsecp256k1_v0_1_1_scalar;

 #define SECP256K1_SCALAR_CONST(d7, d6, d5, d4, d3, d2, d1, d0) {{((uint64_t)(d1)) << 32 | (d0), ((uint64_t)(d3)) << 32 | (d2), ((uint64_t)(d5)) << 32 | (d4), ((uint64_t)(d7)) << 32 | (d6)}}
@@ -24,37 +24,37 @@
 #define SECP256K1_N_H_2 ((uint64_t)0xFFFFFFFFFFFFFFFFULL)
 #define SECP256K1_N_H_3 ((uint64_t)0x7FFFFFFFFFFFFFFFULL)

-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_clear(rustsecp256k1_v0_1_0_scalar *r) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_clear(rustsecp256k1_v0_1_1_scalar *r) {
 r->d[0] = 0;
 r->d[1] = 0;
 r->d[2] = 0;
 r->d[3] = 0;
 }

-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_set_int(rustsecp256k1_v0_1_0_scalar *r, unsigned int v) {
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_set_int(rustsecp256k1_v0_1_1_scalar *r, unsigned int v) {
 r->d[0] = v;
 r->d[1] = 0;
 r->d[2] = 0;
 r->d[3] = 0;
 }

-SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count) {
+SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count) {
 VERIFY_CHECK((offset + count - 1) >> 6 == offset >> 6);
 return (a->d[offset >> 6] >> (offset & 0x3F)) & ((((uint64_t)1) << count) - 1);
 }

-SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits_var(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count) {
+SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits_var(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count) {
 VERIFY_CHECK(count < 32);
 VERIFY_CHECK(offset + count <= 256);
 if ((offset + count - 1) >> 6 == offset >> 6) {
-return rustsecp256k1_v0_1_0_scalar_get_bits(a, offset, count);
+return rustsecp256k1_v0_1_1_scalar_get_bits(a, offset, count);
 } else {
 VERIFY_CHECK((offset >> 6) + 1 < 4);
 return ((a->d[offset >> 6] >> (offset & 0x3F)) | (a->d[(offset >> 6) + 1] << (64 - (offset & 0x3F)))) & ((((uint64_t)1) << count) - 1);
 }
 }

-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_check_overflow(const rustsecp256k1_v0_1_0_scalar *a) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_check_overflow(const rustsecp256k1_v0_1_1_scalar *a) {
 int yes = 0;
 int no = 0;
 no |= (a->d[3] < SECP256K1_N_3); /* No need for a > check. */
@@ -66,7 +66,7 @@ SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_check_overflow(const rus
 return yes;
 }

-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_reduce(rustsecp256k1_v0_1_0_scalar *r, unsigned int overflow) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_reduce(rustsecp256k1_v0_1_1_scalar *r, unsigned int overflow) {
 uint128_t t;
 VERIFY_CHECK(overflow <= 1);
 t = (uint128_t)r->d[0] + overflow * SECP256K1_N_C_0;
@@ -80,7 +80,7 @@ SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_reduce(rustsecp256k1_v0_
 return overflow;
 }

-static int rustsecp256k1_v0_1_0_scalar_add(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
+static int rustsecp256k1_v0_1_1_scalar_add(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
 int overflow;
 uint128_t t = (uint128_t)a->d[0] + b->d[0];
 r->d[0] = t & 0xFFFFFFFFFFFFFFFFULL; t >>= 64;
@@ -90,13 +90,13 @@ static int rustsecp256k1_v0_1_0_scalar_add(rustsecp256k1_v0_1_0_scalar *r, const
 r->d[2] = t & 0xFFFFFFFFFFFFFFFFULL; t >>= 64;
 t += (uint128_t)a->d[3] + b->d[3];
 r->d[3] = t & 0xFFFFFFFFFFFFFFFFULL; t >>= 64;
-overflow = t + rustsecp256k1_v0_1_0_scalar_check_overflow(r);
+overflow = t + rustsecp256k1_v0_1_1_scalar_check_overflow(r);
 VERIFY_CHECK(overflow == 0 || overflow == 1);
-rustsecp256k1_v0_1_0_scalar_reduce(r, overflow);
+rustsecp256k1_v0_1_1_scalar_reduce(r, overflow);
 return overflow;
 }

-static void rustsecp256k1_v0_1_0_scalar_cadd_bit(rustsecp256k1_v0_1_0_scalar *r, unsigned int bit, int flag) {
+static void rustsecp256k1_v0_1_1_scalar_cadd_bit(rustsecp256k1_v0_1_1_scalar *r, unsigned int bit, int flag) {
 uint128_t t;
 VERIFY_CHECK(bit < 256);
 bit += ((uint32_t) flag - 1) & 0x100; /* forcing (bit >> 6) > 3 makes this a noop */
@@ -110,35 +110,35 @@ static void rustsecp256k1_v0_1_0_scalar_cadd_bit(rustsecp256k1_v0_1_0_scalar *r,
 r->d[3] = t & 0xFFFFFFFFFFFFFFFFULL;
 #ifdef VERIFY
 VERIFY_CHECK((t >> 64) == 0);
-VERIFY_CHECK(rustsecp256k1_v0_1_0_scalar_check_overflow(r) == 0);
+VERIFY_CHECK(rustsecp256k1_v0_1_1_scalar_check_overflow(r) == 0);
 #endif
 }

-static void rustsecp256k1_v0_1_0_scalar_set_b32(rustsecp256k1_v0_1_0_scalar *r, const unsigned char *b32, int *overflow) {
+static void rustsecp256k1_v0_1_1_scalar_set_b32(rustsecp256k1_v0_1_1_scalar *r, const unsigned char *b32, int *overflow) {
 int over;
 r->d[0] = (uint64_t)b32[31] | (uint64_t)b32[30] << 8 | (uint64_t)b32[29] << 16 | (uint64_t)b32[28] << 24 | (uint64_t)b32[27] << 32 | (uint64_t)b32[26] << 40 | (uint64_t)b32[25] << 48 | (uint64_t)b32[24] << 56;
 r->d[1] = (uint64_t)b32[23] | (uint64_t)b32[22] << 8 | (uint64_t)b32[21] << 16 | (uint64_t)b32[20] << 24 | (uint64_t)b32[19] << 32 | (uint64_t)b32[18] << 40 | (uint64_t)b32[17] << 48 | (uint64_t)b32[16] << 56;
 r->d[2] = (uint64_t)b32[15] | (uint64_t)b32[14] << 8 | (uint64_t)b32[13] << 16 | (uint64_t)b32[12] << 24 | (uint64_t)b32[11] << 32 | (uint64_t)b32[10] << 40 | (uint64_t)b32[9] << 48 | (uint64_t)b32[8] << 56;
 r->d[3] = (uint64_t)b32[7] | (uint64_t)b32[6] << 8 | (uint64_t)b32[5] << 16 | (uint64_t)b32[4] << 24 | (uint64_t)b32[3] << 32 | (uint64_t)b32[2] << 40 | (uint64_t)b32[1] << 48 | (uint64_t)b32[0] << 56;
-over = rustsecp256k1_v0_1_0_scalar_reduce(r, rustsecp256k1_v0_1_0_scalar_check_overflow(r));
+over = rustsecp256k1_v0_1_1_scalar_reduce(r, rustsecp256k1_v0_1_1_scalar_check_overflow(r));
 if (overflow) {
 *overflow = over;
 }
 }

-static void rustsecp256k1_v0_1_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_0_scalar* a) {
+static void rustsecp256k1_v0_1_1_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_1_scalar* a) {
 bin[0] = a->d[3] >> 56; bin[1] = a->d[3] >> 48; bin[2] = a->d[3] >> 40; bin[3] = a->d[3] >> 32; bin[4] = a->d[3] >> 24; bin[5] = a->d[3] >> 16; bin[6] = a->d[3] >> 8; bin[7] = a->d[3];
 bin[8] = a->d[2] >> 56; bin[9] = a->d[2] >> 48; bin[10] = a->d[2] >> 40; bin[11] = a->d[2] >> 32; bin[12] = a->d[2] >> 24; bin[13] = a->d[2] >> 16; bin[14] = a->d[2] >> 8; bin[15] = a->d[2];
 bin[16] = a->d[1] >> 56; bin[17] = a->d[1] >> 48; bin[18] = a->d[1] >> 40; bin[19] = a->d[1] >> 32; bin[20] = a->d[1] >> 24; bin[21] = a->d[1] >> 16; bin[22] = a->d[1] >> 8; bin[23] = a->d[1];
 bin[24] = a->d[0] >> 56; bin[25] = a->d[0] >> 48; bin[26] = a->d[0] >> 40; bin[27] = a->d[0] >> 32; bin[28] = a->d[0] >> 24; bin[29] = a->d[0] >> 16; bin[30] = a->d[0] >> 8; bin[31] = a->d[0];
 }

-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_is_zero(const rustsecp256k1_v0_1_0_scalar *a) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_zero(const rustsecp256k1_v0_1_1_scalar *a) {
 return (a->d[0] | a->d[1] | a->d[2] | a->d[3]) == 0;
 }

-static void rustsecp256k1_v0_1_0_scalar_negate(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a) {
+static void rustsecp256k1_v0_1_1_scalar_negate(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a) {
-uint64_t nonzero = 0xFFFFFFFFFFFFFFFFULL * (rustsecp256k1_v0_1_0_scalar_is_zero(a) == 0);
+uint64_t nonzero = 0xFFFFFFFFFFFFFFFFULL * (rustsecp256k1_v0_1_1_scalar_is_zero(a) == 0);
 uint128_t t = (uint128_t)(~a->d[0]) + SECP256K1_N_0 + 1;
 r->d[0] = t & nonzero; t >>= 64;
 t += (uint128_t)(~a->d[1]) + SECP256K1_N_1;
@@ -149,11 +149,11 @@ static void rustsecp256k1_v0_1_0_scalar_negate(rustsecp256k1_v0_1_0_scalar *r, c
 r->d[3] = t & nonzero;
 }

-SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_is_one(const rustsecp256k1_v0_1_0_scalar *a) {
+SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_one(const rustsecp256k1_v0_1_1_scalar *a) {
 return ((a->d[0] ^ 1) | a->d[1] | a->d[2] | a->d[3]) == 0;
 }

-static int rustsecp256k1_v0_1_0_scalar_is_high(const rustsecp256k1_v0_1_0_scalar *a) {
+static int rustsecp256k1_v0_1_1_scalar_is_high(const rustsecp256k1_v0_1_1_scalar *a) {
 int yes = 0;
 int no = 0;
 no |= (a->d[3] < SECP256K1_N_H_3);
@@ -165,11 +165,11 @@ static int rustsecp256k1_v0_1_0_scalar_is_high(const rustsecp256k1_v0_1_0_scalar
 return yes;
 }

-static int rustsecp256k1_v0_1_0_scalar_cond_negate(rustsecp256k1_v0_1_0_scalar *r, int flag) {
+static int rustsecp256k1_v0_1_1_scalar_cond_negate(rustsecp256k1_v0_1_1_scalar *r, int flag) {
 /* If we are flag = 0, mask = 00...00 and this is a no-op;
- * if we are flag = 1, mask = 11...11 and this is identical to rustsecp256k1_v0_1_0_scalar_negate */
+ * if we are flag = 1, mask = 11...11 and this is identical to rustsecp256k1_v0_1_1_scalar_negate */
 uint64_t mask = !flag - 1;
-uint64_t nonzero = (rustsecp256k1_v0_1_0_scalar_is_zero(r) != 0) - 1;
+uint64_t nonzero = (rustsecp256k1_v0_1_1_scalar_is_zero(r) != 0) - 1;
 uint128_t t = (uint128_t)(r->d[0] ^ mask) + ((SECP256K1_N_0 + 1) & mask);
 r->d[0] = t & nonzero; t >>= 64;
 t += (uint128_t)(r->d[1] ^ mask) + (SECP256K1_N_1 & mask);
@ -267,7 +267,7 @@ static int rustsecp256k1_v0_1_0_scalar_cond_negate(rustsecp256k1_v0_1_0_scalar *
|
||||||
VERIFY_CHECK(c2 == 0); \
|
VERIFY_CHECK(c2 == 0); \
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_scalar_reduce_512(rustsecp256k1_v0_1_0_scalar *r, const uint64_t *l) {
|
static void rustsecp256k1_v0_1_1_scalar_reduce_512(rustsecp256k1_v0_1_1_scalar *r, const uint64_t *l) {
|
||||||
#ifdef USE_ASM_X86_64
|
#ifdef USE_ASM_X86_64
|
||||||
/* Reduce 512 bits into 385. */
|
/* Reduce 512 bits into 385. */
|
||||||
uint64_t m0, m1, m2, m3, m4, m5, m6;
|
uint64_t m0, m1, m2, m3, m4, m5, m6;
|
||||||
|
@ -573,10 +573,10 @@ static void rustsecp256k1_v0_1_0_scalar_reduce_512(rustsecp256k1_v0_1_0_scalar *
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
/* Final reduction of r. */
|
/* Final reduction of r. */
|
||||||
rustsecp256k1_v0_1_0_scalar_reduce(r, c + rustsecp256k1_v0_1_0_scalar_check_overflow(r));
|
rustsecp256k1_v0_1_1_scalar_reduce(r, c + rustsecp256k1_v0_1_1_scalar_check_overflow(r));
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_scalar_mul_512(uint64_t l[8], const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
|
static void rustsecp256k1_v0_1_1_scalar_mul_512(uint64_t l[8], const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
|
||||||
#ifdef USE_ASM_X86_64
|
#ifdef USE_ASM_X86_64
|
||||||
const uint64_t *pb = b->d;
|
const uint64_t *pb = b->d;
|
||||||
__asm__ __volatile__(
|
__asm__ __volatile__(
|
||||||
|
@ -743,7 +743,7 @@ static void rustsecp256k1_v0_1_0_scalar_mul_512(uint64_t l[8], const rustsecp256
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_scalar_sqr_512(uint64_t l[8], const rustsecp256k1_v0_1_0_scalar *a) {
|
static void rustsecp256k1_v0_1_1_scalar_sqr_512(uint64_t l[8], const rustsecp256k1_v0_1_1_scalar *a) {
|
||||||
#ifdef USE_ASM_X86_64
|
#ifdef USE_ASM_X86_64
|
||||||
__asm__ __volatile__(
|
__asm__ __volatile__(
|
||||||
/* Preload */
|
/* Preload */
|
||||||
|
@ -888,13 +888,13 @@ static void rustsecp256k1_v0_1_0_scalar_sqr_512(uint64_t l[8], const rustsecp256
|
||||||
#undef extract
|
#undef extract
|
||||||
#undef extract_fast
|
#undef extract_fast
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_scalar_mul(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
|
static void rustsecp256k1_v0_1_1_scalar_mul(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
|
||||||
uint64_t l[8];
|
uint64_t l[8];
|
||||||
rustsecp256k1_v0_1_0_scalar_mul_512(l, a, b);
|
rustsecp256k1_v0_1_1_scalar_mul_512(l, a, b);
|
||||||
rustsecp256k1_v0_1_0_scalar_reduce_512(r, l);
|
rustsecp256k1_v0_1_1_scalar_reduce_512(r, l);
|
||||||
}
|
}
|
||||||
|
|
||||||
static int rustsecp256k1_v0_1_0_scalar_shr_int(rustsecp256k1_v0_1_0_scalar *r, int n) {
|
static int rustsecp256k1_v0_1_1_scalar_shr_int(rustsecp256k1_v0_1_1_scalar *r, int n) {
|
||||||
int ret;
|
int ret;
|
||||||
VERIFY_CHECK(n > 0);
|
VERIFY_CHECK(n > 0);
|
||||||
VERIFY_CHECK(n < 16);
|
VERIFY_CHECK(n < 16);
|
||||||
|
@ -906,14 +906,14 @@ static int rustsecp256k1_v0_1_0_scalar_shr_int(rustsecp256k1_v0_1_0_scalar *r, i
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_scalar_sqr(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a) {
|
static void rustsecp256k1_v0_1_1_scalar_sqr(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a) {
|
||||||
uint64_t l[8];
|
uint64_t l[8];
|
||||||
rustsecp256k1_v0_1_0_scalar_sqr_512(l, a);
|
rustsecp256k1_v0_1_1_scalar_sqr_512(l, a);
|
||||||
rustsecp256k1_v0_1_0_scalar_reduce_512(r, l);
|
rustsecp256k1_v0_1_1_scalar_reduce_512(r, l);
|
||||||
}
|
}
|
||||||
|
|
||||||
#ifdef USE_ENDOMORPHISM
|
#ifdef USE_ENDOMORPHISM
|
||||||
static void rustsecp256k1_v0_1_0_scalar_split_128(rustsecp256k1_v0_1_0_scalar *r1, rustsecp256k1_v0_1_0_scalar *r2, const rustsecp256k1_v0_1_0_scalar *a) {
|
static void rustsecp256k1_v0_1_1_scalar_split_128(rustsecp256k1_v0_1_1_scalar *r1, rustsecp256k1_v0_1_1_scalar *r2, const rustsecp256k1_v0_1_1_scalar *a) {
|
||||||
r1->d[0] = a->d[0];
|
r1->d[0] = a->d[0];
|
||||||
r1->d[1] = a->d[1];
|
r1->d[1] = a->d[1];
|
||||||
r1->d[2] = 0;
|
r1->d[2] = 0;
|
||||||
|
@ -925,17 +925,17 @@ static void rustsecp256k1_v0_1_0_scalar_split_128(rustsecp256k1_v0_1_0_scalar *r
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_eq(const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
|
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_eq(const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
|
||||||
return ((a->d[0] ^ b->d[0]) | (a->d[1] ^ b->d[1]) | (a->d[2] ^ b->d[2]) | (a->d[3] ^ b->d[3])) == 0;
|
return ((a->d[0] ^ b->d[0]) | (a->d[1] ^ b->d[1]) | (a->d[2] ^ b->d[2]) | (a->d[3] ^ b->d[3])) == 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_mul_shift_var(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b, unsigned int shift) {
|
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_mul_shift_var(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b, unsigned int shift) {
|
||||||
uint64_t l[8];
|
uint64_t l[8];
|
||||||
unsigned int shiftlimbs;
|
unsigned int shiftlimbs;
|
||||||
unsigned int shiftlow;
|
unsigned int shiftlow;
|
||||||
unsigned int shifthigh;
|
unsigned int shifthigh;
|
||||||
VERIFY_CHECK(shift >= 256);
|
VERIFY_CHECK(shift >= 256);
|
||||||
rustsecp256k1_v0_1_0_scalar_mul_512(l, a, b);
|
rustsecp256k1_v0_1_1_scalar_mul_512(l, a, b);
|
||||||
shiftlimbs = shift >> 6;
|
shiftlimbs = shift >> 6;
|
||||||
shiftlow = shift & 0x3F;
|
shiftlow = shift & 0x3F;
|
||||||
shifthigh = 64 - shiftlow;
|
shifthigh = 64 - shiftlow;
|
||||||
|
@ -943,7 +943,7 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_mul_shift_var(rustsecp2
|
||||||
r->d[1] = shift < 448 ? (l[1 + shiftlimbs] >> shiftlow | (shift < 384 && shiftlow ? (l[2 + shiftlimbs] << shifthigh) : 0)) : 0;
|
r->d[1] = shift < 448 ? (l[1 + shiftlimbs] >> shiftlow | (shift < 384 && shiftlow ? (l[2 + shiftlimbs] << shifthigh) : 0)) : 0;
|
||||||
r->d[2] = shift < 384 ? (l[2 + shiftlimbs] >> shiftlow | (shift < 320 && shiftlow ? (l[3 + shiftlimbs] << shifthigh) : 0)) : 0;
|
r->d[2] = shift < 384 ? (l[2 + shiftlimbs] >> shiftlow | (shift < 320 && shiftlow ? (l[3 + shiftlimbs] << shifthigh) : 0)) : 0;
|
||||||
r->d[3] = shift < 320 ? (l[3 + shiftlimbs] >> shiftlow) : 0;
|
r->d[3] = shift < 320 ? (l[3 + shiftlimbs] >> shiftlow) : 0;
|
||||||
rustsecp256k1_v0_1_0_scalar_cadd_bit(r, 0, (l[(shift - 1) >> 6] >> ((shift - 1) & 0x3f)) & 1);
|
rustsecp256k1_v0_1_1_scalar_cadd_bit(r, 0, (l[(shift - 1) >> 6] >> ((shift - 1) & 0x3f)) & 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
#endif /* SECP256K1_SCALAR_REPR_IMPL_H */
|
#endif /* SECP256K1_SCALAR_REPR_IMPL_H */
|
||||||
|
|
|
@@ -12,7 +12,7 @@
/** A scalar modulo the group order of the secp256k1 curve. */
typedef struct {
uint32_t d[8];
} rustsecp256k1_v0_1_0_scalar;
} rustsecp256k1_v0_1_1_scalar;

#define SECP256K1_SCALAR_CONST(d7, d6, d5, d4, d3, d2, d1, d0) {{(d0), (d1), (d2), (d3), (d4), (d5), (d6), (d7)}}
@@ -34,7 +34,7 @@
#define SECP256K1_N_H_6 ((uint32_t)0xFFFFFFFFUL)
#define SECP256K1_N_H_7 ((uint32_t)0x7FFFFFFFUL)

SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_clear(rustsecp256k1_v0_1_0_scalar *r) {
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_clear(rustsecp256k1_v0_1_1_scalar *r) {
r->d[0] = 0;
r->d[1] = 0;
r->d[2] = 0;
@@ -45,7 +45,7 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_clear(rustsecp256k1_v0_
r->d[7] = 0;
}

SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_set_int(rustsecp256k1_v0_1_0_scalar *r, unsigned int v) {
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_set_int(rustsecp256k1_v0_1_1_scalar *r, unsigned int v) {
r->d[0] = v;
r->d[1] = 0;
r->d[2] = 0;
@@ -56,23 +56,23 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_set_int(rustsecp256k1_v
r->d[7] = 0;
}

SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count) {
SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count) {
VERIFY_CHECK((offset + count - 1) >> 5 == offset >> 5);
return (a->d[offset >> 5] >> (offset & 0x1F)) & ((1 << count) - 1);
}

SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits_var(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count) {
SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits_var(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count) {
VERIFY_CHECK(count < 32);
VERIFY_CHECK(offset + count <= 256);
if ((offset + count - 1) >> 5 == offset >> 5) {
return rustsecp256k1_v0_1_0_scalar_get_bits(a, offset, count);
return rustsecp256k1_v0_1_1_scalar_get_bits(a, offset, count);
} else {
VERIFY_CHECK((offset >> 5) + 1 < 8);
return ((a->d[offset >> 5] >> (offset & 0x1F)) | (a->d[(offset >> 5) + 1] << (32 - (offset & 0x1F)))) & ((((uint32_t)1) << count) - 1);
}
}

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_check_overflow(const rustsecp256k1_v0_1_0_scalar *a) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_check_overflow(const rustsecp256k1_v0_1_1_scalar *a) {
int yes = 0;
int no = 0;
no |= (a->d[7] < SECP256K1_N_7); /* No need for a > check. */
@@ -90,7 +90,7 @@ SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_check_overflow(const rus
return yes;
}

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_reduce(rustsecp256k1_v0_1_0_scalar *r, uint32_t overflow) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_reduce(rustsecp256k1_v0_1_1_scalar *r, uint32_t overflow) {
uint64_t t;
VERIFY_CHECK(overflow <= 1);
t = (uint64_t)r->d[0] + overflow * SECP256K1_N_C_0;
@@ -112,7 +112,7 @@ SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_reduce(rustsecp256k1_v0_
return overflow;
}

static int rustsecp256k1_v0_1_0_scalar_add(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
static int rustsecp256k1_v0_1_1_scalar_add(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
int overflow;
uint64_t t = (uint64_t)a->d[0] + b->d[0];
r->d[0] = t & 0xFFFFFFFFULL; t >>= 32;
@@ -130,13 +130,13 @@ static int rustsecp256k1_v0_1_0_scalar_add(rustsecp256k1_v0_1_0_scalar *r, const
r->d[6] = t & 0xFFFFFFFFULL; t >>= 32;
t += (uint64_t)a->d[7] + b->d[7];
r->d[7] = t & 0xFFFFFFFFULL; t >>= 32;
overflow = t + rustsecp256k1_v0_1_0_scalar_check_overflow(r);
overflow = t + rustsecp256k1_v0_1_1_scalar_check_overflow(r);
VERIFY_CHECK(overflow == 0 || overflow == 1);
rustsecp256k1_v0_1_0_scalar_reduce(r, overflow);
rustsecp256k1_v0_1_1_scalar_reduce(r, overflow);
return overflow;
}

static void rustsecp256k1_v0_1_0_scalar_cadd_bit(rustsecp256k1_v0_1_0_scalar *r, unsigned int bit, int flag) {
static void rustsecp256k1_v0_1_1_scalar_cadd_bit(rustsecp256k1_v0_1_1_scalar *r, unsigned int bit, int flag) {
uint64_t t;
VERIFY_CHECK(bit < 256);
bit += ((uint32_t) flag - 1) & 0x100; /* forcing (bit >> 5) > 7 makes this a noop */
@@ -158,11 +158,11 @@ static void rustsecp256k1_v0_1_0_scalar_cadd_bit(rustsecp256k1_v0_1_0_scalar *r,
r->d[7] = t & 0xFFFFFFFFULL;
#ifdef VERIFY
VERIFY_CHECK((t >> 32) == 0);
VERIFY_CHECK(rustsecp256k1_v0_1_0_scalar_check_overflow(r) == 0);
VERIFY_CHECK(rustsecp256k1_v0_1_1_scalar_check_overflow(r) == 0);
#endif
}

static void rustsecp256k1_v0_1_0_scalar_set_b32(rustsecp256k1_v0_1_0_scalar *r, const unsigned char *b32, int *overflow) {
static void rustsecp256k1_v0_1_1_scalar_set_b32(rustsecp256k1_v0_1_1_scalar *r, const unsigned char *b32, int *overflow) {
int over;
r->d[0] = (uint32_t)b32[31] | (uint32_t)b32[30] << 8 | (uint32_t)b32[29] << 16 | (uint32_t)b32[28] << 24;
r->d[1] = (uint32_t)b32[27] | (uint32_t)b32[26] << 8 | (uint32_t)b32[25] << 16 | (uint32_t)b32[24] << 24;
@@ -172,13 +172,13 @@ static void rustsecp256k1_v0_1_0_scalar_set_b32(rustsecp256k1_v0_1_0_scalar *r,
r->d[5] = (uint32_t)b32[11] | (uint32_t)b32[10] << 8 | (uint32_t)b32[9] << 16 | (uint32_t)b32[8] << 24;
r->d[6] = (uint32_t)b32[7] | (uint32_t)b32[6] << 8 | (uint32_t)b32[5] << 16 | (uint32_t)b32[4] << 24;
r->d[7] = (uint32_t)b32[3] | (uint32_t)b32[2] << 8 | (uint32_t)b32[1] << 16 | (uint32_t)b32[0] << 24;
over = rustsecp256k1_v0_1_0_scalar_reduce(r, rustsecp256k1_v0_1_0_scalar_check_overflow(r));
over = rustsecp256k1_v0_1_1_scalar_reduce(r, rustsecp256k1_v0_1_1_scalar_check_overflow(r));
if (overflow) {
*overflow = over;
}
}

static void rustsecp256k1_v0_1_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_0_scalar* a) {
static void rustsecp256k1_v0_1_1_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_1_scalar* a) {
bin[0] = a->d[7] >> 24; bin[1] = a->d[7] >> 16; bin[2] = a->d[7] >> 8; bin[3] = a->d[7];
bin[4] = a->d[6] >> 24; bin[5] = a->d[6] >> 16; bin[6] = a->d[6] >> 8; bin[7] = a->d[6];
bin[8] = a->d[5] >> 24; bin[9] = a->d[5] >> 16; bin[10] = a->d[5] >> 8; bin[11] = a->d[5];
@@ -189,12 +189,12 @@ static void rustsecp256k1_v0_1_0_scalar_get_b32(unsigned char *bin, const rustse
bin[28] = a->d[0] >> 24; bin[29] = a->d[0] >> 16; bin[30] = a->d[0] >> 8; bin[31] = a->d[0];
}

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_is_zero(const rustsecp256k1_v0_1_0_scalar *a) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_zero(const rustsecp256k1_v0_1_1_scalar *a) {
return (a->d[0] | a->d[1] | a->d[2] | a->d[3] | a->d[4] | a->d[5] | a->d[6] | a->d[7]) == 0;
}

static void rustsecp256k1_v0_1_0_scalar_negate(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_negate(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a) {
uint32_t nonzero = 0xFFFFFFFFUL * (rustsecp256k1_v0_1_0_scalar_is_zero(a) == 0);
uint32_t nonzero = 0xFFFFFFFFUL * (rustsecp256k1_v0_1_1_scalar_is_zero(a) == 0);
uint64_t t = (uint64_t)(~a->d[0]) + SECP256K1_N_0 + 1;
r->d[0] = t & nonzero; t >>= 32;
t += (uint64_t)(~a->d[1]) + SECP256K1_N_1;
@@ -213,11 +213,11 @@ static void rustsecp256k1_v0_1_0_scalar_negate(rustsecp256k1_v0_1_0_scalar *r, c
r->d[7] = t & nonzero;
}

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_is_one(const rustsecp256k1_v0_1_0_scalar *a) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_one(const rustsecp256k1_v0_1_1_scalar *a) {
return ((a->d[0] ^ 1) | a->d[1] | a->d[2] | a->d[3] | a->d[4] | a->d[5] | a->d[6] | a->d[7]) == 0;
}

static int rustsecp256k1_v0_1_0_scalar_is_high(const rustsecp256k1_v0_1_0_scalar *a) {
static int rustsecp256k1_v0_1_1_scalar_is_high(const rustsecp256k1_v0_1_1_scalar *a) {
int yes = 0;
int no = 0;
no |= (a->d[7] < SECP256K1_N_H_7);
@@ -235,11 +235,11 @@ static int rustsecp256k1_v0_1_0_scalar_is_high(const rustsecp256k1_v0_1_0_scalar
return yes;
}

static int rustsecp256k1_v0_1_0_scalar_cond_negate(rustsecp256k1_v0_1_0_scalar *r, int flag) {
static int rustsecp256k1_v0_1_1_scalar_cond_negate(rustsecp256k1_v0_1_1_scalar *r, int flag) {
/* If we are flag = 0, mask = 00...00 and this is a no-op;
* if we are flag = 1, mask = 11...11 and this is identical to rustsecp256k1_v0_1_0_scalar_negate */
* if we are flag = 1, mask = 11...11 and this is identical to rustsecp256k1_v0_1_1_scalar_negate */
uint32_t mask = !flag - 1;
uint32_t nonzero = 0xFFFFFFFFUL * (rustsecp256k1_v0_1_0_scalar_is_zero(r) == 0);
uint32_t nonzero = 0xFFFFFFFFUL * (rustsecp256k1_v0_1_1_scalar_is_zero(r) == 0);
uint64_t t = (uint64_t)(r->d[0] ^ mask) + ((SECP256K1_N_0 + 1) & mask);
r->d[0] = t & nonzero; t >>= 32;
t += (uint64_t)(r->d[1] ^ mask) + (SECP256K1_N_1 & mask);
@@ -346,7 +346,7 @@ static int rustsecp256k1_v0_1_0_scalar_cond_negate(rustsecp256k1_v0_1_0_scalar *
VERIFY_CHECK(c2 == 0); \
}

static void rustsecp256k1_v0_1_0_scalar_reduce_512(rustsecp256k1_v0_1_0_scalar *r, const uint32_t *l) {
static void rustsecp256k1_v0_1_1_scalar_reduce_512(rustsecp256k1_v0_1_1_scalar *r, const uint32_t *l) {
uint64_t c;
uint32_t n0 = l[8], n1 = l[9], n2 = l[10], n3 = l[11], n4 = l[12], n5 = l[13], n6 = l[14], n7 = l[15];
uint32_t m0, m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12;
@@ -485,10 +485,10 @@ static void rustsecp256k1_v0_1_0_scalar_reduce_512(rustsecp256k1_v0_1_0_scalar *
r->d[7] = c & 0xFFFFFFFFUL; c >>= 32;

/* Final reduction of r. */
rustsecp256k1_v0_1_0_scalar_reduce(r, c + rustsecp256k1_v0_1_0_scalar_check_overflow(r));
rustsecp256k1_v0_1_1_scalar_reduce(r, c + rustsecp256k1_v0_1_1_scalar_check_overflow(r));
}

static void rustsecp256k1_v0_1_0_scalar_mul_512(uint32_t *l, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
static void rustsecp256k1_v0_1_1_scalar_mul_512(uint32_t *l, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
/* 96 bit accumulator. */
uint32_t c0 = 0, c1 = 0, c2 = 0;

@@ -576,7 +576,7 @@ static void rustsecp256k1_v0_1_0_scalar_mul_512(uint32_t *l, const rustsecp256k1
l[15] = c0;
}

static void rustsecp256k1_v0_1_0_scalar_sqr_512(uint32_t *l, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_sqr_512(uint32_t *l, const rustsecp256k1_v0_1_1_scalar *a) {
/* 96 bit accumulator. */
uint32_t c0 = 0, c1 = 0, c2 = 0;

@@ -644,13 +644,13 @@ static void rustsecp256k1_v0_1_0_scalar_sqr_512(uint32_t *l, const rustsecp256k1
#undef extract
#undef extract_fast

static void rustsecp256k1_v0_1_0_scalar_mul(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
static void rustsecp256k1_v0_1_1_scalar_mul(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
uint32_t l[16];
rustsecp256k1_v0_1_0_scalar_mul_512(l, a, b);
rustsecp256k1_v0_1_1_scalar_mul_512(l, a, b);
rustsecp256k1_v0_1_0_scalar_reduce_512(r, l);
rustsecp256k1_v0_1_1_scalar_reduce_512(r, l);
}

static int rustsecp256k1_v0_1_0_scalar_shr_int(rustsecp256k1_v0_1_0_scalar *r, int n) {
static int rustsecp256k1_v0_1_1_scalar_shr_int(rustsecp256k1_v0_1_1_scalar *r, int n) {
int ret;
VERIFY_CHECK(n > 0);
VERIFY_CHECK(n < 16);
@@ -666,14 +666,14 @@ static int rustsecp256k1_v0_1_0_scalar_shr_int(rustsecp256k1_v0_1_0_scalar *r, i
return ret;
}

static void rustsecp256k1_v0_1_0_scalar_sqr(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_sqr(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a) {
uint32_t l[16];
rustsecp256k1_v0_1_0_scalar_sqr_512(l, a);
rustsecp256k1_v0_1_1_scalar_sqr_512(l, a);
rustsecp256k1_v0_1_0_scalar_reduce_512(r, l);
rustsecp256k1_v0_1_1_scalar_reduce_512(r, l);
}

#ifdef USE_ENDOMORPHISM
static void rustsecp256k1_v0_1_0_scalar_split_128(rustsecp256k1_v0_1_0_scalar *r1, rustsecp256k1_v0_1_0_scalar *r2, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_split_128(rustsecp256k1_v0_1_1_scalar *r1, rustsecp256k1_v0_1_1_scalar *r2, const rustsecp256k1_v0_1_1_scalar *a) {
r1->d[0] = a->d[0];
r1->d[1] = a->d[1];
r1->d[2] = a->d[2];
@@ -693,17 +693,17 @@ static void rustsecp256k1_v0_1_0_scalar_split_128(rustsecp256k1_v0_1_0_scalar *r
}
#endif

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_eq(const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_eq(const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
return ((a->d[0] ^ b->d[0]) | (a->d[1] ^ b->d[1]) | (a->d[2] ^ b->d[2]) | (a->d[3] ^ b->d[3]) | (a->d[4] ^ b->d[4]) | (a->d[5] ^ b->d[5]) | (a->d[6] ^ b->d[6]) | (a->d[7] ^ b->d[7])) == 0;
}

SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_mul_shift_var(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b, unsigned int shift) {
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_mul_shift_var(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b, unsigned int shift) {
uint32_t l[16];
unsigned int shiftlimbs;
unsigned int shiftlow;
unsigned int shifthigh;
VERIFY_CHECK(shift >= 256);
rustsecp256k1_v0_1_0_scalar_mul_512(l, a, b);
rustsecp256k1_v0_1_1_scalar_mul_512(l, a, b);
shiftlimbs = shift >> 5;
shiftlow = shift & 0x1F;
shifthigh = 32 - shiftlow;
@@ -715,7 +715,7 @@ SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_mul_shift_var(rustsecp2
r->d[5] = shift < 352 ? (l[5 + shiftlimbs] >> shiftlow | (shift < 320 && shiftlow ? (l[6 + shiftlimbs] << shifthigh) : 0)) : 0;
r->d[6] = shift < 320 ? (l[6 + shiftlimbs] >> shiftlow | (shift < 288 && shiftlow ? (l[7 + shiftlimbs] << shifthigh) : 0)) : 0;
r->d[7] = shift < 288 ? (l[7 + shiftlimbs] >> shiftlow) : 0;
rustsecp256k1_v0_1_0_scalar_cadd_bit(r, 0, (l[(shift - 1) >> 5] >> ((shift - 1) & 0x1f)) & 1);
rustsecp256k1_v0_1_1_scalar_cadd_bit(r, 0, (l[(shift - 1) >> 5] >> ((shift - 1) & 0x1f)) & 1);
}

#endif /* SECP256K1_SCALAR_REPR_IMPL_H */
@@ -25,14 +25,14 @@
#endif

#ifndef USE_NUM_NONE
static void rustsecp256k1_v0_1_0_scalar_get_num(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_get_num(rustsecp256k1_v0_1_1_num *r, const rustsecp256k1_v0_1_1_scalar *a) {
unsigned char c[32];
rustsecp256k1_v0_1_0_scalar_get_b32(c, a);
rustsecp256k1_v0_1_1_scalar_get_b32(c, a);
rustsecp256k1_v0_1_0_num_set_bin(r, c, 32);
rustsecp256k1_v0_1_1_num_set_bin(r, c, 32);
}

/** secp256k1 curve order, see rustsecp256k1_v0_1_0_ecdsa_const_order_as_fe in ecdsa_impl.h */
/** secp256k1 curve order, see rustsecp256k1_v0_1_1_ecdsa_const_order_as_fe in ecdsa_impl.h */
static void rustsecp256k1_v0_1_0_scalar_order_get_num(rustsecp256k1_v0_1_0_num *r) {
static void rustsecp256k1_v0_1_1_scalar_order_get_num(rustsecp256k1_v0_1_1_num *r) {
#if defined(EXHAUSTIVE_TEST_ORDER)
static const unsigned char order[32] = {
0,0,0,0,0,0,0,0,
@@ -48,11 +48,11 @@ static void rustsecp256k1_v0_1_0_scalar_order_get_num(rustsecp256k1_v0_1_0_num *
0xBF,0xD2,0x5E,0x8C,0xD0,0x36,0x41,0x41
};
#endif
rustsecp256k1_v0_1_0_num_set_bin(r, order, 32);
rustsecp256k1_v0_1_1_num_set_bin(r, order, 32);
}
#endif

static void rustsecp256k1_v0_1_0_scalar_inverse(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *x) {
static void rustsecp256k1_v0_1_1_scalar_inverse(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *x) {
#if defined(EXHAUSTIVE_TEST_ORDER)
int i;
*r = 0;
@@ -64,180 +64,180 @@ static void rustsecp256k1_v0_1_0_scalar_inverse(rustsecp256k1_v0_1_0_scalar *r,
VERIFY_CHECK(*r != 0);
}
#else
rustsecp256k1_v0_1_0_scalar *t;
rustsecp256k1_v0_1_1_scalar *t;
int i;
/* First compute xN as x ^ (2^N - 1) for some values of N,
* and uM as x ^ M for some values of M. */
rustsecp256k1_v0_1_0_scalar x2, x3, x6, x8, x14, x28, x56, x112, x126;
rustsecp256k1_v0_1_1_scalar x2, x3, x6, x8, x14, x28, x56, x112, x126;
rustsecp256k1_v0_1_0_scalar u2, u5, u9, u11, u13;
rustsecp256k1_v0_1_1_scalar u2, u5, u9, u11, u13;

rustsecp256k1_v0_1_0_scalar_sqr(&u2, x);
rustsecp256k1_v0_1_1_scalar_sqr(&u2, x);
rustsecp256k1_v0_1_0_scalar_mul(&x2, &u2, x);
rustsecp256k1_v0_1_1_scalar_mul(&x2, &u2, x);
rustsecp256k1_v0_1_0_scalar_mul(&u5, &u2, &x2);
rustsecp256k1_v0_1_1_scalar_mul(&u5, &u2, &x2);
rustsecp256k1_v0_1_0_scalar_mul(&x3, &u5, &u2);
rustsecp256k1_v0_1_1_scalar_mul(&x3, &u5, &u2);
rustsecp256k1_v0_1_0_scalar_mul(&u9, &x3, &u2);
rustsecp256k1_v0_1_1_scalar_mul(&u9, &x3, &u2);
rustsecp256k1_v0_1_0_scalar_mul(&u11, &u9, &u2);
rustsecp256k1_v0_1_1_scalar_mul(&u11, &u9, &u2);
rustsecp256k1_v0_1_0_scalar_mul(&u13, &u11, &u2);
rustsecp256k1_v0_1_1_scalar_mul(&u13, &u11, &u2);

rustsecp256k1_v0_1_0_scalar_sqr(&x6, &u13);
rustsecp256k1_v0_1_1_scalar_sqr(&x6, &u13);
rustsecp256k1_v0_1_0_scalar_sqr(&x6, &x6);
rustsecp256k1_v0_1_1_scalar_sqr(&x6, &x6);
rustsecp256k1_v0_1_0_scalar_mul(&x6, &x6, &u11);
rustsecp256k1_v0_1_1_scalar_mul(&x6, &x6, &u11);

rustsecp256k1_v0_1_0_scalar_sqr(&x8, &x6);
rustsecp256k1_v0_1_1_scalar_sqr(&x8, &x6);
rustsecp256k1_v0_1_0_scalar_sqr(&x8, &x8);
rustsecp256k1_v0_1_1_scalar_sqr(&x8, &x8);
rustsecp256k1_v0_1_0_scalar_mul(&x8, &x8, &x2);
rustsecp256k1_v0_1_1_scalar_mul(&x8, &x8, &x2);

rustsecp256k1_v0_1_0_scalar_sqr(&x14, &x8);
rustsecp256k1_v0_1_1_scalar_sqr(&x14, &x8);
for (i = 0; i < 5; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(&x14, &x14);
rustsecp256k1_v0_1_1_scalar_sqr(&x14, &x14);
}
rustsecp256k1_v0_1_0_scalar_mul(&x14, &x14, &x6);
rustsecp256k1_v0_1_1_scalar_mul(&x14, &x14, &x6);

rustsecp256k1_v0_1_0_scalar_sqr(&x28, &x14);
rustsecp256k1_v0_1_1_scalar_sqr(&x28, &x14);
for (i = 0; i < 13; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(&x28, &x28);
rustsecp256k1_v0_1_1_scalar_sqr(&x28, &x28);
}
rustsecp256k1_v0_1_0_scalar_mul(&x28, &x28, &x14);
rustsecp256k1_v0_1_1_scalar_mul(&x28, &x28, &x14);

rustsecp256k1_v0_1_0_scalar_sqr(&x56, &x28);
rustsecp256k1_v0_1_1_scalar_sqr(&x56, &x28);
for (i = 0; i < 27; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(&x56, &x56);
rustsecp256k1_v0_1_1_scalar_sqr(&x56, &x56);
}
rustsecp256k1_v0_1_0_scalar_mul(&x56, &x56, &x28);
rustsecp256k1_v0_1_1_scalar_mul(&x56, &x56, &x28);

rustsecp256k1_v0_1_0_scalar_sqr(&x112, &x56);
rustsecp256k1_v0_1_1_scalar_sqr(&x112, &x56);
for (i = 0; i < 55; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(&x112, &x112);
rustsecp256k1_v0_1_1_scalar_sqr(&x112, &x112);
}
rustsecp256k1_v0_1_0_scalar_mul(&x112, &x112, &x56);
rustsecp256k1_v0_1_1_scalar_mul(&x112, &x112, &x56);

rustsecp256k1_v0_1_0_scalar_sqr(&x126, &x112);
rustsecp256k1_v0_1_1_scalar_sqr(&x126, &x112);
for (i = 0; i < 13; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(&x126, &x126);
rustsecp256k1_v0_1_1_scalar_sqr(&x126, &x126);
}
rustsecp256k1_v0_1_0_scalar_mul(&x126, &x126, &x14);
rustsecp256k1_v0_1_1_scalar_mul(&x126, &x126, &x14);

/* Then accumulate the final result (t starts at x126). */
t = &x126;
for (i = 0; i < 3; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u5); /* 101 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u5); /* 101 */
for (i = 0; i < 4; i++) { /* 0 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &x3); /* 111 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &x3); /* 111 */
for (i = 0; i < 4; i++) { /* 0 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u5); /* 101 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u5); /* 101 */
for (i = 0; i < 5; i++) { /* 0 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u11); /* 1011 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u11); /* 1011 */
for (i = 0; i < 4; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u11); /* 1011 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u11); /* 1011 */
for (i = 0; i < 4; i++) { /* 0 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &x3); /* 111 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &x3); /* 111 */
for (i = 0; i < 5; i++) { /* 00 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &x3); /* 111 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &x3); /* 111 */
for (i = 0; i < 6; i++) { /* 00 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u13); /* 1101 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u13); /* 1101 */
for (i = 0; i < 4; i++) { /* 0 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u5); /* 101 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u5); /* 101 */
for (i = 0; i < 3; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &x3); /* 111 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &x3); /* 111 */
for (i = 0; i < 5; i++) { /* 0 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u9); /* 1001 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u9); /* 1001 */
for (i = 0; i < 6; i++) { /* 000 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u5); /* 101 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u5); /* 101 */
for (i = 0; i < 10; i++) { /* 0000000 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &x3); /* 111 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &x3); /* 111 */
for (i = 0; i < 4; i++) { /* 0 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &x3); /* 111 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &x3); /* 111 */
for (i = 0; i < 9; i++) { /* 0 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &x8); /* 11111111 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &x8); /* 11111111 */
for (i = 0; i < 5; i++) { /* 0 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u9); /* 1001 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u9); /* 1001 */
for (i = 0; i < 6; i++) { /* 00 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u11); /* 1011 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u11); /* 1011 */
for (i = 0; i < 4; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u13); /* 1101 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u13); /* 1101 */
for (i = 0; i < 5; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &x2); /* 11 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &x2); /* 11 */
for (i = 0; i < 6; i++) { /* 00 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u13); /* 1101 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u13); /* 1101 */
for (i = 0; i < 10; i++) { /* 000000 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u13); /* 1101 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u13); /* 1101 */
for (i = 0; i < 4; i++) {
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, &u9); /* 1001 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, &u9); /* 1001 */
for (i = 0; i < 6; i++) { /* 00000 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(t, t, x); /* 1 */
rustsecp256k1_v0_1_1_scalar_mul(t, t, x); /* 1 */
for (i = 0; i < 8; i++) { /* 00 */
rustsecp256k1_v0_1_0_scalar_sqr(t, t);
rustsecp256k1_v0_1_1_scalar_sqr(t, t);
}
rustsecp256k1_v0_1_0_scalar_mul(r, t, &x6); /* 111111 */
rustsecp256k1_v0_1_1_scalar_mul(r, t, &x6); /* 111111 */
}

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_is_even(const rustsecp256k1_v0_1_0_scalar *a) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_even(const rustsecp256k1_v0_1_1_scalar *a) {
return !(a->d[0] & 1);
}
#endif

static void rustsecp256k1_v0_1_0_scalar_inverse_var(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *x) {
static void rustsecp256k1_v0_1_1_scalar_inverse_var(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *x) {
#if defined(USE_SCALAR_INV_BUILTIN)
rustsecp256k1_v0_1_0_scalar_inverse(r, x);
rustsecp256k1_v0_1_1_scalar_inverse(r, x);
#elif defined(USE_SCALAR_INV_NUM)
unsigned char b[32];
rustsecp256k1_v0_1_0_num n, m;
rustsecp256k1_v0_1_1_num n, m;
rustsecp256k1_v0_1_0_scalar t = *x;
rustsecp256k1_v0_1_1_scalar t = *x;
rustsecp256k1_v0_1_0_scalar_get_b32(b, &t);
rustsecp256k1_v0_1_1_scalar_get_b32(b, &t);
rustsecp256k1_v0_1_0_num_set_bin(&n, b, 32);
rustsecp256k1_v0_1_1_num_set_bin(&n, b, 32);
rustsecp256k1_v0_1_0_scalar_order_get_num(&m);
rustsecp256k1_v0_1_1_scalar_order_get_num(&m);
rustsecp256k1_v0_1_0_num_mod_inverse(&n, &n, &m);
rustsecp256k1_v0_1_1_num_mod_inverse(&n, &n, &m);
rustsecp256k1_v0_1_0_num_get_bin(b, 32, &n);
rustsecp256k1_v0_1_1_num_get_bin(b, 32, &n);
rustsecp256k1_v0_1_0_scalar_set_b32(r, b, NULL);
rustsecp256k1_v0_1_1_scalar_set_b32(r, b, NULL);
/* Verify that the inverse was computed correctly, without GMP code. */
rustsecp256k1_v0_1_0_scalar_mul(&t, &t, r);
rustsecp256k1_v0_1_1_scalar_mul(&t, &t, r);
CHECK(rustsecp256k1_v0_1_0_scalar_is_one(&t));
CHECK(rustsecp256k1_v0_1_1_scalar_is_one(&t));
#else
#error "Please select scalar inverse implementation"
#endif
@@ -251,7 +251,7 @@ static void rustsecp256k1_v0_1_0_scalar_inverse_var(rustsecp256k1_v0_1_0_scalar
* nontrivial to get full test coverage for the exhaustive tests. We therefore
* (arbitrarily) set k2 = k + 5 and k1 = k - k2 * lambda.
*/
static void rustsecp256k1_v0_1_0_scalar_split_lambda(rustsecp256k1_v0_1_0_scalar *r1, rustsecp256k1_v0_1_0_scalar *r2, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_split_lambda(rustsecp256k1_v0_1_1_scalar *r1, rustsecp256k1_v0_1_1_scalar *r2, const rustsecp256k1_v0_1_1_scalar *a) {
*r2 = (*a + 5) % EXHAUSTIVE_TEST_ORDER;
*r1 = (*a + (EXHAUSTIVE_TEST_ORDER - *r2) * EXHAUSTIVE_TEST_LAMBDA) % EXHAUSTIVE_TEST_ORDER;
}
@@ -294,38 +294,38 @@ static void rustsecp256k1_v0_1_0_scalar_split_lambda(rustsecp256k1_v0_1_0_scalar
* The function below splits a in r1 and r2, such that r1 + lambda * r2 == a (mod order).
*/

static void rustsecp256k1_v0_1_0_scalar_split_lambda(rustsecp256k1_v0_1_0_scalar *r1, rustsecp256k1_v0_1_0_scalar *r2, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_split_lambda(rustsecp256k1_v0_1_1_scalar *r1, rustsecp256k1_v0_1_1_scalar *r2, const rustsecp256k1_v0_1_1_scalar *a) {
rustsecp256k1_v0_1_0_scalar c1, c2;
rustsecp256k1_v0_1_1_scalar c1, c2;
static const rustsecp256k1_v0_1_0_scalar minus_lambda = SECP256K1_SCALAR_CONST(
static const rustsecp256k1_v0_1_1_scalar minus_lambda = SECP256K1_SCALAR_CONST(
0xAC9C52B3UL, 0x3FA3CF1FUL, 0x5AD9E3FDUL, 0x77ED9BA4UL,
0xA880B9FCUL, 0x8EC739C2UL, 0xE0CFC810UL, 0xB51283CFUL
);
static const rustsecp256k1_v0_1_0_scalar minus_b1 = SECP256K1_SCALAR_CONST(
static const rustsecp256k1_v0_1_1_scalar minus_b1 = SECP256K1_SCALAR_CONST(
0x00000000UL, 0x00000000UL, 0x00000000UL, 0x00000000UL,
0xE4437ED6UL, 0x010E8828UL, 0x6F547FA9UL, 0x0ABFE4C3UL
);
static const rustsecp256k1_v0_1_0_scalar minus_b2 = SECP256K1_SCALAR_CONST(
static const rustsecp256k1_v0_1_1_scalar minus_b2 = SECP256K1_SCALAR_CONST(
0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFEUL,
0x8A280AC5UL, 0x0774346DUL, 0xD765CDA8UL, 0x3DB1562CUL
);
static const rustsecp256k1_v0_1_0_scalar g1 = SECP256K1_SCALAR_CONST(
static const rustsecp256k1_v0_1_1_scalar g1 = SECP256K1_SCALAR_CONST(
0x00000000UL, 0x00000000UL, 0x00000000UL, 0x00003086UL,
0xD221A7D4UL, 0x6BCDE86CUL, 0x90E49284UL, 0xEB153DABUL
);
static const rustsecp256k1_v0_1_0_scalar g2 = SECP256K1_SCALAR_CONST(
static const rustsecp256k1_v0_1_1_scalar g2 = SECP256K1_SCALAR_CONST(
0x00000000UL, 0x00000000UL, 0x00000000UL, 0x0000E443UL,
0x7ED6010EUL, 0x88286F54UL, 0x7FA90ABFUL, 0xE4C42212UL
);
VERIFY_CHECK(r1 != a);
VERIFY_CHECK(r2 != a);
/* these _var calls are constant time since the shift amount is constant */
rustsecp256k1_v0_1_0_scalar_mul_shift_var(&c1, a, &g1, 272);
rustsecp256k1_v0_1_1_scalar_mul_shift_var(&c1, a, &g1, 272);
rustsecp256k1_v0_1_0_scalar_mul_shift_var(&c2, a, &g2, 272);
rustsecp256k1_v0_1_1_scalar_mul_shift_var(&c2, a, &g2, 272);
rustsecp256k1_v0_1_0_scalar_mul(&c1, &c1, &minus_b1);
rustsecp256k1_v0_1_1_scalar_mul(&c1, &c1, &minus_b1);
rustsecp256k1_v0_1_0_scalar_mul(&c2, &c2, &minus_b2);
rustsecp256k1_v0_1_1_scalar_mul(&c2, &c2, &minus_b2);
rustsecp256k1_v0_1_0_scalar_add(r2, &c1, &c2);
rustsecp256k1_v0_1_1_scalar_add(r2, &c1, &c2);
rustsecp256k1_v0_1_0_scalar_mul(r1, r2, &minus_lambda);
rustsecp256k1_v0_1_1_scalar_mul(r1, r2, &minus_lambda);
rustsecp256k1_v0_1_0_scalar_add(r1, r1, a);
rustsecp256k1_v0_1_1_scalar_add(r1, r1, a);
}
#endif
#endif
@@ -10,6 +10,6 @@
#include <stdint.h>

/** A scalar modulo the group order of the secp256k1 curve. */
typedef uint32_t rustsecp256k1_v0_1_0_scalar;
typedef uint32_t rustsecp256k1_v0_1_1_scalar;

#endif /* SECP256K1_SCALAR_REPR_H */
@@ -11,40 +11,40 @@

#include <string.h>

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_is_even(const rustsecp256k1_v0_1_0_scalar *a) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_even(const rustsecp256k1_v0_1_1_scalar *a) {
return !(*a & 1);
}

SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_clear(rustsecp256k1_v0_1_0_scalar *r) { *r = 0; }
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_clear(rustsecp256k1_v0_1_1_scalar *r) { *r = 0; }
SECP256K1_INLINE static void rustsecp256k1_v0_1_0_scalar_set_int(rustsecp256k1_v0_1_0_scalar *r, unsigned int v) { *r = v; }
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_set_int(rustsecp256k1_v0_1_1_scalar *r, unsigned int v) { *r = v; }

SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count) {
SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count) {
if (offset < 32)
return ((*a >> offset) & ((((uint32_t)1) << count) - 1));
else
return 0;
}

SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits_var(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count) {
SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits_var(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count) {
return rustsecp256k1_v0_1_0_scalar_get_bits(a, offset, count);
return rustsecp256k1_v0_1_1_scalar_get_bits(a, offset, count);
}

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_check_overflow(const rustsecp256k1_v0_1_0_scalar *a) { return *a >= EXHAUSTIVE_TEST_ORDER; }
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_check_overflow(const rustsecp256k1_v0_1_1_scalar *a) { return *a >= EXHAUSTIVE_TEST_ORDER; }

static int rustsecp256k1_v0_1_0_scalar_add(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
static int rustsecp256k1_v0_1_1_scalar_add(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
*r = (*a + *b) % EXHAUSTIVE_TEST_ORDER;
return *r < *b;
}

static void rustsecp256k1_v0_1_0_scalar_cadd_bit(rustsecp256k1_v0_1_0_scalar *r, unsigned int bit, int flag) {
static void rustsecp256k1_v0_1_1_scalar_cadd_bit(rustsecp256k1_v0_1_1_scalar *r, unsigned int bit, int flag) {
if (flag && bit < 32)
*r += (1 << bit);
#ifdef VERIFY
VERIFY_CHECK(rustsecp256k1_v0_1_0_scalar_check_overflow(r) == 0);
VERIFY_CHECK(rustsecp256k1_v0_1_1_scalar_check_overflow(r) == 0);
#endif
}

static void rustsecp256k1_v0_1_0_scalar_set_b32(rustsecp256k1_v0_1_0_scalar *r, const unsigned char *b32, int *overflow) {
static void rustsecp256k1_v0_1_1_scalar_set_b32(rustsecp256k1_v0_1_1_scalar *r, const unsigned char *b32, int *overflow) {
const int base = 0x100 % EXHAUSTIVE_TEST_ORDER;
int i;
*r = 0;
@@ -55,16 +55,16 @@ static void rustsecp256k1_v0_1_0_scalar_set_b32(rustsecp256k1_v0_1_0_scalar *r,
if (overflow) *overflow = 0;
}

static void rustsecp256k1_v0_1_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_0_scalar* a) {
static void rustsecp256k1_v0_1_1_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_1_scalar* a) {
memset(bin, 0, 32);
bin[28] = *a >> 24; bin[29] = *a >> 16; bin[30] = *a >> 8; bin[31] = *a;
}

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_is_zero(const rustsecp256k1_v0_1_0_scalar *a) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_zero(const rustsecp256k1_v0_1_1_scalar *a) {
return *a == 0;
}

static void rustsecp256k1_v0_1_0_scalar_negate(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_negate(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a) {
if (*a == 0) {
*r = 0;
} else {
@@ -72,24 +72,24 @@ static void rustsecp256k1_v0_1_0_scalar_negate(rustsecp256k1_v0_1_0_scalar *r, c
}
}

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_is_one(const rustsecp256k1_v0_1_0_scalar *a) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_one(const rustsecp256k1_v0_1_1_scalar *a) {
return *a == 1;
}

static int rustsecp256k1_v0_1_0_scalar_is_high(const rustsecp256k1_v0_1_0_scalar *a) {
static int rustsecp256k1_v0_1_1_scalar_is_high(const rustsecp256k1_v0_1_1_scalar *a) {
return *a > EXHAUSTIVE_TEST_ORDER / 2;
}

static int rustsecp256k1_v0_1_0_scalar_cond_negate(rustsecp256k1_v0_1_0_scalar *r, int flag) {
static int rustsecp256k1_v0_1_1_scalar_cond_negate(rustsecp256k1_v0_1_1_scalar *r, int flag) {
if (flag) rustsecp256k1_v0_1_0_scalar_negate(r, r);
if (flag) rustsecp256k1_v0_1_1_scalar_negate(r, r);
return flag ? -1 : 1;
}

static void rustsecp256k1_v0_1_0_scalar_mul(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
static void rustsecp256k1_v0_1_1_scalar_mul(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
*r = (*a * *b) % EXHAUSTIVE_TEST_ORDER;
}

static int rustsecp256k1_v0_1_0_scalar_shr_int(rustsecp256k1_v0_1_0_scalar *r, int n) {
static int rustsecp256k1_v0_1_1_scalar_shr_int(rustsecp256k1_v0_1_1_scalar *r, int n) {
int ret;
VERIFY_CHECK(n > 0);
VERIFY_CHECK(n < 16);
@@ -98,16 +98,16 @@ static int rustsecp256k1_v0_1_0_scalar_shr_int(rustsecp256k1_v0_1_0_scalar *r, i
return ret;
}

static void rustsecp256k1_v0_1_0_scalar_sqr(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_sqr(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a) {
*r = (*a * *a) % EXHAUSTIVE_TEST_ORDER;
}

static void rustsecp256k1_v0_1_0_scalar_split_128(rustsecp256k1_v0_1_0_scalar *r1, rustsecp256k1_v0_1_0_scalar *r2, const rustsecp256k1_v0_1_0_scalar *a) {
static void rustsecp256k1_v0_1_1_scalar_split_128(rustsecp256k1_v0_1_1_scalar *r1, rustsecp256k1_v0_1_1_scalar *r2, const rustsecp256k1_v0_1_1_scalar *a) {
*r1 = *a;
*r2 = 0;
}

SECP256K1_INLINE static int rustsecp256k1_v0_1_0_scalar_eq(const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b) {
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_eq(const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
return *a == *b;
}
@@ -9,7 +9,7 @@
 
 /* The typedef is used internally; the struct name is used in the public API
  * (where it is exposed as a different typedef) */
-typedef struct rustsecp256k1_v0_1_0_scratch_space_struct {
+typedef struct rustsecp256k1_v0_1_1_scratch_space_struct {
     /** guard against interpreting this object as other types */
     unsigned char magic[8];
     /** actual allocated data */
@@ -19,24 +19,24 @@ typedef struct rustsecp256k1_v0_1_0_scratch_space_struct {
     size_t alloc_size;
     /** maximum size available to allocate */
     size_t max_size;
-} rustsecp256k1_v0_1_0_scratch;
+} rustsecp256k1_v0_1_1_scratch;
 
-static rustsecp256k1_v0_1_0_scratch* rustsecp256k1_v0_1_0_scratch_create(const rustsecp256k1_v0_1_0_callback* error_callback, size_t max_size);
+static rustsecp256k1_v0_1_1_scratch* rustsecp256k1_v0_1_1_scratch_create(const rustsecp256k1_v0_1_1_callback* error_callback, size_t max_size);
 
-static void rustsecp256k1_v0_1_0_scratch_destroy(const rustsecp256k1_v0_1_0_callback* error_callback, rustsecp256k1_v0_1_0_scratch* scratch);
+static void rustsecp256k1_v0_1_1_scratch_destroy(const rustsecp256k1_v0_1_1_callback* error_callback, rustsecp256k1_v0_1_1_scratch* scratch);
 
 /** Returns an opaque object used to "checkpoint" a scratch space. Used
- * with `rustsecp256k1_v0_1_0_scratch_apply_checkpoint` to undo allocations. */
-static size_t rustsecp256k1_v0_1_0_scratch_checkpoint(const rustsecp256k1_v0_1_0_callback* error_callback, const rustsecp256k1_v0_1_0_scratch* scratch);
+ * with `rustsecp256k1_v0_1_1_scratch_apply_checkpoint` to undo allocations. */
+static size_t rustsecp256k1_v0_1_1_scratch_checkpoint(const rustsecp256k1_v0_1_1_callback* error_callback, const rustsecp256k1_v0_1_1_scratch* scratch);
 
-/** Applies a check point received from `rustsecp256k1_v0_1_0_scratch_checkpoint`,
+/** Applies a check point received from `rustsecp256k1_v0_1_1_scratch_checkpoint`,
  * undoing all allocations since that point. */
-static void rustsecp256k1_v0_1_0_scratch_apply_checkpoint(const rustsecp256k1_v0_1_0_callback* error_callback, rustsecp256k1_v0_1_0_scratch* scratch, size_t checkpoint);
+static void rustsecp256k1_v0_1_1_scratch_apply_checkpoint(const rustsecp256k1_v0_1_1_callback* error_callback, rustsecp256k1_v0_1_1_scratch* scratch, size_t checkpoint);
 
 /** Returns the maximum allocation the scratch space will allow */
-static size_t rustsecp256k1_v0_1_0_scratch_max_allocation(const rustsecp256k1_v0_1_0_callback* error_callback, const rustsecp256k1_v0_1_0_scratch* scratch, size_t n_objects);
+static size_t rustsecp256k1_v0_1_1_scratch_max_allocation(const rustsecp256k1_v0_1_1_callback* error_callback, const rustsecp256k1_v0_1_1_scratch* scratch, size_t n_objects);
 
 /** Returns a pointer into the most recently allocated frame, or NULL if there is insufficient available space */
-static void *rustsecp256k1_v0_1_0_scratch_alloc(const rustsecp256k1_v0_1_0_callback* error_callback, rustsecp256k1_v0_1_0_scratch* scratch, size_t n);
+static void *rustsecp256k1_v0_1_1_scratch_alloc(const rustsecp256k1_v0_1_1_callback* error_callback, rustsecp256k1_v0_1_1_scratch* scratch, size_t n);
 
 #endif
@@ -10,29 +10,29 @@
 #include "util.h"
 #include "scratch.h"
 
-static size_t rustsecp256k1_v0_1_0_scratch_checkpoint(const rustsecp256k1_v0_1_0_callback* error_callback, const rustsecp256k1_v0_1_0_scratch* scratch) {
+static size_t rustsecp256k1_v0_1_1_scratch_checkpoint(const rustsecp256k1_v0_1_1_callback* error_callback, const rustsecp256k1_v0_1_1_scratch* scratch) {
     if (memcmp(scratch->magic, "scratch", 8) != 0) {
-        rustsecp256k1_v0_1_0_callback_call(error_callback, "invalid scratch space");
+        rustsecp256k1_v0_1_1_callback_call(error_callback, "invalid scratch space");
         return 0;
     }
     return scratch->alloc_size;
 }
 
-static void rustsecp256k1_v0_1_0_scratch_apply_checkpoint(const rustsecp256k1_v0_1_0_callback* error_callback, rustsecp256k1_v0_1_0_scratch* scratch, size_t checkpoint) {
+static void rustsecp256k1_v0_1_1_scratch_apply_checkpoint(const rustsecp256k1_v0_1_1_callback* error_callback, rustsecp256k1_v0_1_1_scratch* scratch, size_t checkpoint) {
     if (memcmp(scratch->magic, "scratch", 8) != 0) {
-        rustsecp256k1_v0_1_0_callback_call(error_callback, "invalid scratch space");
+        rustsecp256k1_v0_1_1_callback_call(error_callback, "invalid scratch space");
         return;
     }
     if (checkpoint > scratch->alloc_size) {
-        rustsecp256k1_v0_1_0_callback_call(error_callback, "invalid checkpoint");
+        rustsecp256k1_v0_1_1_callback_call(error_callback, "invalid checkpoint");
         return;
     }
     scratch->alloc_size = checkpoint;
 }
 
-static size_t rustsecp256k1_v0_1_0_scratch_max_allocation(const rustsecp256k1_v0_1_0_callback* error_callback, const rustsecp256k1_v0_1_0_scratch* scratch, size_t objects) {
+static size_t rustsecp256k1_v0_1_1_scratch_max_allocation(const rustsecp256k1_v0_1_1_callback* error_callback, const rustsecp256k1_v0_1_1_scratch* scratch, size_t objects) {
     if (memcmp(scratch->magic, "scratch", 8) != 0) {
-        rustsecp256k1_v0_1_0_callback_call(error_callback, "invalid scratch space");
+        rustsecp256k1_v0_1_1_callback_call(error_callback, "invalid scratch space");
         return 0;
     }
     if (scratch->max_size - scratch->alloc_size <= objects * (ALIGNMENT - 1)) {
@@ -41,12 +41,12 @@ static size_t rustsecp256k1_v0_1_0_scratch_max_allocation(const rustsecp256k1_v0
     return scratch->max_size - scratch->alloc_size - objects * (ALIGNMENT - 1);
 }
 
-static void *rustsecp256k1_v0_1_0_scratch_alloc(const rustsecp256k1_v0_1_0_callback* error_callback, rustsecp256k1_v0_1_0_scratch* scratch, size_t size) {
+static void *rustsecp256k1_v0_1_1_scratch_alloc(const rustsecp256k1_v0_1_1_callback* error_callback, rustsecp256k1_v0_1_1_scratch* scratch, size_t size) {
     void *ret;
     size = ROUND_TO_ALIGN(size);
 
     if (memcmp(scratch->magic, "scratch", 8) != 0) {
-        rustsecp256k1_v0_1_0_callback_call(error_callback, "invalid scratch space");
+        rustsecp256k1_v0_1_1_callback_call(error_callback, "invalid scratch space");
         return NULL;
     }
 
@@ -22,65 +22,65 @@
 
 #define ARG_CHECK(cond) do { \
     if (EXPECT(!(cond), 0)) { \
-        rustsecp256k1_v0_1_0_callback_call(&ctx->illegal_callback, #cond); \
+        rustsecp256k1_v0_1_1_callback_call(&ctx->illegal_callback, #cond); \
         return 0; \
     } \
 } while(0)
 
 #define ARG_CHECK_NO_RETURN(cond) do { \
     if (EXPECT(!(cond), 0)) { \
-        rustsecp256k1_v0_1_0_callback_call(&ctx->illegal_callback, #cond); \
+        rustsecp256k1_v0_1_1_callback_call(&ctx->illegal_callback, #cond); \
     } \
 } while(0)
 
 #ifndef USE_EXTERNAL_DEFAULT_CALLBACKS
 #include <stdlib.h>
 #include <stdio.h>
-static void rustsecp256k1_v0_1_0_default_illegal_callback_fn(const char* str, void* data) {
+static void rustsecp256k1_v0_1_1_default_illegal_callback_fn(const char* str, void* data) {
     (void)data;
     fprintf(stderr, "[libsecp256k1] illegal argument: %s\n", str);
     abort();
 }
-static void rustsecp256k1_v0_1_0_default_error_callback_fn(const char* str, void* data) {
+static void rustsecp256k1_v0_1_1_default_error_callback_fn(const char* str, void* data) {
     (void)data;
     fprintf(stderr, "[libsecp256k1] internal consistency check failed: %s\n", str);
     abort();
 }
 #else
-void rustsecp256k1_v0_1_0_default_illegal_callback_fn(const char* str, void* data);
-void rustsecp256k1_v0_1_0_default_error_callback_fn(const char* str, void* data);
+void rustsecp256k1_v0_1_1_default_illegal_callback_fn(const char* str, void* data);
+void rustsecp256k1_v0_1_1_default_error_callback_fn(const char* str, void* data);
 #endif
 
-static const rustsecp256k1_v0_1_0_callback default_illegal_callback = {
-    rustsecp256k1_v0_1_0_default_illegal_callback_fn,
+static const rustsecp256k1_v0_1_1_callback default_illegal_callback = {
+    rustsecp256k1_v0_1_1_default_illegal_callback_fn,
     NULL
 };
 
-static const rustsecp256k1_v0_1_0_callback default_error_callback = {
-    rustsecp256k1_v0_1_0_default_error_callback_fn,
+static const rustsecp256k1_v0_1_1_callback default_error_callback = {
+    rustsecp256k1_v0_1_1_default_error_callback_fn,
     NULL
 };
 
-struct rustsecp256k1_v0_1_0_context_struct {
-    rustsecp256k1_v0_1_0_ecmult_context ecmult_ctx;
-    rustsecp256k1_v0_1_0_ecmult_gen_context ecmult_gen_ctx;
-    rustsecp256k1_v0_1_0_callback illegal_callback;
-    rustsecp256k1_v0_1_0_callback error_callback;
+struct rustsecp256k1_v0_1_1_context_struct {
+    rustsecp256k1_v0_1_1_ecmult_context ecmult_ctx;
+    rustsecp256k1_v0_1_1_ecmult_gen_context ecmult_gen_ctx;
+    rustsecp256k1_v0_1_1_callback illegal_callback;
+    rustsecp256k1_v0_1_1_callback error_callback;
 };
 
-static const rustsecp256k1_v0_1_0_context rustsecp256k1_v0_1_0_context_no_precomp_ = {
+static const rustsecp256k1_v0_1_1_context rustsecp256k1_v0_1_1_context_no_precomp_ = {
     { 0 },
     { 0 },
-    { rustsecp256k1_v0_1_0_default_illegal_callback_fn, 0 },
-    { rustsecp256k1_v0_1_0_default_error_callback_fn, 0 }
+    { rustsecp256k1_v0_1_1_default_illegal_callback_fn, 0 },
+    { rustsecp256k1_v0_1_1_default_error_callback_fn, 0 }
 };
-const rustsecp256k1_v0_1_0_context *rustsecp256k1_v0_1_0_context_no_precomp = &rustsecp256k1_v0_1_0_context_no_precomp_;
+const rustsecp256k1_v0_1_1_context *rustsecp256k1_v0_1_1_context_no_precomp = &rustsecp256k1_v0_1_1_context_no_precomp_;
 
-size_t rustsecp256k1_v0_1_0_context_preallocated_size(unsigned int flags) {
-    size_t ret = ROUND_TO_ALIGN(sizeof(rustsecp256k1_v0_1_0_context));
+size_t rustsecp256k1_v0_1_1_context_preallocated_size(unsigned int flags) {
+    size_t ret = ROUND_TO_ALIGN(sizeof(rustsecp256k1_v0_1_1_context));
 
     if (EXPECT((flags & SECP256K1_FLAGS_TYPE_MASK) != SECP256K1_FLAGS_TYPE_CONTEXT, 0)) {
-            rustsecp256k1_v0_1_0_callback_call(&default_illegal_callback,
+            rustsecp256k1_v0_1_1_callback_call(&default_illegal_callback,
                                    "Invalid flags");
             return 0;
     }
@ -94,138 +94,138 @@ size_t rustsecp256k1_v0_1_0_context_preallocated_size(unsigned int flags) {
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
size_t rustsecp256k1_v0_1_0_context_preallocated_clone_size(const rustsecp256k1_v0_1_0_context* ctx) {
|
size_t rustsecp256k1_v0_1_1_context_preallocated_clone_size(const rustsecp256k1_v0_1_1_context* ctx) {
|
||||||
size_t ret = ROUND_TO_ALIGN(sizeof(rustsecp256k1_v0_1_0_context));
|
size_t ret = ROUND_TO_ALIGN(sizeof(rustsecp256k1_v0_1_1_context));
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
if (rustsecp256k1_v0_1_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)) {
|
if (rustsecp256k1_v0_1_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)) {
|
||||||
ret += SECP256K1_ECMULT_GEN_CONTEXT_PREALLOCATED_SIZE;
|
ret += SECP256K1_ECMULT_GEN_CONTEXT_PREALLOCATED_SIZE;
|
||||||
}
|
}
|
||||||
if (rustsecp256k1_v0_1_0_ecmult_context_is_built(&ctx->ecmult_ctx)) {
|
if (rustsecp256k1_v0_1_1_ecmult_context_is_built(&ctx->ecmult_ctx)) {
|
||||||
ret += SECP256K1_ECMULT_CONTEXT_PREALLOCATED_SIZE;
|
ret += SECP256K1_ECMULT_CONTEXT_PREALLOCATED_SIZE;
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_context* rustsecp256k1_v0_1_0_context_preallocated_create(void* prealloc, unsigned int flags) {
|
rustsecp256k1_v0_1_1_context* rustsecp256k1_v0_1_1_context_preallocated_create(void* prealloc, unsigned int flags) {
|
||||||
void* const base = prealloc;
|
void* const base = prealloc;
|
||||||
size_t prealloc_size;
|
size_t prealloc_size;
|
||||||
rustsecp256k1_v0_1_0_context* ret;
|
rustsecp256k1_v0_1_1_context* ret;
|
||||||
|
|
||||||
VERIFY_CHECK(prealloc != NULL);
|
VERIFY_CHECK(prealloc != NULL);
|
||||||
prealloc_size = rustsecp256k1_v0_1_0_context_preallocated_size(flags);
|
prealloc_size = rustsecp256k1_v0_1_1_context_preallocated_size(flags);
|
||||||
ret = (rustsecp256k1_v0_1_0_context*)manual_alloc(&prealloc, sizeof(rustsecp256k1_v0_1_0_context), base, prealloc_size);
|
ret = (rustsecp256k1_v0_1_1_context*)manual_alloc(&prealloc, sizeof(rustsecp256k1_v0_1_1_context), base, prealloc_size);
|
||||||
ret->illegal_callback = default_illegal_callback;
|
ret->illegal_callback = default_illegal_callback;
|
||||||
ret->error_callback = default_error_callback;
|
ret->error_callback = default_error_callback;
|
||||||
|
|
||||||
if (EXPECT((flags & SECP256K1_FLAGS_TYPE_MASK) != SECP256K1_FLAGS_TYPE_CONTEXT, 0)) {
|
if (EXPECT((flags & SECP256K1_FLAGS_TYPE_MASK) != SECP256K1_FLAGS_TYPE_CONTEXT, 0)) {
|
||||||
rustsecp256k1_v0_1_0_callback_call(&ret->illegal_callback,
|
rustsecp256k1_v0_1_1_callback_call(&ret->illegal_callback,
|
||||||
"Invalid flags");
|
"Invalid flags");
|
||||||
return NULL;
|
return NULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_ecmult_context_init(&ret->ecmult_ctx);
|
rustsecp256k1_v0_1_1_ecmult_context_init(&ret->ecmult_ctx);
|
||||||
rustsecp256k1_v0_1_0_ecmult_gen_context_init(&ret->ecmult_gen_ctx);
|
rustsecp256k1_v0_1_1_ecmult_gen_context_init(&ret->ecmult_gen_ctx);
|
||||||
|
|
||||||
if (flags & SECP256K1_FLAGS_BIT_CONTEXT_SIGN) {
|
if (flags & SECP256K1_FLAGS_BIT_CONTEXT_SIGN) {
|
||||||
rustsecp256k1_v0_1_0_ecmult_gen_context_build(&ret->ecmult_gen_ctx, &prealloc);
|
rustsecp256k1_v0_1_1_ecmult_gen_context_build(&ret->ecmult_gen_ctx, &prealloc);
|
||||||
}
|
}
|
||||||
if (flags & SECP256K1_FLAGS_BIT_CONTEXT_VERIFY) {
|
if (flags & SECP256K1_FLAGS_BIT_CONTEXT_VERIFY) {
|
||||||
rustsecp256k1_v0_1_0_ecmult_context_build(&ret->ecmult_ctx, &prealloc);
|
rustsecp256k1_v0_1_1_ecmult_context_build(&ret->ecmult_ctx, &prealloc);
|
||||||
}
|
}
|
||||||
|
|
||||||
return (rustsecp256k1_v0_1_0_context*) ret;
|
return (rustsecp256k1_v0_1_1_context*) ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_context* rustsecp256k1_v0_1_0_context_preallocated_clone(const rustsecp256k1_v0_1_0_context* ctx, void* prealloc) {
|
rustsecp256k1_v0_1_1_context* rustsecp256k1_v0_1_1_context_preallocated_clone(const rustsecp256k1_v0_1_1_context* ctx, void* prealloc) {
|
||||||
size_t prealloc_size;
|
size_t prealloc_size;
|
||||||
rustsecp256k1_v0_1_0_context* ret;
|
rustsecp256k1_v0_1_1_context* ret;
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(prealloc != NULL);
|
ARG_CHECK(prealloc != NULL);
|
||||||
|
|
||||||
prealloc_size = rustsecp256k1_v0_1_0_context_preallocated_clone_size(ctx);
|
prealloc_size = rustsecp256k1_v0_1_1_context_preallocated_clone_size(ctx);
|
||||||
ret = (rustsecp256k1_v0_1_0_context*)prealloc;
|
ret = (rustsecp256k1_v0_1_1_context*)prealloc;
|
||||||
memcpy(ret, ctx, prealloc_size);
|
memcpy(ret, ctx, prealloc_size);
|
||||||
rustsecp256k1_v0_1_0_ecmult_gen_context_finalize_memcpy(&ret->ecmult_gen_ctx, &ctx->ecmult_gen_ctx);
|
rustsecp256k1_v0_1_1_ecmult_gen_context_finalize_memcpy(&ret->ecmult_gen_ctx, &ctx->ecmult_gen_ctx);
|
||||||
rustsecp256k1_v0_1_0_ecmult_context_finalize_memcpy(&ret->ecmult_ctx, &ctx->ecmult_ctx);
|
rustsecp256k1_v0_1_1_ecmult_context_finalize_memcpy(&ret->ecmult_ctx, &ctx->ecmult_ctx);
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
void rustsecp256k1_v0_1_0_context_preallocated_destroy(rustsecp256k1_v0_1_0_context* ctx) {
|
void rustsecp256k1_v0_1_1_context_preallocated_destroy(rustsecp256k1_v0_1_1_context* ctx) {
|
||||||
ARG_CHECK_NO_RETURN(ctx != rustsecp256k1_v0_1_0_context_no_precomp);
|
ARG_CHECK_NO_RETURN(ctx != rustsecp256k1_v0_1_1_context_no_precomp);
|
||||||
if (ctx != NULL) {
|
if (ctx != NULL) {
|
||||||
rustsecp256k1_v0_1_0_ecmult_context_clear(&ctx->ecmult_ctx);
|
rustsecp256k1_v0_1_1_ecmult_context_clear(&ctx->ecmult_ctx);
|
||||||
rustsecp256k1_v0_1_0_ecmult_gen_context_clear(&ctx->ecmult_gen_ctx);
|
rustsecp256k1_v0_1_1_ecmult_gen_context_clear(&ctx->ecmult_gen_ctx);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void rustsecp256k1_v0_1_0_context_set_illegal_callback(rustsecp256k1_v0_1_0_context* ctx, void (*fun)(const char* message, void* data), const void* data) {
|
void rustsecp256k1_v0_1_1_context_set_illegal_callback(rustsecp256k1_v0_1_1_context* ctx, void (*fun)(const char* message, void* data), const void* data) {
|
||||||
ARG_CHECK_NO_RETURN(ctx != rustsecp256k1_v0_1_0_context_no_precomp);
|
ARG_CHECK_NO_RETURN(ctx != rustsecp256k1_v0_1_1_context_no_precomp);
|
||||||
if (fun == NULL) {
|
if (fun == NULL) {
|
||||||
fun = rustsecp256k1_v0_1_0_default_illegal_callback_fn;
|
fun = rustsecp256k1_v0_1_1_default_illegal_callback_fn;
|
||||||
}
|
}
|
||||||
ctx->illegal_callback.fn = fun;
|
ctx->illegal_callback.fn = fun;
|
||||||
ctx->illegal_callback.data = data;
|
ctx->illegal_callback.data = data;
|
||||||
}
|
}
|
||||||
|
|
||||||
void rustsecp256k1_v0_1_0_context_set_error_callback(rustsecp256k1_v0_1_0_context* ctx, void (*fun)(const char* message, void* data), const void* data) {
|
void rustsecp256k1_v0_1_1_context_set_error_callback(rustsecp256k1_v0_1_1_context* ctx, void (*fun)(const char* message, void* data), const void* data) {
|
||||||
ARG_CHECK_NO_RETURN(ctx != rustsecp256k1_v0_1_0_context_no_precomp);
|
ARG_CHECK_NO_RETURN(ctx != rustsecp256k1_v0_1_1_context_no_precomp);
|
||||||
if (fun == NULL) {
|
if (fun == NULL) {
|
||||||
fun = rustsecp256k1_v0_1_0_default_error_callback_fn;
|
fun = rustsecp256k1_v0_1_1_default_error_callback_fn;
|
||||||
}
|
}
|
||||||
ctx->error_callback.fn = fun;
|
ctx->error_callback.fn = fun;
|
||||||
ctx->error_callback.data = data;
|
ctx->error_callback.data = data;
|
||||||
}
|
}
|
||||||
|
|
||||||
static int rustsecp256k1_v0_1_0_pubkey_load(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_ge* ge, const rustsecp256k1_v0_1_0_pubkey* pubkey) {
|
static int rustsecp256k1_v0_1_1_pubkey_load(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_ge* ge, const rustsecp256k1_v0_1_1_pubkey* pubkey) {
|
||||||
if (sizeof(rustsecp256k1_v0_1_0_ge_storage) == 64) {
|
if (sizeof(rustsecp256k1_v0_1_1_ge_storage) == 64) {
|
||||||
/* When the rustsecp256k1_v0_1_0_ge_storage type is exactly 64 byte, use its
|
/* When the rustsecp256k1_v0_1_1_ge_storage type is exactly 64 byte, use its
|
||||||
* representation inside rustsecp256k1_v0_1_0_pubkey, as conversion is very fast.
|
* representation inside rustsecp256k1_v0_1_1_pubkey, as conversion is very fast.
|
||||||
* Note that rustsecp256k1_v0_1_0_pubkey_save must use the same representation. */
|
* Note that rustsecp256k1_v0_1_1_pubkey_save must use the same representation. */
|
||||||
rustsecp256k1_v0_1_0_ge_storage s;
|
rustsecp256k1_v0_1_1_ge_storage s;
|
||||||
memcpy(&s, &pubkey->data[0], sizeof(s));
|
memcpy(&s, &pubkey->data[0], sizeof(s));
|
||||||
rustsecp256k1_v0_1_0_ge_from_storage(ge, &s);
|
rustsecp256k1_v0_1_1_ge_from_storage(ge, &s);
|
||||||
} else {
|
} else {
|
||||||
/* Otherwise, fall back to 32-byte big endian for X and Y. */
|
/* Otherwise, fall back to 32-byte big endian for X and Y. */
|
||||||
rustsecp256k1_v0_1_0_fe x, y;
|
rustsecp256k1_v0_1_1_fe x, y;
|
||||||
rustsecp256k1_v0_1_0_fe_set_b32(&x, pubkey->data);
|
rustsecp256k1_v0_1_1_fe_set_b32(&x, pubkey->data);
|
||||||
rustsecp256k1_v0_1_0_fe_set_b32(&y, pubkey->data + 32);
|
rustsecp256k1_v0_1_1_fe_set_b32(&y, pubkey->data + 32);
|
||||||
rustsecp256k1_v0_1_0_ge_set_xy(ge, &x, &y);
|
rustsecp256k1_v0_1_1_ge_set_xy(ge, &x, &y);
|
||||||
}
|
}
|
||||||
ARG_CHECK(!rustsecp256k1_v0_1_0_fe_is_zero(&ge->x));
|
ARG_CHECK(!rustsecp256k1_v0_1_1_fe_is_zero(&ge->x));
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_pubkey_save(rustsecp256k1_v0_1_0_pubkey* pubkey, rustsecp256k1_v0_1_0_ge* ge) {
|
static void rustsecp256k1_v0_1_1_pubkey_save(rustsecp256k1_v0_1_1_pubkey* pubkey, rustsecp256k1_v0_1_1_ge* ge) {
|
||||||
if (sizeof(rustsecp256k1_v0_1_0_ge_storage) == 64) {
|
if (sizeof(rustsecp256k1_v0_1_1_ge_storage) == 64) {
|
||||||
rustsecp256k1_v0_1_0_ge_storage s;
|
rustsecp256k1_v0_1_1_ge_storage s;
|
||||||
rustsecp256k1_v0_1_0_ge_to_storage(&s, ge);
|
rustsecp256k1_v0_1_1_ge_to_storage(&s, ge);
|
||||||
memcpy(&pubkey->data[0], &s, sizeof(s));
|
memcpy(&pubkey->data[0], &s, sizeof(s));
|
||||||
} else {
|
} else {
|
||||||
VERIFY_CHECK(!rustsecp256k1_v0_1_0_ge_is_infinity(ge));
|
VERIFY_CHECK(!rustsecp256k1_v0_1_1_ge_is_infinity(ge));
|
||||||
rustsecp256k1_v0_1_0_fe_normalize_var(&ge->x);
|
rustsecp256k1_v0_1_1_fe_normalize_var(&ge->x);
|
||||||
rustsecp256k1_v0_1_0_fe_normalize_var(&ge->y);
|
rustsecp256k1_v0_1_1_fe_normalize_var(&ge->y);
|
||||||
rustsecp256k1_v0_1_0_fe_get_b32(pubkey->data, &ge->x);
|
rustsecp256k1_v0_1_1_fe_get_b32(pubkey->data, &ge->x);
|
||||||
rustsecp256k1_v0_1_0_fe_get_b32(pubkey->data + 32, &ge->y);
|
rustsecp256k1_v0_1_1_fe_get_b32(pubkey->data + 32, &ge->y);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ec_pubkey_parse(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_pubkey* pubkey, const unsigned char *input, size_t inputlen) {
|
int rustsecp256k1_v0_1_1_ec_pubkey_parse(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_pubkey* pubkey, const unsigned char *input, size_t inputlen) {
|
||||||
rustsecp256k1_v0_1_0_ge Q;
|
rustsecp256k1_v0_1_1_ge Q;
|
||||||
|
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(pubkey != NULL);
|
ARG_CHECK(pubkey != NULL);
|
||||||
memset(pubkey, 0, sizeof(*pubkey));
|
memset(pubkey, 0, sizeof(*pubkey));
|
||||||
ARG_CHECK(input != NULL);
|
ARG_CHECK(input != NULL);
|
||||||
if (!rustsecp256k1_v0_1_0_eckey_pubkey_parse(&Q, input, inputlen)) {
|
if (!rustsecp256k1_v0_1_1_eckey_pubkey_parse(&Q, input, inputlen)) {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_pubkey_save(pubkey, &Q);
|
rustsecp256k1_v0_1_1_pubkey_save(pubkey, &Q);
|
||||||
rustsecp256k1_v0_1_0_ge_clear(&Q);
|
rustsecp256k1_v0_1_1_ge_clear(&Q);
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ec_pubkey_serialize(const rustsecp256k1_v0_1_0_context* ctx, unsigned char *output, size_t *outputlen, const rustsecp256k1_v0_1_0_pubkey* pubkey, unsigned int flags) {
|
int rustsecp256k1_v0_1_1_ec_pubkey_serialize(const rustsecp256k1_v0_1_1_context* ctx, unsigned char *output, size_t *outputlen, const rustsecp256k1_v0_1_1_pubkey* pubkey, unsigned int flags) {
|
||||||
rustsecp256k1_v0_1_0_ge Q;
|
rustsecp256k1_v0_1_1_ge Q;
|
||||||
size_t len;
|
size_t len;
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
|
|
||||||
|
@@ -238,8 +238,8 @@ int rustsecp256k1_v0_1_0_ec_pubkey_serialize(const rustsecp256k1_v0_1_0_context*
     memset(output, 0, len);
     ARG_CHECK(pubkey != NULL);
     ARG_CHECK((flags & SECP256K1_FLAGS_TYPE_MASK) == SECP256K1_FLAGS_TYPE_COMPRESSION);
-    if (rustsecp256k1_v0_1_0_pubkey_load(ctx, &Q, pubkey)) {
-        ret = rustsecp256k1_v0_1_0_eckey_pubkey_serialize(&Q, output, &len, flags & SECP256K1_FLAGS_BIT_COMPRESSION);
+    if (rustsecp256k1_v0_1_1_pubkey_load(ctx, &Q, pubkey)) {
+        ret = rustsecp256k1_v0_1_1_eckey_pubkey_serialize(&Q, output, &len, flags & SECP256K1_FLAGS_BIT_COMPRESSION);
         if (ret) {
             *outputlen = len;
         }
@@ -247,39 +247,39 @@ int rustsecp256k1_v0_1_0_ec_pubkey_serialize(const rustsecp256k1_v0_1_0_context*
     return ret;
 }
 
-static void rustsecp256k1_v0_1_0_ecdsa_signature_load(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_scalar* r, rustsecp256k1_v0_1_0_scalar* s, const rustsecp256k1_v0_1_0_ecdsa_signature* sig) {
+static void rustsecp256k1_v0_1_1_ecdsa_signature_load(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_scalar* r, rustsecp256k1_v0_1_1_scalar* s, const rustsecp256k1_v0_1_1_ecdsa_signature* sig) {
     (void)ctx;
-    if (sizeof(rustsecp256k1_v0_1_0_scalar) == 32) {
-        /* When the rustsecp256k1_v0_1_0_scalar type is exactly 32 byte, use its
-         * representation inside rustsecp256k1_v0_1_0_ecdsa_signature, as conversion is very fast.
-         * Note that rustsecp256k1_v0_1_0_ecdsa_signature_save must use the same representation. */
+    if (sizeof(rustsecp256k1_v0_1_1_scalar) == 32) {
+        /* When the rustsecp256k1_v0_1_1_scalar type is exactly 32 byte, use its
+         * representation inside rustsecp256k1_v0_1_1_ecdsa_signature, as conversion is very fast.
+         * Note that rustsecp256k1_v0_1_1_ecdsa_signature_save must use the same representation. */
         memcpy(r, &sig->data[0], 32);
         memcpy(s, &sig->data[32], 32);
     } else {
-        rustsecp256k1_v0_1_0_scalar_set_b32(r, &sig->data[0], NULL);
-        rustsecp256k1_v0_1_0_scalar_set_b32(s, &sig->data[32], NULL);
+        rustsecp256k1_v0_1_1_scalar_set_b32(r, &sig->data[0], NULL);
+        rustsecp256k1_v0_1_1_scalar_set_b32(s, &sig->data[32], NULL);
     }
 }
 
-static void rustsecp256k1_v0_1_0_ecdsa_signature_save(rustsecp256k1_v0_1_0_ecdsa_signature* sig, const rustsecp256k1_v0_1_0_scalar* r, const rustsecp256k1_v0_1_0_scalar* s) {
-    if (sizeof(rustsecp256k1_v0_1_0_scalar) == 32) {
+static void rustsecp256k1_v0_1_1_ecdsa_signature_save(rustsecp256k1_v0_1_1_ecdsa_signature* sig, const rustsecp256k1_v0_1_1_scalar* r, const rustsecp256k1_v0_1_1_scalar* s) {
+    if (sizeof(rustsecp256k1_v0_1_1_scalar) == 32) {
         memcpy(&sig->data[0], r, 32);
         memcpy(&sig->data[32], s, 32);
     } else {
-        rustsecp256k1_v0_1_0_scalar_get_b32(&sig->data[0], r);
-        rustsecp256k1_v0_1_0_scalar_get_b32(&sig->data[32], s);
+        rustsecp256k1_v0_1_1_scalar_get_b32(&sig->data[0], r);
+        rustsecp256k1_v0_1_1_scalar_get_b32(&sig->data[32], s);
     }
 }
 
-int rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_ecdsa_signature* sig, const unsigned char *input, size_t inputlen) {
-    rustsecp256k1_v0_1_0_scalar r, s;
+int rustsecp256k1_v0_1_1_ecdsa_signature_parse_der(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_ecdsa_signature* sig, const unsigned char *input, size_t inputlen) {
+    rustsecp256k1_v0_1_1_scalar r, s;
 
     VERIFY_CHECK(ctx != NULL);
     ARG_CHECK(sig != NULL);
     ARG_CHECK(input != NULL);
 
-    if (rustsecp256k1_v0_1_0_ecdsa_sig_parse(&r, &s, input, inputlen)) {
-        rustsecp256k1_v0_1_0_ecdsa_signature_save(sig, &r, &s);
+    if (rustsecp256k1_v0_1_1_ecdsa_sig_parse(&r, &s, input, inputlen)) {
+        rustsecp256k1_v0_1_1_ecdsa_signature_save(sig, &r, &s);
         return 1;
     } else {
         memset(sig, 0, sizeof(*sig));
@@ -287,8 +287,8 @@ int rustsecp256k1_v0_1_0_ecdsa_signature_parse_der(const rustsecp256k1_v0_1_0_co
     }
 }
 
-int rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_ecdsa_signature* sig, const unsigned char *input64) {
-    rustsecp256k1_v0_1_0_scalar r, s;
+int rustsecp256k1_v0_1_1_ecdsa_signature_parse_compact(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_ecdsa_signature* sig, const unsigned char *input64) {
+    rustsecp256k1_v0_1_1_scalar r, s;
     int ret = 1;
     int overflow = 0;
 
@ -296,77 +296,77 @@ int rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact(const rustsecp256k1_v0_1_
|
||||||
ARG_CHECK(sig != NULL);
|
ARG_CHECK(sig != NULL);
|
||||||
ARG_CHECK(input64 != NULL);
|
ARG_CHECK(input64 != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&r, &input64[0], &overflow);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&r, &input64[0], &overflow);
|
||||||
ret &= !overflow;
|
ret &= !overflow;
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&s, &input64[32], &overflow);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&s, &input64[32], &overflow);
|
||||||
ret &= !overflow;
|
ret &= !overflow;
|
||||||
if (ret) {
|
if (ret) {
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_save(sig, &r, &s);
|
rustsecp256k1_v0_1_1_ecdsa_signature_save(sig, &r, &s);
|
||||||
} else {
|
} else {
|
||||||
memset(sig, 0, sizeof(*sig));
|
memset(sig, 0, sizeof(*sig));
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ecdsa_signature_serialize_der(const rustsecp256k1_v0_1_0_context* ctx, unsigned char *output, size_t *outputlen, const rustsecp256k1_v0_1_0_ecdsa_signature* sig) {
|
int rustsecp256k1_v0_1_1_ecdsa_signature_serialize_der(const rustsecp256k1_v0_1_1_context* ctx, unsigned char *output, size_t *outputlen, const rustsecp256k1_v0_1_1_ecdsa_signature* sig) {
|
||||||
rustsecp256k1_v0_1_0_scalar r, s;
|
rustsecp256k1_v0_1_1_scalar r, s;
|
||||||
|
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(output != NULL);
|
ARG_CHECK(output != NULL);
|
||||||
ARG_CHECK(outputlen != NULL);
|
ARG_CHECK(outputlen != NULL);
|
||||||
ARG_CHECK(sig != NULL);
|
ARG_CHECK(sig != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_load(ctx, &r, &s, sig);
|
rustsecp256k1_v0_1_1_ecdsa_signature_load(ctx, &r, &s, sig);
|
||||||
return rustsecp256k1_v0_1_0_ecdsa_sig_serialize(output, outputlen, &r, &s);
|
return rustsecp256k1_v0_1_1_ecdsa_sig_serialize(output, outputlen, &r, &s);
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ecdsa_signature_serialize_compact(const rustsecp256k1_v0_1_0_context* ctx, unsigned char *output64, const rustsecp256k1_v0_1_0_ecdsa_signature* sig) {
|
int rustsecp256k1_v0_1_1_ecdsa_signature_serialize_compact(const rustsecp256k1_v0_1_1_context* ctx, unsigned char *output64, const rustsecp256k1_v0_1_1_ecdsa_signature* sig) {
|
||||||
rustsecp256k1_v0_1_0_scalar r, s;
|
rustsecp256k1_v0_1_1_scalar r, s;
|
||||||
|
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(output64 != NULL);
|
ARG_CHECK(output64 != NULL);
|
||||||
ARG_CHECK(sig != NULL);
|
ARG_CHECK(sig != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_load(ctx, &r, &s, sig);
|
rustsecp256k1_v0_1_1_ecdsa_signature_load(ctx, &r, &s, sig);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(&output64[0], &r);
|
rustsecp256k1_v0_1_1_scalar_get_b32(&output64[0], &r);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(&output64[32], &s);
|
rustsecp256k1_v0_1_1_scalar_get_b32(&output64[32], &s);
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ecdsa_signature_normalize(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_ecdsa_signature *sigout, const rustsecp256k1_v0_1_0_ecdsa_signature *sigin) {
|
int rustsecp256k1_v0_1_1_ecdsa_signature_normalize(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_ecdsa_signature *sigout, const rustsecp256k1_v0_1_1_ecdsa_signature *sigin) {
|
||||||
rustsecp256k1_v0_1_0_scalar r, s;
|
rustsecp256k1_v0_1_1_scalar r, s;
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
|
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(sigin != NULL);
|
ARG_CHECK(sigin != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_load(ctx, &r, &s, sigin);
|
rustsecp256k1_v0_1_1_ecdsa_signature_load(ctx, &r, &s, sigin);
|
||||||
ret = rustsecp256k1_v0_1_0_scalar_is_high(&s);
|
ret = rustsecp256k1_v0_1_1_scalar_is_high(&s);
|
||||||
if (sigout != NULL) {
|
if (sigout != NULL) {
|
||||||
if (ret) {
|
if (ret) {
|
||||||
rustsecp256k1_v0_1_0_scalar_negate(&s, &s);
|
rustsecp256k1_v0_1_1_scalar_negate(&s, &s);
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_save(sigout, &r, &s);
|
rustsecp256k1_v0_1_1_ecdsa_signature_save(sigout, &r, &s);
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ecdsa_verify(const rustsecp256k1_v0_1_0_context* ctx, const rustsecp256k1_v0_1_0_ecdsa_signature *sig, const unsigned char *msg32, const rustsecp256k1_v0_1_0_pubkey *pubkey) {
|
int rustsecp256k1_v0_1_1_ecdsa_verify(const rustsecp256k1_v0_1_1_context* ctx, const rustsecp256k1_v0_1_1_ecdsa_signature *sig, const unsigned char *msg32, const rustsecp256k1_v0_1_1_pubkey *pubkey) {
|
||||||
rustsecp256k1_v0_1_0_ge q;
|
rustsecp256k1_v0_1_1_ge q;
|
||||||
rustsecp256k1_v0_1_0_scalar r, s;
|
rustsecp256k1_v0_1_1_scalar r, s;
|
||||||
rustsecp256k1_v0_1_0_scalar m;
|
rustsecp256k1_v0_1_1_scalar m;
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(rustsecp256k1_v0_1_0_ecmult_context_is_built(&ctx->ecmult_ctx));
|
ARG_CHECK(rustsecp256k1_v0_1_1_ecmult_context_is_built(&ctx->ecmult_ctx));
|
||||||
ARG_CHECK(msg32 != NULL);
|
ARG_CHECK(msg32 != NULL);
|
||||||
ARG_CHECK(sig != NULL);
|
ARG_CHECK(sig != NULL);
|
||||||
ARG_CHECK(pubkey != NULL);
|
ARG_CHECK(pubkey != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&m, msg32, NULL);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&m, msg32, NULL);
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_load(ctx, &r, &s, sig);
|
rustsecp256k1_v0_1_1_ecdsa_signature_load(ctx, &r, &s, sig);
|
||||||
return (!rustsecp256k1_v0_1_0_scalar_is_high(&s) &&
|
return (!rustsecp256k1_v0_1_1_scalar_is_high(&s) &&
|
||||||
rustsecp256k1_v0_1_0_pubkey_load(ctx, &q, pubkey) &&
|
rustsecp256k1_v0_1_1_pubkey_load(ctx, &q, pubkey) &&
|
||||||
rustsecp256k1_v0_1_0_ecdsa_sig_verify(&ctx->ecmult_ctx, &r, &s, &q, &m));
|
rustsecp256k1_v0_1_1_ecdsa_sig_verify(&ctx->ecmult_ctx, &r, &s, &q, &m));
|
||||||
}
|
}
|
||||||
|
|
||||||
static SECP256K1_INLINE void buffer_append(unsigned char *buf, unsigned int *offset, const void *data, unsigned int len) {
|
static SECP256K1_INLINE void buffer_append(unsigned char *buf, unsigned int *offset, const void *data, unsigned int len) {
|
||||||
|
@@ -377,7 +377,7 @@ static SECP256K1_INLINE void buffer_append(unsigned char *buf, unsigned int *off
 static int nonce_function_rfc6979(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *algo16, void *data, unsigned int counter) {
    unsigned char keydata[112];
    unsigned int offset = 0;
-   rustsecp256k1_v0_1_0_rfc6979_hmac_sha256 rng;
+   rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 rng;
    unsigned int i;
    /* We feed a byte array to the PRNG as input, consisting of:
     * - the private key (32 bytes) and message (32 bytes), see RFC 6979 3.2d.
@ -395,167 +395,167 @@ static int nonce_function_rfc6979(unsigned char *nonce32, const unsigned char *m
|
||||||
if (algo16 != NULL) {
|
if (algo16 != NULL) {
|
||||||
buffer_append(keydata, &offset, algo16, 16);
|
buffer_append(keydata, &offset, algo16, 16);
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_initialize(&rng, keydata, offset);
|
rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_initialize(&rng, keydata, offset);
|
||||||
memset(keydata, 0, sizeof(keydata));
|
memset(keydata, 0, sizeof(keydata));
|
||||||
for (i = 0; i <= counter; i++) {
|
for (i = 0; i <= counter; i++) {
|
||||||
rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_generate(&rng, nonce32, 32);
|
rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32);
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_finalize(&rng);
|
rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_finalize(&rng);
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
const rustsecp256k1_v0_1_0_nonce_function rustsecp256k1_v0_1_0_nonce_function_rfc6979 = nonce_function_rfc6979;
|
const rustsecp256k1_v0_1_1_nonce_function rustsecp256k1_v0_1_1_nonce_function_rfc6979 = nonce_function_rfc6979;
|
||||||
const rustsecp256k1_v0_1_0_nonce_function rustsecp256k1_v0_1_0_nonce_function_default = nonce_function_rfc6979;
|
const rustsecp256k1_v0_1_1_nonce_function rustsecp256k1_v0_1_1_nonce_function_default = nonce_function_rfc6979;
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ecdsa_sign(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_ecdsa_signature *signature, const unsigned char *msg32, const unsigned char *seckey, rustsecp256k1_v0_1_0_nonce_function noncefp, const void* noncedata) {
|
int rustsecp256k1_v0_1_1_ecdsa_sign(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_ecdsa_signature *signature, const unsigned char *msg32, const unsigned char *seckey, rustsecp256k1_v0_1_1_nonce_function noncefp, const void* noncedata) {
|
||||||
rustsecp256k1_v0_1_0_scalar r, s;
|
rustsecp256k1_v0_1_1_scalar r, s;
|
||||||
rustsecp256k1_v0_1_0_scalar sec, non, msg;
|
rustsecp256k1_v0_1_1_scalar sec, non, msg;
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
int overflow = 0;
|
int overflow = 0;
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(rustsecp256k1_v0_1_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx));
|
ARG_CHECK(rustsecp256k1_v0_1_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx));
|
||||||
ARG_CHECK(msg32 != NULL);
|
ARG_CHECK(msg32 != NULL);
|
||||||
ARG_CHECK(signature != NULL);
|
ARG_CHECK(signature != NULL);
|
||||||
ARG_CHECK(seckey != NULL);
|
ARG_CHECK(seckey != NULL);
|
||||||
if (noncefp == NULL) {
|
if (noncefp == NULL) {
|
||||||
noncefp = rustsecp256k1_v0_1_0_nonce_function_default;
|
noncefp = rustsecp256k1_v0_1_1_nonce_function_default;
|
||||||
}
|
}
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&sec, seckey, &overflow);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&sec, seckey, &overflow);
|
||||||
/* Fail if the secret key is invalid. */
|
/* Fail if the secret key is invalid. */
|
||||||
if (!overflow && !rustsecp256k1_v0_1_0_scalar_is_zero(&sec)) {
|
if (!overflow && !rustsecp256k1_v0_1_1_scalar_is_zero(&sec)) {
|
||||||
unsigned char nonce32[32];
|
unsigned char nonce32[32];
|
||||||
unsigned int count = 0;
|
unsigned int count = 0;
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&msg, msg32, NULL);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&msg, msg32, NULL);
|
||||||
while (1) {
|
while (1) {
|
||||||
ret = noncefp(nonce32, msg32, seckey, NULL, (void*)noncedata, count);
|
ret = noncefp(nonce32, msg32, seckey, NULL, (void*)noncedata, count);
|
||||||
if (!ret) {
|
if (!ret) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&non, nonce32, &overflow);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&non, nonce32, &overflow);
|
||||||
if (!overflow && !rustsecp256k1_v0_1_0_scalar_is_zero(&non)) {
|
if (!overflow && !rustsecp256k1_v0_1_1_scalar_is_zero(&non)) {
|
||||||
if (rustsecp256k1_v0_1_0_ecdsa_sig_sign(&ctx->ecmult_gen_ctx, &r, &s, &sec, &msg, &non, NULL)) {
|
if (rustsecp256k1_v0_1_1_ecdsa_sig_sign(&ctx->ecmult_gen_ctx, &r, &s, &sec, &msg, &non, NULL)) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
count++;
|
count++;
|
||||||
}
|
}
|
||||||
memset(nonce32, 0, 32);
|
memset(nonce32, 0, 32);
|
||||||
rustsecp256k1_v0_1_0_scalar_clear(&msg);
|
rustsecp256k1_v0_1_1_scalar_clear(&msg);
|
||||||
rustsecp256k1_v0_1_0_scalar_clear(&non);
|
rustsecp256k1_v0_1_1_scalar_clear(&non);
|
||||||
rustsecp256k1_v0_1_0_scalar_clear(&sec);
|
rustsecp256k1_v0_1_1_scalar_clear(&sec);
|
||||||
}
|
}
|
||||||
if (ret) {
|
if (ret) {
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_save(signature, &r, &s);
|
rustsecp256k1_v0_1_1_ecdsa_signature_save(signature, &r, &s);
|
||||||
} else {
|
} else {
|
||||||
memset(signature, 0, sizeof(*signature));
|
memset(signature, 0, sizeof(*signature));
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ec_seckey_verify(const rustsecp256k1_v0_1_0_context* ctx, const unsigned char *seckey) {
|
int rustsecp256k1_v0_1_1_ec_seckey_verify(const rustsecp256k1_v0_1_1_context* ctx, const unsigned char *seckey) {
|
||||||
rustsecp256k1_v0_1_0_scalar sec;
|
rustsecp256k1_v0_1_1_scalar sec;
|
||||||
int ret;
|
int ret;
|
||||||
int overflow;
|
int overflow;
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(seckey != NULL);
|
ARG_CHECK(seckey != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&sec, seckey, &overflow);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&sec, seckey, &overflow);
|
||||||
ret = !overflow && !rustsecp256k1_v0_1_0_scalar_is_zero(&sec);
|
ret = !overflow && !rustsecp256k1_v0_1_1_scalar_is_zero(&sec);
|
||||||
rustsecp256k1_v0_1_0_scalar_clear(&sec);
|
rustsecp256k1_v0_1_1_scalar_clear(&sec);
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ec_pubkey_create(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_pubkey *pubkey, const unsigned char *seckey) {
|
int rustsecp256k1_v0_1_1_ec_pubkey_create(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_pubkey *pubkey, const unsigned char *seckey) {
|
||||||
rustsecp256k1_v0_1_0_gej pj;
|
rustsecp256k1_v0_1_1_gej pj;
|
||||||
rustsecp256k1_v0_1_0_ge p;
|
rustsecp256k1_v0_1_1_ge p;
|
||||||
rustsecp256k1_v0_1_0_scalar sec;
|
rustsecp256k1_v0_1_1_scalar sec;
|
||||||
int overflow;
|
int overflow;
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(pubkey != NULL);
|
ARG_CHECK(pubkey != NULL);
|
||||||
memset(pubkey, 0, sizeof(*pubkey));
|
memset(pubkey, 0, sizeof(*pubkey));
|
||||||
ARG_CHECK(rustsecp256k1_v0_1_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx));
|
ARG_CHECK(rustsecp256k1_v0_1_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx));
|
||||||
ARG_CHECK(seckey != NULL);
|
ARG_CHECK(seckey != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&sec, seckey, &overflow);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&sec, seckey, &overflow);
|
||||||
ret = (!overflow) & (!rustsecp256k1_v0_1_0_scalar_is_zero(&sec));
|
ret = (!overflow) & (!rustsecp256k1_v0_1_1_scalar_is_zero(&sec));
|
||||||
if (ret) {
|
if (ret) {
|
||||||
rustsecp256k1_v0_1_0_ecmult_gen(&ctx->ecmult_gen_ctx, &pj, &sec);
|
rustsecp256k1_v0_1_1_ecmult_gen(&ctx->ecmult_gen_ctx, &pj, &sec);
|
||||||
rustsecp256k1_v0_1_0_ge_set_gej(&p, &pj);
|
rustsecp256k1_v0_1_1_ge_set_gej(&p, &pj);
|
||||||
rustsecp256k1_v0_1_0_pubkey_save(pubkey, &p);
|
rustsecp256k1_v0_1_1_pubkey_save(pubkey, &p);
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_scalar_clear(&sec);
|
rustsecp256k1_v0_1_1_scalar_clear(&sec);
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ec_privkey_negate(const rustsecp256k1_v0_1_0_context* ctx, unsigned char *seckey) {
|
int rustsecp256k1_v0_1_1_ec_privkey_negate(const rustsecp256k1_v0_1_1_context* ctx, unsigned char *seckey) {
|
||||||
rustsecp256k1_v0_1_0_scalar sec;
|
rustsecp256k1_v0_1_1_scalar sec;
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(seckey != NULL);
|
ARG_CHECK(seckey != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&sec, seckey, NULL);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&sec, seckey, NULL);
|
||||||
rustsecp256k1_v0_1_0_scalar_negate(&sec, &sec);
|
rustsecp256k1_v0_1_1_scalar_negate(&sec, &sec);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(seckey, &sec);
|
rustsecp256k1_v0_1_1_scalar_get_b32(seckey, &sec);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_clear(&sec);
|
rustsecp256k1_v0_1_1_scalar_clear(&sec);
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ec_pubkey_negate(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_pubkey *pubkey) {
|
int rustsecp256k1_v0_1_1_ec_pubkey_negate(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_pubkey *pubkey) {
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
rustsecp256k1_v0_1_0_ge p;
|
rustsecp256k1_v0_1_1_ge p;
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(pubkey != NULL);
|
ARG_CHECK(pubkey != NULL);
|
||||||
|
|
||||||
ret = rustsecp256k1_v0_1_0_pubkey_load(ctx, &p, pubkey);
|
ret = rustsecp256k1_v0_1_1_pubkey_load(ctx, &p, pubkey);
|
||||||
memset(pubkey, 0, sizeof(*pubkey));
|
memset(pubkey, 0, sizeof(*pubkey));
|
||||||
if (ret) {
|
if (ret) {
|
||||||
rustsecp256k1_v0_1_0_ge_neg(&p, &p);
|
rustsecp256k1_v0_1_1_ge_neg(&p, &p);
|
||||||
rustsecp256k1_v0_1_0_pubkey_save(pubkey, &p);
|
rustsecp256k1_v0_1_1_pubkey_save(pubkey, &p);
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ec_privkey_tweak_add(const rustsecp256k1_v0_1_0_context* ctx, unsigned char *seckey, const unsigned char *tweak) {
|
int rustsecp256k1_v0_1_1_ec_privkey_tweak_add(const rustsecp256k1_v0_1_1_context* ctx, unsigned char *seckey, const unsigned char *tweak) {
|
||||||
rustsecp256k1_v0_1_0_scalar term;
|
rustsecp256k1_v0_1_1_scalar term;
|
||||||
rustsecp256k1_v0_1_0_scalar sec;
|
rustsecp256k1_v0_1_1_scalar sec;
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
int overflow = 0;
|
int overflow = 0;
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(seckey != NULL);
|
ARG_CHECK(seckey != NULL);
|
||||||
ARG_CHECK(tweak != NULL);
|
ARG_CHECK(tweak != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&term, tweak, &overflow);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&term, tweak, &overflow);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&sec, seckey, NULL);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&sec, seckey, NULL);
|
||||||
|
|
||||||
ret = !overflow && rustsecp256k1_v0_1_0_eckey_privkey_tweak_add(&sec, &term);
|
ret = !overflow && rustsecp256k1_v0_1_1_eckey_privkey_tweak_add(&sec, &term);
|
||||||
memset(seckey, 0, 32);
|
memset(seckey, 0, 32);
|
||||||
if (ret) {
|
if (ret) {
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(seckey, &sec);
|
rustsecp256k1_v0_1_1_scalar_get_b32(seckey, &sec);
|
||||||
}
|
}
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_clear(&sec);
|
rustsecp256k1_v0_1_1_scalar_clear(&sec);
|
||||||
rustsecp256k1_v0_1_0_scalar_clear(&term);
|
rustsecp256k1_v0_1_1_scalar_clear(&term);
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_ec_pubkey_tweak_add(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_pubkey *pubkey, const unsigned char *tweak) {
|
int rustsecp256k1_v0_1_1_ec_pubkey_tweak_add(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_pubkey *pubkey, const unsigned char *tweak) {
|
||||||
rustsecp256k1_v0_1_0_ge p;
|
rustsecp256k1_v0_1_1_ge p;
|
||||||
rustsecp256k1_v0_1_0_scalar term;
|
rustsecp256k1_v0_1_1_scalar term;
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
int overflow = 0;
|
int overflow = 0;
|
||||||
VERIFY_CHECK(ctx != NULL);
|
VERIFY_CHECK(ctx != NULL);
|
||||||
ARG_CHECK(rustsecp256k1_v0_1_0_ecmult_context_is_built(&ctx->ecmult_ctx));
|
ARG_CHECK(rustsecp256k1_v0_1_1_ecmult_context_is_built(&ctx->ecmult_ctx));
|
||||||
ARG_CHECK(pubkey != NULL);
|
ARG_CHECK(pubkey != NULL);
|
||||||
ARG_CHECK(tweak != NULL);
|
ARG_CHECK(tweak != NULL);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(&term, tweak, &overflow);
|
rustsecp256k1_v0_1_1_scalar_set_b32(&term, tweak, &overflow);
|
||||||
ret = !overflow && rustsecp256k1_v0_1_0_pubkey_load(ctx, &p, pubkey);
|
ret = !overflow && rustsecp256k1_v0_1_1_pubkey_load(ctx, &p, pubkey);
|
||||||
memset(pubkey, 0, sizeof(*pubkey));
|
memset(pubkey, 0, sizeof(*pubkey));
|
||||||
if (ret) {
|
if (ret) {
|
||||||
if (rustsecp256k1_v0_1_0_eckey_pubkey_tweak_add(&ctx->ecmult_ctx, &p, &term)) {
|
if (rustsecp256k1_v0_1_1_eckey_pubkey_tweak_add(&ctx->ecmult_ctx, &p, &term)) {
|
||||||
rustsecp256k1_v0_1_0_pubkey_save(pubkey, &p);
|
rustsecp256k1_v0_1_1_pubkey_save(pubkey, &p);
|
||||||
} else {
|
} else {
|
||||||
ret = 0;
|
ret = 0;
|
||||||
}
|
}
|
||||||
|
@@ -564,44 +564,44 @@ int rustsecp256k1_v0_1_0_ec_pubkey_tweak_add(const rustsecp256k1_v0_1_0_context*
     return ret;
 }
 
-int rustsecp256k1_v0_1_0_ec_privkey_tweak_mul(const rustsecp256k1_v0_1_0_context* ctx, unsigned char *seckey, const unsigned char *tweak) {
-    rustsecp256k1_v0_1_0_scalar factor;
-    rustsecp256k1_v0_1_0_scalar sec;
+int rustsecp256k1_v0_1_1_ec_privkey_tweak_mul(const rustsecp256k1_v0_1_1_context* ctx, unsigned char *seckey, const unsigned char *tweak) {
+    rustsecp256k1_v0_1_1_scalar factor;
+    rustsecp256k1_v0_1_1_scalar sec;
     int ret = 0;
     int overflow = 0;
     VERIFY_CHECK(ctx != NULL);
     ARG_CHECK(seckey != NULL);
     ARG_CHECK(tweak != NULL);
 
-    rustsecp256k1_v0_1_0_scalar_set_b32(&factor, tweak, &overflow);
-    rustsecp256k1_v0_1_0_scalar_set_b32(&sec, seckey, NULL);
-    ret = !overflow && rustsecp256k1_v0_1_0_eckey_privkey_tweak_mul(&sec, &factor);
+    rustsecp256k1_v0_1_1_scalar_set_b32(&factor, tweak, &overflow);
+    rustsecp256k1_v0_1_1_scalar_set_b32(&sec, seckey, NULL);
+    ret = !overflow && rustsecp256k1_v0_1_1_eckey_privkey_tweak_mul(&sec, &factor);
     memset(seckey, 0, 32);
     if (ret) {
-        rustsecp256k1_v0_1_0_scalar_get_b32(seckey, &sec);
+        rustsecp256k1_v0_1_1_scalar_get_b32(seckey, &sec);
     }
 
-    rustsecp256k1_v0_1_0_scalar_clear(&sec);
-    rustsecp256k1_v0_1_0_scalar_clear(&factor);
+    rustsecp256k1_v0_1_1_scalar_clear(&sec);
+    rustsecp256k1_v0_1_1_scalar_clear(&factor);
     return ret;
 }
 
-int rustsecp256k1_v0_1_0_ec_pubkey_tweak_mul(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_pubkey *pubkey, const unsigned char *tweak) {
-    rustsecp256k1_v0_1_0_ge p;
-    rustsecp256k1_v0_1_0_scalar factor;
+int rustsecp256k1_v0_1_1_ec_pubkey_tweak_mul(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_pubkey *pubkey, const unsigned char *tweak) {
+    rustsecp256k1_v0_1_1_ge p;
+    rustsecp256k1_v0_1_1_scalar factor;
     int ret = 0;
     int overflow = 0;
     VERIFY_CHECK(ctx != NULL);
-    ARG_CHECK(rustsecp256k1_v0_1_0_ecmult_context_is_built(&ctx->ecmult_ctx));
+    ARG_CHECK(rustsecp256k1_v0_1_1_ecmult_context_is_built(&ctx->ecmult_ctx));
     ARG_CHECK(pubkey != NULL);
     ARG_CHECK(tweak != NULL);
 
-    rustsecp256k1_v0_1_0_scalar_set_b32(&factor, tweak, &overflow);
-    ret = !overflow && rustsecp256k1_v0_1_0_pubkey_load(ctx, &p, pubkey);
+    rustsecp256k1_v0_1_1_scalar_set_b32(&factor, tweak, &overflow);
+    ret = !overflow && rustsecp256k1_v0_1_1_pubkey_load(ctx, &p, pubkey);
     memset(pubkey, 0, sizeof(*pubkey));
     if (ret) {
-        if (rustsecp256k1_v0_1_0_eckey_pubkey_tweak_mul(&ctx->ecmult_ctx, &p, &factor)) {
-            rustsecp256k1_v0_1_0_pubkey_save(pubkey, &p);
+        if (rustsecp256k1_v0_1_1_eckey_pubkey_tweak_mul(&ctx->ecmult_ctx, &p, &factor)) {
+            rustsecp256k1_v0_1_1_pubkey_save(pubkey, &p);
         } else {
             ret = 0;
         }
@@ -610,35 +610,35 @@ int rustsecp256k1_v0_1_0_ec_pubkey_tweak_mul(const rustsecp256k1_v0_1_0_context*
     return ret;
 }
 
-int rustsecp256k1_v0_1_0_context_randomize(rustsecp256k1_v0_1_0_context* ctx, const unsigned char *seed32) {
+int rustsecp256k1_v0_1_1_context_randomize(rustsecp256k1_v0_1_1_context* ctx, const unsigned char *seed32) {
     VERIFY_CHECK(ctx != NULL);
-    if (rustsecp256k1_v0_1_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)) {
-        rustsecp256k1_v0_1_0_ecmult_gen_blind(&ctx->ecmult_gen_ctx, seed32);
+    if (rustsecp256k1_v0_1_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)) {
+        rustsecp256k1_v0_1_1_ecmult_gen_blind(&ctx->ecmult_gen_ctx, seed32);
     }
     return 1;
 }
 
-int rustsecp256k1_v0_1_0_ec_pubkey_combine(const rustsecp256k1_v0_1_0_context* ctx, rustsecp256k1_v0_1_0_pubkey *pubnonce, const rustsecp256k1_v0_1_0_pubkey * const *pubnonces, size_t n) {
+int rustsecp256k1_v0_1_1_ec_pubkey_combine(const rustsecp256k1_v0_1_1_context* ctx, rustsecp256k1_v0_1_1_pubkey *pubnonce, const rustsecp256k1_v0_1_1_pubkey * const *pubnonces, size_t n) {
     size_t i;
-    rustsecp256k1_v0_1_0_gej Qj;
-    rustsecp256k1_v0_1_0_ge Q;
+    rustsecp256k1_v0_1_1_gej Qj;
+    rustsecp256k1_v0_1_1_ge Q;
 
     ARG_CHECK(pubnonce != NULL);
     memset(pubnonce, 0, sizeof(*pubnonce));
     ARG_CHECK(n >= 1);
     ARG_CHECK(pubnonces != NULL);
 
-    rustsecp256k1_v0_1_0_gej_set_infinity(&Qj);
+    rustsecp256k1_v0_1_1_gej_set_infinity(&Qj);
 
     for (i = 0; i < n; i++) {
-        rustsecp256k1_v0_1_0_pubkey_load(ctx, &Q, pubnonces[i]);
-        rustsecp256k1_v0_1_0_gej_add_ge(&Qj, &Qj, &Q);
+        rustsecp256k1_v0_1_1_pubkey_load(ctx, &Q, pubnonces[i]);
+        rustsecp256k1_v0_1_1_gej_add_ge(&Qj, &Qj, &Q);
     }
-    if (rustsecp256k1_v0_1_0_gej_is_infinity(&Qj)) {
+    if (rustsecp256k1_v0_1_1_gej_is_infinity(&Qj)) {
         return 0;
     }
-    rustsecp256k1_v0_1_0_ge_set_gej(&Q, &Qj);
-    rustsecp256k1_v0_1_0_pubkey_save(pubnonce, &Q);
+    rustsecp256k1_v0_1_1_ge_set_gej(&Q, &Qj);
+    rustsecp256k1_v0_1_1_pubkey_save(pubnonce, &Q);
     return 1;
 }
 
@@ -14,25 +14,25 @@
 /* A non-cryptographic RNG used only for test infrastructure. */
 
 /** Seed the pseudorandom number generator for testing. */
-SECP256K1_INLINE static void rustsecp256k1_v0_1_0_rand_seed(const unsigned char *seed16);
+SECP256K1_INLINE static void rustsecp256k1_v0_1_1_rand_seed(const unsigned char *seed16);
 
 /** Generate a pseudorandom number in the range [0..2**32-1]. */
-static uint32_t rustsecp256k1_v0_1_0_rand32(void);
+static uint32_t rustsecp256k1_v0_1_1_rand32(void);
 
 /** Generate a pseudorandom number in the range [0..2**bits-1]. Bits must be 1 or
  * more. */
-static uint32_t rustsecp256k1_v0_1_0_rand_bits(int bits);
+static uint32_t rustsecp256k1_v0_1_1_rand_bits(int bits);
 
 /** Generate a pseudorandom number in the range [0..range-1]. */
-static uint32_t rustsecp256k1_v0_1_0_rand_int(uint32_t range);
+static uint32_t rustsecp256k1_v0_1_1_rand_int(uint32_t range);
 
 /** Generate a pseudorandom 32-byte array. */
-static void rustsecp256k1_v0_1_0_rand256(unsigned char *b32);
+static void rustsecp256k1_v0_1_1_rand256(unsigned char *b32);
 
 /** Generate a pseudorandom 32-byte array with long sequences of zero and one bits. */
-static void rustsecp256k1_v0_1_0_rand256_test(unsigned char *b32);
+static void rustsecp256k1_v0_1_1_rand256_test(unsigned char *b32);
 
 /** Generate pseudorandom bytes with long sequences of zero and one bits. */
-static void rustsecp256k1_v0_1_0_rand_bytes_test(unsigned char *bytes, size_t len);
+static void rustsecp256k1_v0_1_1_rand_bytes_test(unsigned char *bytes, size_t len);
 
 #endif /* SECP256K1_TESTRAND_H */

@@ -13,38 +13,38 @@
|
||||||
#include "testrand.h"
|
#include "testrand.h"
|
||||||
#include "hash.h"
|
#include "hash.h"
|
||||||
|
|
||||||
static rustsecp256k1_v0_1_0_rfc6979_hmac_sha256 rustsecp256k1_v0_1_0_test_rng;
|
static rustsecp256k1_v0_1_1_rfc6979_hmac_sha256 rustsecp256k1_v0_1_1_test_rng;
|
||||||
static uint32_t rustsecp256k1_v0_1_0_test_rng_precomputed[8];
|
static uint32_t rustsecp256k1_v0_1_1_test_rng_precomputed[8];
|
||||||
static int rustsecp256k1_v0_1_0_test_rng_precomputed_used = 8;
|
static int rustsecp256k1_v0_1_1_test_rng_precomputed_used = 8;
|
||||||
static uint64_t rustsecp256k1_v0_1_0_test_rng_integer;
|
static uint64_t rustsecp256k1_v0_1_1_test_rng_integer;
|
||||||
static int rustsecp256k1_v0_1_0_test_rng_integer_bits_left = 0;
|
static int rustsecp256k1_v0_1_1_test_rng_integer_bits_left = 0;
|
||||||
|
|
||||||
SECP256K1_INLINE static void rustsecp256k1_v0_1_0_rand_seed(const unsigned char *seed16) {
|
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_rand_seed(const unsigned char *seed16) {
|
||||||
rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_initialize(&rustsecp256k1_v0_1_0_test_rng, seed16, 16);
|
rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_initialize(&rustsecp256k1_v0_1_1_test_rng, seed16, 16);
|
||||||
}
|
}
|
||||||
|
|
||||||
SECP256K1_INLINE static uint32_t rustsecp256k1_v0_1_0_rand32(void) {
|
SECP256K1_INLINE static uint32_t rustsecp256k1_v0_1_1_rand32(void) {
|
||||||
if (rustsecp256k1_v0_1_0_test_rng_precomputed_used == 8) {
|
if (rustsecp256k1_v0_1_1_test_rng_precomputed_used == 8) {
|
||||||
rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_generate(&rustsecp256k1_v0_1_0_test_rng, (unsigned char*)(&rustsecp256k1_v0_1_0_test_rng_precomputed[0]), sizeof(rustsecp256k1_v0_1_0_test_rng_precomputed));
|
rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_generate(&rustsecp256k1_v0_1_1_test_rng, (unsigned char*)(&rustsecp256k1_v0_1_1_test_rng_precomputed[0]), sizeof(rustsecp256k1_v0_1_1_test_rng_precomputed));
|
||||||
rustsecp256k1_v0_1_0_test_rng_precomputed_used = 0;
|
rustsecp256k1_v0_1_1_test_rng_precomputed_used = 0;
|
||||||
}
|
}
|
||||||
return rustsecp256k1_v0_1_0_test_rng_precomputed[rustsecp256k1_v0_1_0_test_rng_precomputed_used++];
|
return rustsecp256k1_v0_1_1_test_rng_precomputed[rustsecp256k1_v0_1_1_test_rng_precomputed_used++];
|
||||||
}
|
}
|
||||||
|
|
||||||
static uint32_t rustsecp256k1_v0_1_0_rand_bits(int bits) {
|
static uint32_t rustsecp256k1_v0_1_1_rand_bits(int bits) {
|
||||||
uint32_t ret;
|
uint32_t ret;
|
||||||
if (rustsecp256k1_v0_1_0_test_rng_integer_bits_left < bits) {
|
if (rustsecp256k1_v0_1_1_test_rng_integer_bits_left < bits) {
|
||||||
rustsecp256k1_v0_1_0_test_rng_integer |= (((uint64_t)rustsecp256k1_v0_1_0_rand32()) << rustsecp256k1_v0_1_0_test_rng_integer_bits_left);
|
rustsecp256k1_v0_1_1_test_rng_integer |= (((uint64_t)rustsecp256k1_v0_1_1_rand32()) << rustsecp256k1_v0_1_1_test_rng_integer_bits_left);
|
||||||
rustsecp256k1_v0_1_0_test_rng_integer_bits_left += 32;
|
rustsecp256k1_v0_1_1_test_rng_integer_bits_left += 32;
|
||||||
}
|
}
|
||||||
ret = rustsecp256k1_v0_1_0_test_rng_integer;
|
ret = rustsecp256k1_v0_1_1_test_rng_integer;
|
||||||
rustsecp256k1_v0_1_0_test_rng_integer >>= bits;
|
rustsecp256k1_v0_1_1_test_rng_integer >>= bits;
|
||||||
rustsecp256k1_v0_1_0_test_rng_integer_bits_left -= bits;
|
rustsecp256k1_v0_1_1_test_rng_integer_bits_left -= bits;
|
||||||
ret &= ((~((uint32_t)0)) >> (32 - bits));
|
ret &= ((~((uint32_t)0)) >> (32 - bits));
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
static uint32_t rustsecp256k1_v0_1_0_rand_int(uint32_t range) {
|
static uint32_t rustsecp256k1_v0_1_1_rand_int(uint32_t range) {
|
||||||
/* We want a uniform integer between 0 and range-1, inclusive.
|
/* We want a uniform integer between 0 and range-1, inclusive.
|
||||||
* B is the smallest number such that range <= 2**B.
|
* B is the smallest number such that range <= 2**B.
|
||||||
* two mechanisms implemented here:
|
* two mechanisms implemented here:
|
||||||
|
@@ -76,25 +76,25 @@ static uint32_t rustsecp256k1_v0_1_0_rand_int(uint32_t range) {
|
||||||
mult = 1;
|
mult = 1;
|
||||||
}
|
}
|
||||||
while(1) {
|
while(1) {
|
||||||
uint32_t x = rustsecp256k1_v0_1_0_rand_bits(bits);
|
uint32_t x = rustsecp256k1_v0_1_1_rand_bits(bits);
|
||||||
if (x < trange) {
|
if (x < trange) {
|
||||||
return (mult == 1) ? x : (x % range);
|
return (mult == 1) ? x : (x % range);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_rand256(unsigned char *b32) {
|
static void rustsecp256k1_v0_1_1_rand256(unsigned char *b32) {
|
||||||
rustsecp256k1_v0_1_0_rfc6979_hmac_sha256_generate(&rustsecp256k1_v0_1_0_test_rng, b32, 32);
|
rustsecp256k1_v0_1_1_rfc6979_hmac_sha256_generate(&rustsecp256k1_v0_1_1_test_rng, b32, 32);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_rand_bytes_test(unsigned char *bytes, size_t len) {
|
static void rustsecp256k1_v0_1_1_rand_bytes_test(unsigned char *bytes, size_t len) {
|
||||||
size_t bits = 0;
|
size_t bits = 0;
|
||||||
memset(bytes, 0, len);
|
memset(bytes, 0, len);
|
||||||
while (bits < len * 8) {
|
while (bits < len * 8) {
|
||||||
int now;
|
int now;
|
||||||
uint32_t val;
|
uint32_t val;
|
||||||
now = 1 + (rustsecp256k1_v0_1_0_rand_bits(6) * rustsecp256k1_v0_1_0_rand_bits(5) + 16) / 31;
|
now = 1 + (rustsecp256k1_v0_1_1_rand_bits(6) * rustsecp256k1_v0_1_1_rand_bits(5) + 16) / 31;
|
||||||
val = rustsecp256k1_v0_1_0_rand_bits(1);
|
val = rustsecp256k1_v0_1_1_rand_bits(1);
|
||||||
while (now > 0 && bits < len * 8) {
|
while (now > 0 && bits < len * 8) {
|
||||||
bytes[bits / 8] |= val << (bits % 8);
|
bytes[bits / 8] |= val << (bits % 8);
|
||||||
now--;
|
now--;
|
||||||
|
@@ -103,8 +103,8 @@ static void rustsecp256k1_v0_1_0_rand_bytes_test(unsigned char *bytes, size_t le
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static void rustsecp256k1_v0_1_0_rand256_test(unsigned char *b32) {
|
static void rustsecp256k1_v0_1_1_rand256_test(unsigned char *b32) {
|
||||||
rustsecp256k1_v0_1_0_rand_bytes_test(b32, 32);
|
rustsecp256k1_v0_1_1_rand_bytes_test(b32, 32);
|
||||||
}
|
}
|
||||||
|
|
||||||
#endif /* SECP256K1_TESTRAND_IMPL_H */
|
#endif /* SECP256K1_TESTRAND_IMPL_H */
|
||||||
|
|
File diff suppressed because it is too large
|
@@ -32,47 +32,47 @@
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
/** stolen from tests.c */
|
/** stolen from tests.c */
|
||||||
void ge_equals_ge(const rustsecp256k1_v0_1_0_ge *a, const rustsecp256k1_v0_1_0_ge *b) {
|
void ge_equals_ge(const rustsecp256k1_v0_1_1_ge *a, const rustsecp256k1_v0_1_1_ge *b) {
|
||||||
CHECK(a->infinity == b->infinity);
|
CHECK(a->infinity == b->infinity);
|
||||||
if (a->infinity) {
|
if (a->infinity) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
CHECK(rustsecp256k1_v0_1_0_fe_equal_var(&a->x, &b->x));
|
CHECK(rustsecp256k1_v0_1_1_fe_equal_var(&a->x, &b->x));
|
||||||
CHECK(rustsecp256k1_v0_1_0_fe_equal_var(&a->y, &b->y));
|
CHECK(rustsecp256k1_v0_1_1_fe_equal_var(&a->y, &b->y));
|
||||||
}
|
}
|
||||||
|
|
||||||
void ge_equals_gej(const rustsecp256k1_v0_1_0_ge *a, const rustsecp256k1_v0_1_0_gej *b) {
|
void ge_equals_gej(const rustsecp256k1_v0_1_1_ge *a, const rustsecp256k1_v0_1_1_gej *b) {
|
||||||
rustsecp256k1_v0_1_0_fe z2s;
|
rustsecp256k1_v0_1_1_fe z2s;
|
||||||
rustsecp256k1_v0_1_0_fe u1, u2, s1, s2;
|
rustsecp256k1_v0_1_1_fe u1, u2, s1, s2;
|
||||||
CHECK(a->infinity == b->infinity);
|
CHECK(a->infinity == b->infinity);
|
||||||
if (a->infinity) {
|
if (a->infinity) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
/* Check a.x * b.z^2 == b.x && a.y * b.z^3 == b.y, to avoid inverses. */
|
/* Check a.x * b.z^2 == b.x && a.y * b.z^3 == b.y, to avoid inverses. */
|
||||||
rustsecp256k1_v0_1_0_fe_sqr(&z2s, &b->z);
|
rustsecp256k1_v0_1_1_fe_sqr(&z2s, &b->z);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&u1, &a->x, &z2s);
|
rustsecp256k1_v0_1_1_fe_mul(&u1, &a->x, &z2s);
|
||||||
u2 = b->x; rustsecp256k1_v0_1_0_fe_normalize_weak(&u2);
|
u2 = b->x; rustsecp256k1_v0_1_1_fe_normalize_weak(&u2);
|
||||||
rustsecp256k1_v0_1_0_fe_mul(&s1, &a->y, &z2s); rustsecp256k1_v0_1_0_fe_mul(&s1, &s1, &b->z);
|
rustsecp256k1_v0_1_1_fe_mul(&s1, &a->y, &z2s); rustsecp256k1_v0_1_1_fe_mul(&s1, &s1, &b->z);
|
||||||
s2 = b->y; rustsecp256k1_v0_1_0_fe_normalize_weak(&s2);
|
s2 = b->y; rustsecp256k1_v0_1_1_fe_normalize_weak(&s2);
|
||||||
CHECK(rustsecp256k1_v0_1_0_fe_equal_var(&u1, &u2));
|
CHECK(rustsecp256k1_v0_1_1_fe_equal_var(&u1, &u2));
|
||||||
CHECK(rustsecp256k1_v0_1_0_fe_equal_var(&s1, &s2));
|
CHECK(rustsecp256k1_v0_1_1_fe_equal_var(&s1, &s2));
|
||||||
}
|
}
|
||||||
|
|
||||||
void random_fe(rustsecp256k1_v0_1_0_fe *x) {
|
void random_fe(rustsecp256k1_v0_1_1_fe *x) {
|
||||||
unsigned char bin[32];
|
unsigned char bin[32];
|
||||||
do {
|
do {
|
||||||
rustsecp256k1_v0_1_0_rand256(bin);
|
rustsecp256k1_v0_1_1_rand256(bin);
|
||||||
if (rustsecp256k1_v0_1_0_fe_set_b32(x, bin)) {
|
if (rustsecp256k1_v0_1_1_fe_set_b32(x, bin)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
} while(1);
|
} while(1);
|
||||||
}
|
}
|
||||||
/** END stolen from tests.c */
|
/** END stolen from tests.c */
|
||||||
|
|
||||||
int rustsecp256k1_v0_1_0_nonce_function_smallint(unsigned char *nonce32, const unsigned char *msg32,
|
int rustsecp256k1_v0_1_1_nonce_function_smallint(unsigned char *nonce32, const unsigned char *msg32,
|
||||||
const unsigned char *key32, const unsigned char *algo16,
|
const unsigned char *key32, const unsigned char *algo16,
|
||||||
void *data, unsigned int attempt) {
|
void *data, unsigned int attempt) {
|
||||||
rustsecp256k1_v0_1_0_scalar s;
|
rustsecp256k1_v0_1_1_scalar s;
|
||||||
int *idata = data;
|
int *idata = data;
|
||||||
(void)msg32;
|
(void)msg32;
|
||||||
(void)key32;
|
(void)key32;
|
||||||
|
@@ -84,97 +84,97 @@ int rustsecp256k1_v0_1_0_nonce_function_smallint(unsigned char *nonce32, const u
|
||||||
if (attempt > 0) {
|
if (attempt > 0) {
|
||||||
*idata = (*idata + 1) % EXHAUSTIVE_TEST_ORDER;
|
*idata = (*idata + 1) % EXHAUSTIVE_TEST_ORDER;
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&s, *idata);
|
rustsecp256k1_v0_1_1_scalar_set_int(&s, *idata);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(nonce32, &s);
|
rustsecp256k1_v0_1_1_scalar_get_b32(nonce32, &s);
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
#ifdef USE_ENDOMORPHISM
|
#ifdef USE_ENDOMORPHISM
|
||||||
void test_exhaustive_endomorphism(const rustsecp256k1_v0_1_0_ge *group, int order) {
|
void test_exhaustive_endomorphism(const rustsecp256k1_v0_1_1_ge *group, int order) {
|
||||||
int i;
|
int i;
|
||||||
for (i = 0; i < order; i++) {
|
for (i = 0; i < order; i++) {
|
||||||
rustsecp256k1_v0_1_0_ge res;
|
rustsecp256k1_v0_1_1_ge res;
|
||||||
rustsecp256k1_v0_1_0_ge_mul_lambda(&res, &group[i]);
|
rustsecp256k1_v0_1_1_ge_mul_lambda(&res, &group[i]);
|
||||||
ge_equals_ge(&group[i * EXHAUSTIVE_TEST_LAMBDA % EXHAUSTIVE_TEST_ORDER], &res);
|
ge_equals_ge(&group[i * EXHAUSTIVE_TEST_LAMBDA % EXHAUSTIVE_TEST_ORDER], &res);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
void test_exhaustive_addition(const rustsecp256k1_v0_1_0_ge *group, const rustsecp256k1_v0_1_0_gej *groupj, int order) {
|
void test_exhaustive_addition(const rustsecp256k1_v0_1_1_ge *group, const rustsecp256k1_v0_1_1_gej *groupj, int order) {
|
||||||
int i, j;
|
int i, j;
|
||||||
|
|
||||||
/* Sanity-check (and check infinity functions) */
|
/* Sanity-check (and check infinity functions) */
|
||||||
CHECK(rustsecp256k1_v0_1_0_ge_is_infinity(&group[0]));
|
CHECK(rustsecp256k1_v0_1_1_ge_is_infinity(&group[0]));
|
||||||
CHECK(rustsecp256k1_v0_1_0_gej_is_infinity(&groupj[0]));
|
CHECK(rustsecp256k1_v0_1_1_gej_is_infinity(&groupj[0]));
|
||||||
for (i = 1; i < order; i++) {
|
for (i = 1; i < order; i++) {
|
||||||
CHECK(!rustsecp256k1_v0_1_0_ge_is_infinity(&group[i]));
|
CHECK(!rustsecp256k1_v0_1_1_ge_is_infinity(&group[i]));
|
||||||
CHECK(!rustsecp256k1_v0_1_0_gej_is_infinity(&groupj[i]));
|
CHECK(!rustsecp256k1_v0_1_1_gej_is_infinity(&groupj[i]));
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Check all addition formulae */
|
/* Check all addition formulae */
|
||||||
for (j = 0; j < order; j++) {
|
for (j = 0; j < order; j++) {
|
||||||
rustsecp256k1_v0_1_0_fe fe_inv;
|
rustsecp256k1_v0_1_1_fe fe_inv;
|
||||||
rustsecp256k1_v0_1_0_fe_inv(&fe_inv, &groupj[j].z);
|
rustsecp256k1_v0_1_1_fe_inv(&fe_inv, &groupj[j].z);
|
||||||
for (i = 0; i < order; i++) {
|
for (i = 0; i < order; i++) {
|
||||||
rustsecp256k1_v0_1_0_ge zless_gej;
|
rustsecp256k1_v0_1_1_ge zless_gej;
|
||||||
rustsecp256k1_v0_1_0_gej tmp;
|
rustsecp256k1_v0_1_1_gej tmp;
|
||||||
/* add_var */
|
/* add_var */
|
||||||
rustsecp256k1_v0_1_0_gej_add_var(&tmp, &groupj[i], &groupj[j], NULL);
|
rustsecp256k1_v0_1_1_gej_add_var(&tmp, &groupj[i], &groupj[j], NULL);
|
||||||
ge_equals_gej(&group[(i + j) % order], &tmp);
|
ge_equals_gej(&group[(i + j) % order], &tmp);
|
||||||
/* add_ge */
|
/* add_ge */
|
||||||
if (j > 0) {
|
if (j > 0) {
|
||||||
rustsecp256k1_v0_1_0_gej_add_ge(&tmp, &groupj[i], &group[j]);
|
rustsecp256k1_v0_1_1_gej_add_ge(&tmp, &groupj[i], &group[j]);
|
||||||
ge_equals_gej(&group[(i + j) % order], &tmp);
|
ge_equals_gej(&group[(i + j) % order], &tmp);
|
||||||
}
|
}
|
||||||
/* add_ge_var */
|
/* add_ge_var */
|
||||||
rustsecp256k1_v0_1_0_gej_add_ge_var(&tmp, &groupj[i], &group[j], NULL);
|
rustsecp256k1_v0_1_1_gej_add_ge_var(&tmp, &groupj[i], &group[j], NULL);
|
||||||
ge_equals_gej(&group[(i + j) % order], &tmp);
|
ge_equals_gej(&group[(i + j) % order], &tmp);
|
||||||
/* add_zinv_var */
|
/* add_zinv_var */
|
||||||
zless_gej.infinity = groupj[j].infinity;
|
zless_gej.infinity = groupj[j].infinity;
|
||||||
zless_gej.x = groupj[j].x;
|
zless_gej.x = groupj[j].x;
|
||||||
zless_gej.y = groupj[j].y;
|
zless_gej.y = groupj[j].y;
|
||||||
rustsecp256k1_v0_1_0_gej_add_zinv_var(&tmp, &groupj[i], &zless_gej, &fe_inv);
|
rustsecp256k1_v0_1_1_gej_add_zinv_var(&tmp, &groupj[i], &zless_gej, &fe_inv);
|
||||||
ge_equals_gej(&group[(i + j) % order], &tmp);
|
ge_equals_gej(&group[(i + j) % order], &tmp);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Check doubling */
|
/* Check doubling */
|
||||||
for (i = 0; i < order; i++) {
|
for (i = 0; i < order; i++) {
|
||||||
rustsecp256k1_v0_1_0_gej tmp;
|
rustsecp256k1_v0_1_1_gej tmp;
|
||||||
if (i > 0) {
|
if (i > 0) {
|
||||||
rustsecp256k1_v0_1_0_gej_double_nonzero(&tmp, &groupj[i], NULL);
|
rustsecp256k1_v0_1_1_gej_double_nonzero(&tmp, &groupj[i], NULL);
|
||||||
ge_equals_gej(&group[(2 * i) % order], &tmp);
|
ge_equals_gej(&group[(2 * i) % order], &tmp);
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_gej_double_var(&tmp, &groupj[i], NULL);
|
rustsecp256k1_v0_1_1_gej_double_var(&tmp, &groupj[i], NULL);
|
||||||
ge_equals_gej(&group[(2 * i) % order], &tmp);
|
ge_equals_gej(&group[(2 * i) % order], &tmp);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Check negation */
|
/* Check negation */
|
||||||
for (i = 1; i < order; i++) {
|
for (i = 1; i < order; i++) {
|
||||||
rustsecp256k1_v0_1_0_ge tmp;
|
rustsecp256k1_v0_1_1_ge tmp;
|
||||||
rustsecp256k1_v0_1_0_gej tmpj;
|
rustsecp256k1_v0_1_1_gej tmpj;
|
||||||
rustsecp256k1_v0_1_0_ge_neg(&tmp, &group[i]);
|
rustsecp256k1_v0_1_1_ge_neg(&tmp, &group[i]);
|
||||||
ge_equals_ge(&group[order - i], &tmp);
|
ge_equals_ge(&group[order - i], &tmp);
|
||||||
rustsecp256k1_v0_1_0_gej_neg(&tmpj, &groupj[i]);
|
rustsecp256k1_v0_1_1_gej_neg(&tmpj, &groupj[i]);
|
||||||
ge_equals_gej(&group[order - i], &tmpj);
|
ge_equals_gej(&group[order - i], &tmpj);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void test_exhaustive_ecmult(const rustsecp256k1_v0_1_0_context *ctx, const rustsecp256k1_v0_1_0_ge *group, const rustsecp256k1_v0_1_0_gej *groupj, int order) {
|
void test_exhaustive_ecmult(const rustsecp256k1_v0_1_1_context *ctx, const rustsecp256k1_v0_1_1_ge *group, const rustsecp256k1_v0_1_1_gej *groupj, int order) {
|
||||||
int i, j, r_log;
|
int i, j, r_log;
|
||||||
for (r_log = 1; r_log < order; r_log++) {
|
for (r_log = 1; r_log < order; r_log++) {
|
||||||
for (j = 0; j < order; j++) {
|
for (j = 0; j < order; j++) {
|
||||||
for (i = 0; i < order; i++) {
|
for (i = 0; i < order; i++) {
|
||||||
rustsecp256k1_v0_1_0_gej tmp;
|
rustsecp256k1_v0_1_1_gej tmp;
|
||||||
rustsecp256k1_v0_1_0_scalar na, ng;
|
rustsecp256k1_v0_1_1_scalar na, ng;
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&na, i);
|
rustsecp256k1_v0_1_1_scalar_set_int(&na, i);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&ng, j);
|
rustsecp256k1_v0_1_1_scalar_set_int(&ng, j);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_ecmult(&ctx->ecmult_ctx, &tmp, &groupj[r_log], &na, &ng);
|
rustsecp256k1_v0_1_1_ecmult(&ctx->ecmult_ctx, &tmp, &groupj[r_log], &na, &ng);
|
||||||
ge_equals_gej(&group[(i * r_log + j) % order], &tmp);
|
ge_equals_gej(&group[(i * r_log + j) % order], &tmp);
|
||||||
|
|
||||||
if (i > 0) {
|
if (i > 0) {
|
||||||
rustsecp256k1_v0_1_0_ecmult_const(&tmp, &group[i], &ng, 256);
|
rustsecp256k1_v0_1_1_ecmult_const(&tmp, &group[i], &ng, 256);
|
||||||
ge_equals_gej(&group[(i * j) % order], &tmp);
|
ge_equals_gej(&group[(i * j) % order], &tmp);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -183,106 +183,106 @@ void test_exhaustive_ecmult(const rustsecp256k1_v0_1_0_context *ctx, const rusts
|
||||||
}
|
}
|
||||||
|
|
||||||
typedef struct {
|
typedef struct {
|
||||||
rustsecp256k1_v0_1_0_scalar sc[2];
|
rustsecp256k1_v0_1_1_scalar sc[2];
|
||||||
rustsecp256k1_v0_1_0_ge pt[2];
|
rustsecp256k1_v0_1_1_ge pt[2];
|
||||||
} ecmult_multi_data;
|
} ecmult_multi_data;
|
||||||
|
|
||||||
static int ecmult_multi_callback(rustsecp256k1_v0_1_0_scalar *sc, rustsecp256k1_v0_1_0_ge *pt, size_t idx, void *cbdata) {
|
static int ecmult_multi_callback(rustsecp256k1_v0_1_1_scalar *sc, rustsecp256k1_v0_1_1_ge *pt, size_t idx, void *cbdata) {
|
||||||
ecmult_multi_data *data = (ecmult_multi_data*) cbdata;
|
ecmult_multi_data *data = (ecmult_multi_data*) cbdata;
|
||||||
*sc = data->sc[idx];
|
*sc = data->sc[idx];
|
||||||
*pt = data->pt[idx];
|
*pt = data->pt[idx];
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
void test_exhaustive_ecmult_multi(const rustsecp256k1_v0_1_0_context *ctx, const rustsecp256k1_v0_1_0_ge *group, int order) {
|
void test_exhaustive_ecmult_multi(const rustsecp256k1_v0_1_1_context *ctx, const rustsecp256k1_v0_1_1_ge *group, int order) {
|
||||||
int i, j, k, x, y;
|
int i, j, k, x, y;
|
||||||
rustsecp256k1_v0_1_0_scratch *scratch = rustsecp256k1_v0_1_0_scratch_create(&ctx->error_callback, 4096);
|
rustsecp256k1_v0_1_1_scratch *scratch = rustsecp256k1_v0_1_1_scratch_create(&ctx->error_callback, 4096);
|
||||||
for (i = 0; i < order; i++) {
|
for (i = 0; i < order; i++) {
|
||||||
for (j = 0; j < order; j++) {
|
for (j = 0; j < order; j++) {
|
||||||
for (k = 0; k < order; k++) {
|
for (k = 0; k < order; k++) {
|
||||||
for (x = 0; x < order; x++) {
|
for (x = 0; x < order; x++) {
|
||||||
for (y = 0; y < order; y++) {
|
for (y = 0; y < order; y++) {
|
||||||
rustsecp256k1_v0_1_0_gej tmp;
|
rustsecp256k1_v0_1_1_gej tmp;
|
||||||
rustsecp256k1_v0_1_0_scalar g_sc;
|
rustsecp256k1_v0_1_1_scalar g_sc;
|
||||||
ecmult_multi_data data;
|
ecmult_multi_data data;
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&data.sc[0], i);
|
rustsecp256k1_v0_1_1_scalar_set_int(&data.sc[0], i);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&data.sc[1], j);
|
rustsecp256k1_v0_1_1_scalar_set_int(&data.sc[1], j);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&g_sc, k);
|
rustsecp256k1_v0_1_1_scalar_set_int(&g_sc, k);
|
||||||
data.pt[0] = group[x];
|
data.pt[0] = group[x];
|
||||||
data.pt[1] = group[y];
|
data.pt[1] = group[y];
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_ecmult_multi_var(&ctx->error_callback, &ctx->ecmult_ctx, scratch, &tmp, &g_sc, ecmult_multi_callback, &data, 2);
|
rustsecp256k1_v0_1_1_ecmult_multi_var(&ctx->error_callback, &ctx->ecmult_ctx, scratch, &tmp, &g_sc, ecmult_multi_callback, &data, 2);
|
||||||
ge_equals_gej(&group[(i * x + j * y + k) % order], &tmp);
|
ge_equals_gej(&group[(i * x + j * y + k) % order], &tmp);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
rustsecp256k1_v0_1_0_scratch_destroy(&ctx->error_callback, scratch);
|
rustsecp256k1_v0_1_1_scratch_destroy(&ctx->error_callback, scratch);
|
||||||
}
|
}
|
||||||
|
|
||||||
void r_from_k(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_ge *group, int k) {
|
void r_from_k(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_ge *group, int k) {
|
||||||
rustsecp256k1_v0_1_0_fe x;
|
rustsecp256k1_v0_1_1_fe x;
|
||||||
unsigned char x_bin[32];
|
unsigned char x_bin[32];
|
||||||
k %= EXHAUSTIVE_TEST_ORDER;
|
k %= EXHAUSTIVE_TEST_ORDER;
|
||||||
x = group[k].x;
|
x = group[k].x;
|
||||||
rustsecp256k1_v0_1_0_fe_normalize(&x);
|
rustsecp256k1_v0_1_1_fe_normalize(&x);
|
||||||
rustsecp256k1_v0_1_0_fe_get_b32(x_bin, &x);
|
rustsecp256k1_v0_1_1_fe_get_b32(x_bin, &x);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_b32(r, x_bin, NULL);
|
rustsecp256k1_v0_1_1_scalar_set_b32(r, x_bin, NULL);
|
||||||
}
|
}
|
||||||
|
|
||||||
void test_exhaustive_verify(const rustsecp256k1_v0_1_0_context *ctx, const rustsecp256k1_v0_1_0_ge *group, int order) {
|
void test_exhaustive_verify(const rustsecp256k1_v0_1_1_context *ctx, const rustsecp256k1_v0_1_1_ge *group, int order) {
|
||||||
int s, r, msg, key;
|
int s, r, msg, key;
|
||||||
for (s = 1; s < order; s++) {
|
for (s = 1; s < order; s++) {
|
||||||
for (r = 1; r < order; r++) {
|
for (r = 1; r < order; r++) {
|
||||||
for (msg = 1; msg < order; msg++) {
|
for (msg = 1; msg < order; msg++) {
|
||||||
for (key = 1; key < order; key++) {
|
for (key = 1; key < order; key++) {
|
||||||
rustsecp256k1_v0_1_0_ge nonconst_ge;
|
rustsecp256k1_v0_1_1_ge nonconst_ge;
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature sig;
|
rustsecp256k1_v0_1_1_ecdsa_signature sig;
|
||||||
rustsecp256k1_v0_1_0_pubkey pk;
|
rustsecp256k1_v0_1_1_pubkey pk;
|
||||||
rustsecp256k1_v0_1_0_scalar sk_s, msg_s, r_s, s_s;
|
rustsecp256k1_v0_1_1_scalar sk_s, msg_s, r_s, s_s;
|
||||||
rustsecp256k1_v0_1_0_scalar s_times_k_s, msg_plus_r_times_sk_s;
|
rustsecp256k1_v0_1_1_scalar s_times_k_s, msg_plus_r_times_sk_s;
|
||||||
int k, should_verify;
|
int k, should_verify;
|
||||||
unsigned char msg32[32];
|
unsigned char msg32[32];
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&s_s, s);
|
rustsecp256k1_v0_1_1_scalar_set_int(&s_s, s);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&r_s, r);
|
rustsecp256k1_v0_1_1_scalar_set_int(&r_s, r);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&msg_s, msg);
|
rustsecp256k1_v0_1_1_scalar_set_int(&msg_s, msg);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&sk_s, key);
|
rustsecp256k1_v0_1_1_scalar_set_int(&sk_s, key);
|
||||||
|
|
||||||
/* Verify by hand */
|
/* Verify by hand */
|
||||||
/* Run through every k value that gives us this r and check that *one* works.
|
/* Run through every k value that gives us this r and check that *one* works.
|
||||||
* Note there could be none, there could be multiple, ECDSA is weird. */
|
* Note there could be none, there could be multiple, ECDSA is weird. */
|
||||||
should_verify = 0;
|
should_verify = 0;
|
||||||
for (k = 0; k < order; k++) {
|
for (k = 0; k < order; k++) {
|
||||||
rustsecp256k1_v0_1_0_scalar check_x_s;
|
rustsecp256k1_v0_1_1_scalar check_x_s;
|
||||||
r_from_k(&check_x_s, group, k);
|
r_from_k(&check_x_s, group, k);
|
||||||
if (r_s == check_x_s) {
|
if (r_s == check_x_s) {
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&s_times_k_s, k);
|
rustsecp256k1_v0_1_1_scalar_set_int(&s_times_k_s, k);
|
||||||
rustsecp256k1_v0_1_0_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s);
|
rustsecp256k1_v0_1_1_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s);
|
||||||
rustsecp256k1_v0_1_0_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s);
|
rustsecp256k1_v0_1_1_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s);
|
||||||
rustsecp256k1_v0_1_0_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s);
|
rustsecp256k1_v0_1_1_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s);
|
||||||
should_verify |= rustsecp256k1_v0_1_0_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s);
|
should_verify |= rustsecp256k1_v0_1_1_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/* nb we have a "high s" rule */
|
/* nb we have a "high s" rule */
|
||||||
should_verify &= !rustsecp256k1_v0_1_0_scalar_is_high(&s_s);
|
should_verify &= !rustsecp256k1_v0_1_1_scalar_is_high(&s_s);
|
||||||
|
|
||||||
/* Verify by calling verify */
|
/* Verify by calling verify */
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_save(&sig, &r_s, &s_s);
|
rustsecp256k1_v0_1_1_ecdsa_signature_save(&sig, &r_s, &s_s);
|
||||||
memcpy(&nonconst_ge, &group[sk_s], sizeof(nonconst_ge));
|
memcpy(&nonconst_ge, &group[sk_s], sizeof(nonconst_ge));
|
||||||
rustsecp256k1_v0_1_0_pubkey_save(&pk, &nonconst_ge);
|
rustsecp256k1_v0_1_1_pubkey_save(&pk, &nonconst_ge);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(msg32, &msg_s);
|
rustsecp256k1_v0_1_1_scalar_get_b32(msg32, &msg_s);
|
||||||
CHECK(should_verify ==
|
CHECK(should_verify ==
|
||||||
rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pk));
|
rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pk));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void test_exhaustive_sign(const rustsecp256k1_v0_1_0_context *ctx, const rustsecp256k1_v0_1_0_ge *group, int order) {
|
void test_exhaustive_sign(const rustsecp256k1_v0_1_1_context *ctx, const rustsecp256k1_v0_1_1_ge *group, int order) {
|
||||||
int i, j, k;
|
int i, j, k;
|
||||||
|
|
||||||
/* Loop */
|
/* Loop */
|
||||||
|
@@ -290,17 +290,17 @@ void test_exhaustive_sign(const rustsecp256k1_v0_1_0_context *ctx, const rustsec
|
||||||
for (j = 1; j < order; j++) { /* key */
|
for (j = 1; j < order; j++) { /* key */
|
||||||
for (k = 1; k < order; k++) { /* nonce */
|
for (k = 1; k < order; k++) { /* nonce */
|
||||||
const int starting_k = k;
|
const int starting_k = k;
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature sig;
|
rustsecp256k1_v0_1_1_ecdsa_signature sig;
|
||||||
rustsecp256k1_v0_1_0_scalar sk, msg, r, s, expected_r;
|
rustsecp256k1_v0_1_1_scalar sk, msg, r, s, expected_r;
|
||||||
unsigned char sk32[32], msg32[32];
|
unsigned char sk32[32], msg32[32];
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&msg, i);
|
rustsecp256k1_v0_1_1_scalar_set_int(&msg, i);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&sk, j);
|
rustsecp256k1_v0_1_1_scalar_set_int(&sk, j);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(sk32, &sk);
|
rustsecp256k1_v0_1_1_scalar_get_b32(sk32, &sk);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(msg32, &msg);
|
rustsecp256k1_v0_1_1_scalar_get_b32(msg32, &msg);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_ecdsa_sign(ctx, &sig, msg32, sk32, rustsecp256k1_v0_1_0_nonce_function_smallint, &k);
|
rustsecp256k1_v0_1_1_ecdsa_sign(ctx, &sig, msg32, sk32, rustsecp256k1_v0_1_1_nonce_function_smallint, &k);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_load(ctx, &r, &s, &sig);
|
rustsecp256k1_v0_1_1_ecdsa_signature_load(ctx, &r, &s, &sig);
|
||||||
/* Note that we compute expected_r *after* signing -- this is important
|
/* Note that we compute expected_r *after* signing -- this is important
|
||||||
* because our nonce-computing function might change k during
|
* because our nonce-computing function might change k during
|
||||||
* signing. */
|
* signing. */
|
||||||
|
@@ -328,7 +328,7 @@ void test_exhaustive_sign(const rustsecp256k1_v0_1_0_context *ctx, const rustsec
 }
 
 #ifdef ENABLE_MODULE_RECOVERY
-void test_exhaustive_recovery_sign(const rustsecp256k1_v0_1_0_context *ctx, const rustsecp256k1_v0_1_0_ge *group, int order) {
+void test_exhaustive_recovery_sign(const rustsecp256k1_v0_1_1_context *ctx, const rustsecp256k1_v0_1_1_ge *group, int order) {
     int i, j, k;
 
     /* Loop */
@@ -336,44 +336,44 @@ void test_exhaustive_recovery_sign(const rustsecp256k1_v0_1_0_context *ctx, cons
|
||||||
for (j = 1; j < order; j++) { /* key */
|
for (j = 1; j < order; j++) { /* key */
|
||||||
for (k = 1; k < order; k++) { /* nonce */
|
for (k = 1; k < order; k++) { /* nonce */
|
||||||
const int starting_k = k;
|
const int starting_k = k;
|
||||||
rustsecp256k1_v0_1_0_fe r_dot_y_normalized;
|
rustsecp256k1_v0_1_1_fe r_dot_y_normalized;
|
||||||
rustsecp256k1_v0_1_0_ecdsa_recoverable_signature rsig;
|
rustsecp256k1_v0_1_1_ecdsa_recoverable_signature rsig;
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature sig;
|
rustsecp256k1_v0_1_1_ecdsa_signature sig;
|
||||||
rustsecp256k1_v0_1_0_scalar sk, msg, r, s, expected_r;
|
rustsecp256k1_v0_1_1_scalar sk, msg, r, s, expected_r;
|
||||||
unsigned char sk32[32], msg32[32];
|
unsigned char sk32[32], msg32[32];
|
||||||
int expected_recid;
|
int expected_recid;
|
||||||
int recid;
|
int recid;
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&msg, i);
|
rustsecp256k1_v0_1_1_scalar_set_int(&msg, i);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&sk, j);
|
rustsecp256k1_v0_1_1_scalar_set_int(&sk, j);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(sk32, &sk);
|
rustsecp256k1_v0_1_1_scalar_get_b32(sk32, &sk);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(msg32, &msg);
|
rustsecp256k1_v0_1_1_scalar_get_b32(msg32, &msg);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_ecdsa_sign_recoverable(ctx, &rsig, msg32, sk32, rustsecp256k1_v0_1_0_nonce_function_smallint, &k);
|
rustsecp256k1_v0_1_1_ecdsa_sign_recoverable(ctx, &rsig, msg32, sk32, rustsecp256k1_v0_1_1_nonce_function_smallint, &k);
|
||||||
|
|
||||||
/* Check directly */
|
/* Check directly */
|
||||||
rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_load(ctx, &r, &s, &recid, &rsig);
|
rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_load(ctx, &r, &s, &recid, &rsig);
|
||||||
r_from_k(&expected_r, group, k);
|
r_from_k(&expected_r, group, k);
|
||||||
CHECK(r == expected_r);
|
CHECK(r == expected_r);
|
||||||
CHECK((k * s) % order == (i + r * j) % order ||
|
CHECK((k * s) % order == (i + r * j) % order ||
|
||||||
(k * (EXHAUSTIVE_TEST_ORDER - s)) % order == (i + r * j) % order);
|
(k * (EXHAUSTIVE_TEST_ORDER - s)) % order == (i + r * j) % order);
|
||||||
/* In computing the recid, there is an overflow condition that is disabled in
|
/* In computing the recid, there is an overflow condition that is disabled in
|
||||||
* scalar_low_impl.h `rustsecp256k1_v0_1_0_scalar_set_b32` because almost every r.y value
|
* scalar_low_impl.h `rustsecp256k1_v0_1_1_scalar_set_b32` because almost every r.y value
|
||||||
* will exceed the group order, and our signing code always holds out for r
|
* will exceed the group order, and our signing code always holds out for r
|
||||||
* values that don't overflow, so with a proper overflow check the tests would
|
* values that don't overflow, so with a proper overflow check the tests would
|
||||||
* loop indefinitely. */
|
* loop indefinitely. */
|
||||||
r_dot_y_normalized = group[k].y;
|
r_dot_y_normalized = group[k].y;
|
||||||
rustsecp256k1_v0_1_0_fe_normalize(&r_dot_y_normalized);
|
rustsecp256k1_v0_1_1_fe_normalize(&r_dot_y_normalized);
|
||||||
/* Also the recovery id is flipped depending if we hit the low-s branch */
|
/* Also the recovery id is flipped depending if we hit the low-s branch */
|
||||||
if ((k * s) % order == (i + r * j) % order) {
|
if ((k * s) % order == (i + r * j) % order) {
|
||||||
expected_recid = rustsecp256k1_v0_1_0_fe_is_odd(&r_dot_y_normalized) ? 1 : 0;
|
expected_recid = rustsecp256k1_v0_1_1_fe_is_odd(&r_dot_y_normalized) ? 1 : 0;
|
||||||
} else {
|
} else {
|
||||||
expected_recid = rustsecp256k1_v0_1_0_fe_is_odd(&r_dot_y_normalized) ? 0 : 1;
|
expected_recid = rustsecp256k1_v0_1_1_fe_is_odd(&r_dot_y_normalized) ? 0 : 1;
|
||||||
}
|
}
|
||||||
CHECK(recid == expected_recid);
|
CHECK(recid == expected_recid);
|
||||||
|
|
||||||
/* Convert to a standard sig then check */
|
/* Convert to a standard sig then check */
|
||||||
rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_convert(ctx, &sig, &rsig);
|
rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_convert(ctx, &sig, &rsig);
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature_load(ctx, &r, &s, &sig);
|
rustsecp256k1_v0_1_1_ecdsa_signature_load(ctx, &r, &s, &sig);
|
||||||
/* Note that we compute expected_r *after* signing -- this is important
|
/* Note that we compute expected_r *after* signing -- this is important
|
||||||
* because our nonce-computing function might change k during
|
* because our nonce-computing function might change k during
|
||||||
* signing. */
|
* signing. */
|
||||||
|
@@ -391,46 +391,46 @@ void test_exhaustive_recovery_sign(const rustsecp256k1_v0_1_0_context *ctx, cons
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void test_exhaustive_recovery_verify(const rustsecp256k1_v0_1_0_context *ctx, const rustsecp256k1_v0_1_0_ge *group, int order) {
|
void test_exhaustive_recovery_verify(const rustsecp256k1_v0_1_1_context *ctx, const rustsecp256k1_v0_1_1_ge *group, int order) {
|
||||||
/* This is essentially a copy of test_exhaustive_verify, with recovery added */
|
/* This is essentially a copy of test_exhaustive_verify, with recovery added */
|
||||||
int s, r, msg, key;
|
int s, r, msg, key;
|
||||||
for (s = 1; s < order; s++) {
|
for (s = 1; s < order; s++) {
|
||||||
for (r = 1; r < order; r++) {
|
for (r = 1; r < order; r++) {
|
||||||
for (msg = 1; msg < order; msg++) {
|
for (msg = 1; msg < order; msg++) {
|
||||||
for (key = 1; key < order; key++) {
|
for (key = 1; key < order; key++) {
|
||||||
rustsecp256k1_v0_1_0_ge nonconst_ge;
|
rustsecp256k1_v0_1_1_ge nonconst_ge;
|
||||||
rustsecp256k1_v0_1_0_ecdsa_recoverable_signature rsig;
|
rustsecp256k1_v0_1_1_ecdsa_recoverable_signature rsig;
|
||||||
rustsecp256k1_v0_1_0_ecdsa_signature sig;
|
rustsecp256k1_v0_1_1_ecdsa_signature sig;
|
||||||
rustsecp256k1_v0_1_0_pubkey pk;
|
rustsecp256k1_v0_1_1_pubkey pk;
|
||||||
rustsecp256k1_v0_1_0_scalar sk_s, msg_s, r_s, s_s;
|
rustsecp256k1_v0_1_1_scalar sk_s, msg_s, r_s, s_s;
|
||||||
rustsecp256k1_v0_1_0_scalar s_times_k_s, msg_plus_r_times_sk_s;
|
rustsecp256k1_v0_1_1_scalar s_times_k_s, msg_plus_r_times_sk_s;
|
||||||
int recid = 0;
|
int recid = 0;
|
||||||
int k, should_verify;
|
int k, should_verify;
|
||||||
unsigned char msg32[32];
|
unsigned char msg32[32];
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&s_s, s);
|
rustsecp256k1_v0_1_1_scalar_set_int(&s_s, s);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&r_s, r);
|
rustsecp256k1_v0_1_1_scalar_set_int(&r_s, r);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&msg_s, msg);
|
rustsecp256k1_v0_1_1_scalar_set_int(&msg_s, msg);
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&sk_s, key);
|
rustsecp256k1_v0_1_1_scalar_set_int(&sk_s, key);
|
||||||
rustsecp256k1_v0_1_0_scalar_get_b32(msg32, &msg_s);
|
rustsecp256k1_v0_1_1_scalar_get_b32(msg32, &msg_s);
|
||||||
|
|
||||||
/* Verify by hand */
|
/* Verify by hand */
|
||||||
/* Run through every k value that gives us this r and check that *one* works.
|
/* Run through every k value that gives us this r and check that *one* works.
|
||||||
* Note there could be none, there could be multiple, ECDSA is weird. */
|
* Note there could be none, there could be multiple, ECDSA is weird. */
|
||||||
should_verify = 0;
|
should_verify = 0;
|
||||||
for (k = 0; k < order; k++) {
|
for (k = 0; k < order; k++) {
|
||||||
rustsecp256k1_v0_1_0_scalar check_x_s;
|
rustsecp256k1_v0_1_1_scalar check_x_s;
|
||||||
r_from_k(&check_x_s, group, k);
|
r_from_k(&check_x_s, group, k);
|
||||||
if (r_s == check_x_s) {
|
if (r_s == check_x_s) {
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&s_times_k_s, k);
|
rustsecp256k1_v0_1_1_scalar_set_int(&s_times_k_s, k);
|
||||||
rustsecp256k1_v0_1_0_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s);
|
rustsecp256k1_v0_1_1_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s);
|
||||||
rustsecp256k1_v0_1_0_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s);
|
rustsecp256k1_v0_1_1_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s);
|
||||||
rustsecp256k1_v0_1_0_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s);
|
rustsecp256k1_v0_1_1_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s);
|
||||||
should_verify |= rustsecp256k1_v0_1_0_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s);
|
should_verify |= rustsecp256k1_v0_1_1_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/* nb we have a "high s" rule */
|
/* nb we have a "high s" rule */
|
||||||
should_verify &= !rustsecp256k1_v0_1_0_scalar_is_high(&s_s);
|
should_verify &= !rustsecp256k1_v0_1_1_scalar_is_high(&s_s);
|
||||||
|
|
||||||
/* We would like to try recovering the pubkey and checking that it matches,
|
/* We would like to try recovering the pubkey and checking that it matches,
|
||||||
* but pubkey recovery is impossible in the exhaustive tests (the reason
|
* but pubkey recovery is impossible in the exhaustive tests (the reason
|
||||||
|
@@ -438,12 +438,12 @@ void test_exhaustive_recovery_verify(const rustsecp256k1_v0_1_0_context *ctx, co
|
||||||
* overlap between the sets, so there are no valid signatures). */
|
* overlap between the sets, so there are no valid signatures). */
|
||||||
|
|
||||||
/* Verify by converting to a standard signature and calling verify */
|
/* Verify by converting to a standard signature and calling verify */
|
||||||
rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_save(&rsig, &r_s, &s_s, recid);
|
rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_save(&rsig, &r_s, &s_s, recid);
|
||||||
rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_convert(ctx, &sig, &rsig);
|
rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_convert(ctx, &sig, &rsig);
|
||||||
memcpy(&nonconst_ge, &group[sk_s], sizeof(nonconst_ge));
|
memcpy(&nonconst_ge, &group[sk_s], sizeof(nonconst_ge));
|
||||||
rustsecp256k1_v0_1_0_pubkey_save(&pk, &nonconst_ge);
|
rustsecp256k1_v0_1_1_pubkey_save(&pk, &nonconst_ge);
|
||||||
CHECK(should_verify ==
|
CHECK(should_verify ==
|
||||||
rustsecp256k1_v0_1_0_ecdsa_verify(ctx, &sig, msg32, &pk));
|
rustsecp256k1_v0_1_1_ecdsa_verify(ctx, &sig, msg32, &pk));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -453,40 +453,40 @@ void test_exhaustive_recovery_verify(const rustsecp256k1_v0_1_0_context *ctx, co
|
||||||
|
|
||||||
int main(void) {
|
int main(void) {
|
||||||
int i;
|
int i;
|
||||||
rustsecp256k1_v0_1_0_gej groupj[EXHAUSTIVE_TEST_ORDER];
|
rustsecp256k1_v0_1_1_gej groupj[EXHAUSTIVE_TEST_ORDER];
|
||||||
rustsecp256k1_v0_1_0_ge group[EXHAUSTIVE_TEST_ORDER];
|
rustsecp256k1_v0_1_1_ge group[EXHAUSTIVE_TEST_ORDER];
|
||||||
|
|
||||||
/* Build context */
|
/* Build context */
|
||||||
rustsecp256k1_v0_1_0_context *ctx = rustsecp256k1_v0_1_0_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);
|
rustsecp256k1_v0_1_1_context *ctx = rustsecp256k1_v0_1_1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY);
|
||||||
|
|
||||||
/* TODO set z = 1, then do num_tests runs with random z values */
|
/* TODO set z = 1, then do num_tests runs with random z values */
|
||||||
|
|
||||||
/* Generate the entire group */
|
/* Generate the entire group */
|
||||||
rustsecp256k1_v0_1_0_gej_set_infinity(&groupj[0]);
|
rustsecp256k1_v0_1_1_gej_set_infinity(&groupj[0]);
|
||||||
rustsecp256k1_v0_1_0_ge_set_gej(&group[0], &groupj[0]);
|
rustsecp256k1_v0_1_1_ge_set_gej(&group[0], &groupj[0]);
|
||||||
for (i = 1; i < EXHAUSTIVE_TEST_ORDER; i++) {
|
for (i = 1; i < EXHAUSTIVE_TEST_ORDER; i++) {
|
||||||
/* Set a different random z-value for each Jacobian point */
|
/* Set a different random z-value for each Jacobian point */
|
||||||
rustsecp256k1_v0_1_0_fe z;
|
rustsecp256k1_v0_1_1_fe z;
|
||||||
random_fe(&z);
|
random_fe(&z);
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_gej_add_ge(&groupj[i], &groupj[i - 1], &rustsecp256k1_v0_1_0_ge_const_g);
|
rustsecp256k1_v0_1_1_gej_add_ge(&groupj[i], &groupj[i - 1], &rustsecp256k1_v0_1_1_ge_const_g);
|
||||||
rustsecp256k1_v0_1_0_ge_set_gej(&group[i], &groupj[i]);
|
rustsecp256k1_v0_1_1_ge_set_gej(&group[i], &groupj[i]);
|
||||||
rustsecp256k1_v0_1_0_gej_rescale(&groupj[i], &z);
|
rustsecp256k1_v0_1_1_gej_rescale(&groupj[i], &z);
|
||||||
|
|
||||||
/* Verify against ecmult_gen */
|
/* Verify against ecmult_gen */
|
||||||
{
|
{
|
||||||
rustsecp256k1_v0_1_0_scalar scalar_i;
|
rustsecp256k1_v0_1_1_scalar scalar_i;
|
||||||
rustsecp256k1_v0_1_0_gej generatedj;
|
rustsecp256k1_v0_1_1_gej generatedj;
|
||||||
rustsecp256k1_v0_1_0_ge generated;
|
rustsecp256k1_v0_1_1_ge generated;
|
||||||
|
|
||||||
rustsecp256k1_v0_1_0_scalar_set_int(&scalar_i, i);
|
rustsecp256k1_v0_1_1_scalar_set_int(&scalar_i, i);
|
||||||
rustsecp256k1_v0_1_0_ecmult_gen(&ctx->ecmult_gen_ctx, &generatedj, &scalar_i);
|
rustsecp256k1_v0_1_1_ecmult_gen(&ctx->ecmult_gen_ctx, &generatedj, &scalar_i);
|
||||||
rustsecp256k1_v0_1_0_ge_set_gej(&generated, &generatedj);
|
rustsecp256k1_v0_1_1_ge_set_gej(&generated, &generatedj);
|
||||||
|
|
||||||
CHECK(group[i].infinity == 0);
|
CHECK(group[i].infinity == 0);
|
||||||
CHECK(generated.infinity == 0);
|
CHECK(generated.infinity == 0);
|
||||||
CHECK(rustsecp256k1_v0_1_0_fe_equal_var(&generated.x, &group[i].x));
|
CHECK(rustsecp256k1_v0_1_1_fe_equal_var(&generated.x, &group[i].x));
|
||||||
CHECK(rustsecp256k1_v0_1_0_fe_equal_var(&generated.y, &group[i].y));
|
CHECK(rustsecp256k1_v0_1_1_fe_equal_var(&generated.y, &group[i].y));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -505,7 +505,7 @@ int main(void) {
     test_exhaustive_recovery_verify(ctx, group, EXHAUSTIVE_TEST_ORDER);
 #endif
 
-    rustsecp256k1_v0_1_0_context_destroy(ctx);
+    rustsecp256k1_v0_1_1_context_destroy(ctx);
     return 0;
 }
 
@@ -18,9 +18,9 @@
 typedef struct {
     void (*fn)(const char *text, void* data);
     const void* data;
-} rustsecp256k1_v0_1_0_callback;
+} rustsecp256k1_v0_1_1_callback;
 
-static SECP256K1_INLINE void rustsecp256k1_v0_1_0_callback_call(const rustsecp256k1_v0_1_0_callback * const cb, const char * const text) {
+static SECP256K1_INLINE void rustsecp256k1_v0_1_1_callback_call(const rustsecp256k1_v0_1_1_callback * const cb, const char * const text) {
    cb->fn(text, (void*)cb->data);
 }
 
@@ -138,91 +138,91 @@ impl Default for Signature {
|
||||||
#[cfg(not(feature = "fuzztarget"))]
|
#[cfg(not(feature = "fuzztarget"))]
|
||||||
extern "C" {
|
extern "C" {
|
||||||
/// Default ECDH hash function
|
/// Default ECDH hash function
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdh_hash_function_default")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdh_hash_function_default")]
|
||||||
pub static secp256k1_ecdh_hash_function_default: EcdhHashFn;
|
pub static secp256k1_ecdh_hash_function_default: EcdhHashFn;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_nonce_function_rfc6979")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_nonce_function_rfc6979")]
|
||||||
pub static secp256k1_nonce_function_rfc6979: NonceFn;
|
pub static secp256k1_nonce_function_rfc6979: NonceFn;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_nonce_function_default")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_nonce_function_default")]
|
||||||
pub static secp256k1_nonce_function_default: NonceFn;
|
pub static secp256k1_nonce_function_default: NonceFn;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_context_no_precomp")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_context_no_precomp")]
|
||||||
pub static secp256k1_context_no_precomp: *const Context;
|
pub static secp256k1_context_no_precomp: *const Context;
|
||||||
|
|
||||||
// Contexts
|
// Contexts
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_context_preallocated_size")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_context_preallocated_size")]
|
||||||
pub fn secp256k1_context_preallocated_size(flags: c_uint) -> usize;
|
pub fn secp256k1_context_preallocated_size(flags: c_uint) -> usize;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_context_preallocated_create")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_context_preallocated_create")]
|
||||||
pub fn secp256k1_context_preallocated_create(prealloc: *mut c_void, flags: c_uint) -> *mut Context;
|
pub fn secp256k1_context_preallocated_create(prealloc: *mut c_void, flags: c_uint) -> *mut Context;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_context_preallocated_destroy")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_context_preallocated_destroy")]
|
||||||
pub fn secp256k1_context_preallocated_destroy(cx: *mut Context);
|
pub fn secp256k1_context_preallocated_destroy(cx: *mut Context);
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_context_preallocated_clone_size")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_context_preallocated_clone_size")]
|
||||||
pub fn secp256k1_context_preallocated_clone_size(cx: *const Context) -> usize;
|
pub fn secp256k1_context_preallocated_clone_size(cx: *const Context) -> usize;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_context_preallocated_clone")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_context_preallocated_clone")]
|
||||||
pub fn secp256k1_context_preallocated_clone(cx: *const Context, prealloc: *mut c_void) -> *mut Context;
|
pub fn secp256k1_context_preallocated_clone(cx: *const Context, prealloc: *mut c_void) -> *mut Context;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_context_randomize")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_context_randomize")]
|
||||||
pub fn secp256k1_context_randomize(cx: *mut Context,
|
pub fn secp256k1_context_randomize(cx: *mut Context,
|
||||||
seed32: *const c_uchar)
|
seed32: *const c_uchar)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
// Pubkeys
|
// Pubkeys
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ec_pubkey_parse")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ec_pubkey_parse")]
|
||||||
pub fn secp256k1_ec_pubkey_parse(cx: *const Context, pk: *mut PublicKey,
|
pub fn secp256k1_ec_pubkey_parse(cx: *const Context, pk: *mut PublicKey,
|
||||||
input: *const c_uchar, in_len: usize)
|
input: *const c_uchar, in_len: usize)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ec_pubkey_serialize")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ec_pubkey_serialize")]
|
||||||
pub fn secp256k1_ec_pubkey_serialize(cx: *const Context, output: *mut c_uchar,
|
pub fn secp256k1_ec_pubkey_serialize(cx: *const Context, output: *mut c_uchar,
|
||||||
out_len: *mut usize, pk: *const PublicKey,
|
out_len: *mut usize, pk: *const PublicKey,
|
||||||
compressed: c_uint)
|
compressed: c_uint)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
// Signatures
|
// Signatures
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_signature_parse_der")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_signature_parse_der")]
|
||||||
pub fn secp256k1_ecdsa_signature_parse_der(cx: *const Context, sig: *mut Signature,
|
pub fn secp256k1_ecdsa_signature_parse_der(cx: *const Context, sig: *mut Signature,
|
||||||
input: *const c_uchar, in_len: usize)
|
input: *const c_uchar, in_len: usize)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_signature_parse_compact")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_signature_parse_compact")]
|
||||||
pub fn secp256k1_ecdsa_signature_parse_compact(cx: *const Context, sig: *mut Signature,
|
pub fn secp256k1_ecdsa_signature_parse_compact(cx: *const Context, sig: *mut Signature,
|
||||||
input64: *const c_uchar)
|
input64: *const c_uchar)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_signature_parse_der_lax")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_signature_parse_der_lax")]
|
||||||
pub fn ecdsa_signature_parse_der_lax(cx: *const Context, sig: *mut Signature,
|
pub fn ecdsa_signature_parse_der_lax(cx: *const Context, sig: *mut Signature,
|
||||||
input: *const c_uchar, in_len: usize)
|
input: *const c_uchar, in_len: usize)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_signature_serialize_der")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_signature_serialize_der")]
|
||||||
pub fn secp256k1_ecdsa_signature_serialize_der(cx: *const Context, output: *mut c_uchar,
|
pub fn secp256k1_ecdsa_signature_serialize_der(cx: *const Context, output: *mut c_uchar,
|
||||||
out_len: *mut usize, sig: *const Signature)
|
out_len: *mut usize, sig: *const Signature)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_signature_serialize_compact")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_signature_serialize_compact")]
|
||||||
pub fn secp256k1_ecdsa_signature_serialize_compact(cx: *const Context, output64: *mut c_uchar,
|
pub fn secp256k1_ecdsa_signature_serialize_compact(cx: *const Context, output64: *mut c_uchar,
|
||||||
sig: *const Signature)
|
sig: *const Signature)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_signature_normalize")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_signature_normalize")]
|
||||||
pub fn secp256k1_ecdsa_signature_normalize(cx: *const Context, out_sig: *mut Signature,
|
pub fn secp256k1_ecdsa_signature_normalize(cx: *const Context, out_sig: *mut Signature,
|
||||||
in_sig: *const Signature)
|
in_sig: *const Signature)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
// ECDSA
|
// ECDSA
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_verify")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_verify")]
|
||||||
pub fn secp256k1_ecdsa_verify(cx: *const Context,
|
pub fn secp256k1_ecdsa_verify(cx: *const Context,
|
||||||
sig: *const Signature,
|
sig: *const Signature,
|
||||||
msg32: *const c_uchar,
|
msg32: *const c_uchar,
|
||||||
pk: *const PublicKey)
|
pk: *const PublicKey)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_sign")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_sign")]
|
||||||
pub fn secp256k1_ecdsa_sign(cx: *const Context,
|
pub fn secp256k1_ecdsa_sign(cx: *const Context,
|
||||||
sig: *mut Signature,
|
sig: *mut Signature,
|
||||||
msg32: *const c_uchar,
|
msg32: *const c_uchar,
|
||||||
|
@@ -232,49 +232,49 @@ extern "C" {
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
// EC
|
// EC
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ec_seckey_verify")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ec_seckey_verify")]
|
||||||
pub fn secp256k1_ec_seckey_verify(cx: *const Context,
|
pub fn secp256k1_ec_seckey_verify(cx: *const Context,
|
||||||
sk: *const c_uchar) -> c_int;
|
sk: *const c_uchar) -> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ec_pubkey_create")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ec_pubkey_create")]
|
||||||
pub fn secp256k1_ec_pubkey_create(cx: *const Context, pk: *mut PublicKey,
|
pub fn secp256k1_ec_pubkey_create(cx: *const Context, pk: *mut PublicKey,
|
||||||
sk: *const c_uchar) -> c_int;
|
sk: *const c_uchar) -> c_int;
|
||||||
|
|
||||||
//TODO secp256k1_ec_privkey_export
|
//TODO secp256k1_ec_privkey_export
|
||||||
//TODO secp256k1_ec_privkey_import
|
//TODO secp256k1_ec_privkey_import
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ec_privkey_tweak_add")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ec_privkey_tweak_add")]
|
||||||
pub fn secp256k1_ec_privkey_tweak_add(cx: *const Context,
|
pub fn secp256k1_ec_privkey_tweak_add(cx: *const Context,
|
||||||
sk: *mut c_uchar,
|
sk: *mut c_uchar,
|
||||||
tweak: *const c_uchar)
|
tweak: *const c_uchar)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ec_pubkey_tweak_add")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ec_pubkey_tweak_add")]
|
||||||
pub fn secp256k1_ec_pubkey_tweak_add(cx: *const Context,
|
pub fn secp256k1_ec_pubkey_tweak_add(cx: *const Context,
|
||||||
pk: *mut PublicKey,
|
pk: *mut PublicKey,
|
||||||
tweak: *const c_uchar)
|
tweak: *const c_uchar)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ec_privkey_tweak_mul")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ec_privkey_tweak_mul")]
|
||||||
pub fn secp256k1_ec_privkey_tweak_mul(cx: *const Context,
|
pub fn secp256k1_ec_privkey_tweak_mul(cx: *const Context,
|
||||||
sk: *mut c_uchar,
|
sk: *mut c_uchar,
|
||||||
tweak: *const c_uchar)
|
tweak: *const c_uchar)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ec_pubkey_tweak_mul")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ec_pubkey_tweak_mul")]
|
||||||
pub fn secp256k1_ec_pubkey_tweak_mul(cx: *const Context,
|
pub fn secp256k1_ec_pubkey_tweak_mul(cx: *const Context,
|
||||||
pk: *mut PublicKey,
|
pk: *mut PublicKey,
|
||||||
tweak: *const c_uchar)
|
tweak: *const c_uchar)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ec_pubkey_combine")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ec_pubkey_combine")]
|
||||||
pub fn secp256k1_ec_pubkey_combine(cx: *const Context,
|
pub fn secp256k1_ec_pubkey_combine(cx: *const Context,
|
||||||
out: *mut PublicKey,
|
out: *mut PublicKey,
|
||||||
ins: *const *const PublicKey,
|
ins: *const *const PublicKey,
|
||||||
n: c_int)
|
n: c_int)
|
||||||
-> c_int;
|
-> c_int;
|
||||||
|
|
||||||
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdh")]
|
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdh")]
|
||||||
pub fn secp256k1_ecdh(
|
pub fn secp256k1_ecdh(
|
||||||
cx: *const Context,
|
cx: *const Context,
|
||||||
output: *mut c_uchar,
|
output: *mut c_uchar,
|
||||||
|
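Aside (illustrative, not part of the diff): the renames above only touch the link_name side of each declaration, so Rust callers keep the stable secp256k1_* names while the linker is redirected to the version-prefixed C symbols. A minimal, self-contained sketch of that pattern, using a made-up rustexample_v0_1_1_answer symbol rather than anything from this crate:

use std::os::raw::c_int;

// A symbol exported under a version-prefixed name, standing in for the
// rustsecp256k1_v0_1_1_* symbols produced by the vendored C code.
#[no_mangle]
pub extern "C" fn rustexample_v0_1_1_answer() -> c_int {
    42
}

extern "C" {
    // The Rust-visible name stays unprefixed; link_name redirects it to the
    // prefixed symbol, just as the declarations in this diff do.
    #[link_name = "rustexample_v0_1_1_answer"]
    fn example_answer() -> c_int;
}

fn main() {
    // Resolves to rustexample_v0_1_1_answer at link time.
    assert_eq!(unsafe { example_answer() }, 42);
}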
@@ -296,7 +296,7 @@ extern "C" {
// In: flags: which parts of the context to initialize.
#[no_mangle]
#[cfg(all(feature = "std", not(feature = "external_symbols")))]
pub unsafe extern "C" fn rustsecp256k1_v0_1_0_context_create(flags: c_uint) -> *mut Context {
pub unsafe extern "C" fn rustsecp256k1_v0_1_1_context_create(flags: c_uint) -> *mut Context {
use std::mem;
assert!(mem::align_of::<usize>() >= mem::align_of::<u8>());
assert_eq!(mem::size_of::<usize>(), mem::size_of::<&usize>());
@@ -314,7 +314,7 @@ pub unsafe extern "C" fn rustsecp256k1_v0_1_0_context_create(flags: c_uint) -> *

#[cfg(all(feature = "std", not(feature = "external_symbols")))]
pub unsafe fn secp256k1_context_create(flags: c_uint) -> *mut Context {
rustsecp256k1_v0_1_0_context_create(flags)
rustsecp256k1_v0_1_1_context_create(flags)
}

/// A reimplementation of the C function `secp256k1_context_destroy` in rust.
@@ -325,7 +325,7 @@ pub unsafe fn secp256k1_context_create(flags: c_uint) -> *mut Context {
///
#[no_mangle]
#[cfg(all(feature = "std", not(feature = "external_symbols")))]
pub unsafe extern "C" fn rustsecp256k1_v0_1_0_context_destroy(ctx: *mut Context) {
pub unsafe extern "C" fn rustsecp256k1_v0_1_1_context_destroy(ctx: *mut Context) {
secp256k1_context_preallocated_destroy(ctx);
let ctx: *mut usize = ctx as *mut usize;

@@ -337,7 +337,7 @@ pub unsafe extern "C" fn rustsecp256k1_v0_1_0_context_destroy(ctx: *mut Context)

#[cfg(all(feature = "std", not(feature = "external_symbols")))]
pub unsafe fn secp256k1_context_destroy(ctx: *mut Context) {
rustsecp256k1_v0_1_0_context_destroy(ctx)
rustsecp256k1_v0_1_1_context_destroy(ctx)
}


@@ -361,7 +361,7 @@ pub unsafe fn secp256k1_context_destroy(ctx: *mut Context) {
///
#[no_mangle]
#[cfg(not(feature = "external_symbols"))]
pub unsafe extern "C" fn rustsecp256k1_v0_1_0_default_illegal_callback_fn(message: *const c_char, _data: *mut c_void) {
pub unsafe extern "C" fn rustsecp256k1_v0_1_1_default_illegal_callback_fn(message: *const c_char, _data: *mut c_void) {
use core::str;
let msg_slice = slice::from_raw_parts(message as *const u8, strlen(message));
let msg = str::from_utf8_unchecked(msg_slice);
@@ -384,7 +384,7 @@ pub unsafe extern "C" fn rustsecp256k1_v0_1_0_default_illegal_callback_fn(messag
///
#[no_mangle]
#[cfg(not(feature = "external_symbols"))]
pub unsafe extern "C" fn rustsecp256k1_v0_1_0_default_error_callback_fn(message: *const c_char, _data: *mut c_void) {
pub unsafe extern "C" fn rustsecp256k1_v0_1_1_default_error_callback_fn(message: *const c_char, _data: *mut c_void) {
use core::str;
let msg_slice = slice::from_raw_parts(message as *const u8, strlen(message));
let msg = str::from_utf8_unchecked(msg_slice);
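Aside (illustrative, not part of the diff): the two callback shims above convert the NUL-terminated C message into a Rust &str before panicking. A self-contained sketch of that conversion, with a stand-in strlen so the snippet compiles on its own (the real code uses the crate's strlen binding):

use std::os::raw::c_char;
use std::slice;
use std::str;

// Stand-in for libc strlen, included only to keep this sketch self-contained.
unsafe fn strlen(mut p: *const c_char) -> usize {
    let mut n = 0;
    while *p != 0 {
        p = p.add(1);
        n += 1;
    }
    n
}

// The same conversion the default_*_callback_fn shims perform on the message pointer.
unsafe fn message_to_str<'a>(message: *const c_char) -> &'a str {
    let msg_slice = slice::from_raw_parts(message as *const u8, strlen(message));
    str::from_utf8_unchecked(msg_slice)
}

fn main() {
    let raw = b"illegal argument\0";
    let msg = unsafe { message_to_str(raw.as_ptr() as *const c_char) };
    assert_eq!(msg, "illegal argument");
}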
@@ -41,21 +41,21 @@ impl Default for RecoverableSignature

#[cfg(not(feature = "fuzztarget"))]
extern "C" {
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_parse_compact")]
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_parse_compact")]
pub fn secp256k1_ecdsa_recoverable_signature_parse_compact(cx: *const Context, sig: *mut RecoverableSignature,
input64: *const c_uchar, recid: c_int)
-> c_int;

#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_serialize_compact")]
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_serialize_compact")]
pub fn secp256k1_ecdsa_recoverable_signature_serialize_compact(cx: *const Context, output64: *mut c_uchar,
recid: *mut c_int, sig: *const RecoverableSignature)
-> c_int;

#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_recoverable_signature_convert")]
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_recoverable_signature_convert")]
pub fn secp256k1_ecdsa_recoverable_signature_convert(cx: *const Context, sig: *mut Signature,
input: *const RecoverableSignature)
-> c_int;
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_sign_recoverable")]
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_sign_recoverable")]
pub fn secp256k1_ecdsa_sign_recoverable(cx: *const Context,
sig: *mut RecoverableSignature,
msg32: *const c_uchar,
@@ -64,7 +64,7 @@ extern "C" {
noncedata: *const c_void)
-> c_int;

#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_0_ecdsa_recover")]
#[cfg_attr(not(feature = "external-symbols"), link_name = "rustsecp256k1_v0_1_1_ecdsa_recover")]
pub fn secp256k1_ecdsa_recover(cx: *const Context,
pk: *mut PublicKey,
sig: *const RecoverableSignature,
@@ -55,7 +55,7 @@ find "$DIR" -not -path '*/\.*' -type f -print0 | xargs -0 sed -i "/^#include/! s
find "$DIR" -not -path '*/\.*' -type f -print0 | xargs -0 sed -i 's/^const int CURVE_B/static const int CURVE_B/g'

while true; do
read -r -p "Update Rust extern references as well? [yn]: " yn
read -r -p "Update Rust extern references and Cargo.toml as well? [yn]: " yn
case $yn in
[Yy]* ) break;;
[Nn]* ) exit;;
@@ -64,5 +64,10 @@ while true; do
done

cd "$ORIGDIR"

# Update the `links = ` in the manifest file.
sed -i -r "s/^links = \".*\"$/links = \"rustsecp256k1_v${VERSIONCODE}\"/" Cargo.toml

# Update the extern references in the Rust FFI source files.
find "./src/" -name "*.rs" -type f -print0 | xargs -0 sed -i -r "s/rustsecp256k1_v[0-9]+_[0-9]+_[0-9]+_(.*)([\"\(])/rustsecp256k1_v${VERSIONCODE}_\1\2/g"
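Aside (illustrative, not part of the script): the new sed line rewrites the links value in Cargo.toml so it carries the same prefix as the C symbols; with VERSIONCODE assumed to be the underscored crate version, e.g. 0_1_1, it turns links = "secp256k1" into links = "rustsecp256k1_v0_1_1". A small Rust sketch of the same substitution, using a hypothetical update_links_field helper and assuming the field sits on its own line as it does in the manifest:

// Hypothetical helper reproducing the sed rule in plain Rust; not part of the project's tooling.
fn update_links_field(manifest: &str, versioncode: &str) -> String {
    manifest
        .lines()
        .map(|line| {
            if line.starts_with("links = \"") && line.ends_with('"') {
                // Matches the sed pattern ^links = ".*"$ and swaps in the prefixed value.
                format!("links = \"rustsecp256k1_v{}\"", versioncode)
            } else {
                line.to_string()
            }
        })
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    let before = "name = \"secp256k1-sys\"\nlinks = \"secp256k1\"";
    let after = update_links_field(before, "0_1_1");
    assert_eq!(after, "name = \"secp256k1-sys\"\nlinks = \"rustsecp256k1_v0_1_1\"");
}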