/**********************************************************************
 * Copyright (c) 2014 Pieter Wuille                                   *
 * Distributed under the MIT software license, see the accompanying   *
 * file COPYING or http://www.opensource.org/licenses/mit-license.php.*
 **********************************************************************/

#ifndef SECP256K1_SCALAR_H
#define SECP256K1_SCALAR_H

#include "num.h"

#if defined HAVE_CONFIG_H
#include "libsecp256k1-config.h"
#endif

/* Select the scalar representation: a tiny order for exhaustive tests,
 * 4x64-bit limbs on 64-bit targets, or 8x32-bit limbs on 32-bit targets. */
#if defined(EXHAUSTIVE_TEST_ORDER)
#include "scalar_low.h"
#elif defined(USE_SCALAR_4X64)
#include "scalar_4x64.h"
#elif defined(USE_SCALAR_8X32)
#include "scalar_8x32.h"
#else
#error "Please select scalar implementation"
#endif

/** Clear a scalar to prevent the leak of sensitive data. */
static void rustsecp256k1_v0_1_0_scalar_clear(rustsecp256k1_v0_1_0_scalar *r);

/** Access bits from a scalar. All requested bits must belong to the same 32-bit limb. */
static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count);

/** Access bits from a scalar. Not constant time. */
static unsigned int rustsecp256k1_v0_1_0_scalar_get_bits_var(const rustsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count);

/** Set a scalar from a big endian byte array. */
static void rustsecp256k1_v0_1_0_scalar_set_b32(rustsecp256k1_v0_1_0_scalar *r, const unsigned char *bin, int *overflow);

/** Set a scalar to an unsigned integer. */
static void rustsecp256k1_v0_1_0_scalar_set_int(rustsecp256k1_v0_1_0_scalar *r, unsigned int v);

/** Convert a scalar to a byte array. */
static void rustsecp256k1_v0_1_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_0_scalar* a);

/** Add two scalars together (modulo the group order). Returns whether it overflowed. */
static int rustsecp256k1_v0_1_0_scalar_add(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b);

/** Conditionally add a power of two to a scalar. The result is not allowed to overflow. */
static void rustsecp256k1_v0_1_0_scalar_cadd_bit(rustsecp256k1_v0_1_0_scalar *r, unsigned int bit, int flag);

/** Multiply two scalars (modulo the group order). */
static void rustsecp256k1_v0_1_0_scalar_mul(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b);

/** Shift a scalar right by some amount strictly between 0 and 16, returning
 *  the low bits that were shifted off */
static int rustsecp256k1_v0_1_0_scalar_shr_int(rustsecp256k1_v0_1_0_scalar *r, int n);

/** Compute the square of a scalar (modulo the group order). */
static void rustsecp256k1_v0_1_0_scalar_sqr(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a);

/** Compute the inverse of a scalar (modulo the group order). */
static void rustsecp256k1_v0_1_0_scalar_inverse(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a);

/** Compute the inverse of a scalar (modulo the group order), without constant-time guarantee. */
static void rustsecp256k1_v0_1_0_scalar_inverse_var(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a);

/** Compute the complement of a scalar (modulo the group order). */
static void rustsecp256k1_v0_1_0_scalar_negate(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a);

/** Check whether a scalar equals zero. */
static int rustsecp256k1_v0_1_0_scalar_is_zero(const rustsecp256k1_v0_1_0_scalar *a);

/** Check whether a scalar equals one. */
static int rustsecp256k1_v0_1_0_scalar_is_one(const rustsecp256k1_v0_1_0_scalar *a);

/** Check whether a scalar, considered as an nonnegative integer, is even. */
static int rustsecp256k1_v0_1_0_scalar_is_even(const rustsecp256k1_v0_1_0_scalar *a);

/** Check whether a scalar is higher than the group order divided by 2. */
static int rustsecp256k1_v0_1_0_scalar_is_high(const rustsecp256k1_v0_1_0_scalar *a);

/** Conditionally negate a number, in constant time.
 * Returns -1 if the number was negated, 1 otherwise */
static int rustsecp256k1_v0_1_0_scalar_cond_negate(rustsecp256k1_v0_1_0_scalar *a, int flag);

#ifndef USE_NUM_NONE
/** Convert a scalar to a number. */
static void rustsecp256k1_v0_1_0_scalar_get_num(rustsecp256k1_v0_1_0_num *r, const rustsecp256k1_v0_1_0_scalar *a);

/** Get the order of the group as a number. */
static void rustsecp256k1_v0_1_0_scalar_order_get_num(rustsecp256k1_v0_1_0_num *r);
#endif

/** Compare two scalars. */
static int rustsecp256k1_v0_1_0_scalar_eq(const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b);

#ifdef USE_ENDOMORPHISM
/** Find r1 and r2 such that r1+r2*2^128 = a. */
static void rustsecp256k1_v0_1_0_scalar_split_128(rustsecp256k1_v0_1_0_scalar *r1, rustsecp256k1_v0_1_0_scalar *r2, const rustsecp256k1_v0_1_0_scalar *a);
/** Find r1 and r2 such that r1+r2*lambda = a, and r1 and r2 are maximum 128 bits long (see rustsecp256k1_v0_1_0_gej_mul_lambda). */
static void rustsecp256k1_v0_1_0_scalar_split_lambda(rustsecp256k1_v0_1_0_scalar *r1, rustsecp256k1_v0_1_0_scalar *r2, const rustsecp256k1_v0_1_0_scalar *a);
#endif

/** Multiply a and b (without taking the modulus!), divide by 2**shift, and round to the nearest integer. Shift must be at least 256. */
static void rustsecp256k1_v0_1_0_scalar_mul_shift_var(rustsecp256k1_v0_1_0_scalar *r, const rustsecp256k1_v0_1_0_scalar *a, const rustsecp256k1_v0_1_0_scalar *b, unsigned int shift);

#endif /* SECP256K1_SCALAR_H */