/***********************************************************************
 * Copyright (c) 2015 Andrew Poelstra                                  *
 * Distributed under the MIT software license, see the accompanying    *
 * file COPYING or https://www.opensource.org/licenses/mit-license.php.*
 ***********************************************************************/

#ifndef SECP256K1_SCALAR_REPR_IMPL_H
#define SECP256K1_SCALAR_REPR_IMPL_H
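
/* Scalar implementation for exhaustive test mode: a scalar is a single
 * uint32_t holding a value reduced modulo EXHAUSTIVE_TEST_ORDER. */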

#include "checkmem.h"
#include "scalar.h"
#include "util.h"

#include <string.h>

SECP256K1_INLINE static int rustsecp256k1_v0_9_0_scalar_is_even(const rustsecp256k1_v0_9_0_scalar *a) {
    rustsecp256k1_v0_9_0_scalar_verify(a);

    return !(*a & 1);
}

SECP256K1_INLINE static void rustsecp256k1_v0_9_0_scalar_clear(rustsecp256k1_v0_9_0_scalar *r) { *r = 0; }

SECP256K1_INLINE static void rustsecp256k1_v0_9_0_scalar_set_int(rustsecp256k1_v0_9_0_scalar *r, unsigned int v) {
    *r = v % EXHAUSTIVE_TEST_ORDER;

    rustsecp256k1_v0_9_0_scalar_verify(r);
}
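
/* Return count bits of *a starting at offset; scalars fit in a single
 * uint32_t, so any offset of 32 or more yields zero. */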
SECP256K1_INLINE static unsigned int rustsecp256k1_v0_9_0_scalar_get_bits(const rustsecp256k1_v0_9_0_scalar *a, unsigned int offset, unsigned int count) {
    rustsecp256k1_v0_9_0_scalar_verify(a);

    if (offset < 32)
        return ((*a >> offset) & ((((uint32_t)1) << count) - 1));
    else
        return 0;
}

SECP256K1_INLINE static unsigned int rustsecp256k1_v0_9_0_scalar_get_bits_var(const rustsecp256k1_v0_9_0_scalar *a, unsigned int offset, unsigned int count) {
    rustsecp256k1_v0_9_0_scalar_verify(a);

    return rustsecp256k1_v0_9_0_scalar_get_bits(a, offset, count);
}

SECP256K1_INLINE static int rustsecp256k1_v0_9_0_scalar_check_overflow(const rustsecp256k1_v0_9_0_scalar *a) { return *a >= EXHAUSTIVE_TEST_ORDER; }
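
/* Since both inputs are already reduced, the sum overflowed the group order
 * iff the reduced result wrapped around, i.e. ended up smaller than *b. */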
static int rustsecp256k1_v0_9_0_scalar_add(rustsecp256k1_v0_9_0_scalar *r, const rustsecp256k1_v0_9_0_scalar *a, const rustsecp256k1_v0_9_0_scalar *b) {
    rustsecp256k1_v0_9_0_scalar_verify(a);
    rustsecp256k1_v0_9_0_scalar_verify(b);

    *r = (*a + *b) % EXHAUSTIVE_TEST_ORDER;

    rustsecp256k1_v0_9_0_scalar_verify(r);
    return *r < *b;
}
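
/* Conditionally add 2^bit to *r when flag is nonzero. */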
static void rustsecp256k1_v0_9_0_scalar_cadd_bit(rustsecp256k1_v0_9_0_scalar *r, unsigned int bit, int flag) {
    rustsecp256k1_v0_9_0_scalar_verify(r);

    if (flag && bit < 32)
        *r += ((uint32_t)1 << bit);

    rustsecp256k1_v0_9_0_scalar_verify(r);
#ifdef VERIFY
    VERIFY_CHECK(bit < 32);
    /* Verify that adding (1 << bit) will not overflow any in-range scalar *r by overflowing the underlying uint32_t. */
    VERIFY_CHECK(((uint32_t)1 << bit) - 1 <= UINT32_MAX - EXHAUSTIVE_TEST_ORDER);
#endif
}
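
/* Parse a 32-byte big-endian number, reducing modulo EXHAUSTIVE_TEST_ORDER
 * after every byte so the accumulator never overflows; *overflow records
 * whether any reduction was needed. */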
static void rustsecp256k1_v0_9_0_scalar_set_b32(rustsecp256k1_v0_9_0_scalar *r, const unsigned char *b32, int *overflow) {
    int i;
    int over = 0;
    *r = 0;
    for (i = 0; i < 32; i++) {
        *r = (*r * 0x100) + b32[i];
        if (*r >= EXHAUSTIVE_TEST_ORDER) {
            over = 1;
            *r %= EXHAUSTIVE_TEST_ORDER;
        }
    }
    if (overflow) *overflow = over;

    rustsecp256k1_v0_9_0_scalar_verify(r);
}
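
/* Serialize as 32 big-endian bytes; only the last four bytes can be nonzero. */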
static void rustsecp256k1_v0_9_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_9_0_scalar* a) {
    rustsecp256k1_v0_9_0_scalar_verify(a);

    memset(bin, 0, 32);
    bin[28] = *a >> 24; bin[29] = *a >> 16; bin[30] = *a >> 8; bin[31] = *a;
}

SECP256K1_INLINE static int rustsecp256k1_v0_9_0_scalar_is_zero(const rustsecp256k1_v0_9_0_scalar *a) {
    rustsecp256k1_v0_9_0_scalar_verify(a);

    return *a == 0;
}

static void rustsecp256k1_v0_9_0_scalar_negate(rustsecp256k1_v0_9_0_scalar *r, const rustsecp256k1_v0_9_0_scalar *a) {
    rustsecp256k1_v0_9_0_scalar_verify(a);

    if (*a == 0) {
        *r = 0;
    } else {
        *r = EXHAUSTIVE_TEST_ORDER - *a;
    }

    rustsecp256k1_v0_9_0_scalar_verify(r);
}

SECP256K1_INLINE static int rustsecp256k1_v0_9_0_scalar_is_one(const rustsecp256k1_v0_9_0_scalar *a) {
    rustsecp256k1_v0_9_0_scalar_verify(a);

    return *a == 1;
}

static int rustsecp256k1_v0_9_0_scalar_is_high(const rustsecp256k1_v0_9_0_scalar *a) {
    rustsecp256k1_v0_9_0_scalar_verify(a);

    return *a > EXHAUSTIVE_TEST_ORDER / 2;
}
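
/* Negate *r if flag is set; return -1 if a negation happened, 1 otherwise. */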
static int rustsecp256k1_v0_9_0_scalar_cond_negate(rustsecp256k1_v0_9_0_scalar *r, int flag) {
    rustsecp256k1_v0_9_0_scalar_verify(r);

    if (flag) rustsecp256k1_v0_9_0_scalar_negate(r, r);

    rustsecp256k1_v0_9_0_scalar_verify(r);
    return flag ? -1 : 1;
}

static void rustsecp256k1_v0_9_0_scalar_mul(rustsecp256k1_v0_9_0_scalar *r, const rustsecp256k1_v0_9_0_scalar *a, const rustsecp256k1_v0_9_0_scalar *b) {
    rustsecp256k1_v0_9_0_scalar_verify(a);
    rustsecp256k1_v0_9_0_scalar_verify(b);

    *r = (*a * *b) % EXHAUSTIVE_TEST_ORDER;

    rustsecp256k1_v0_9_0_scalar_verify(r);
}
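
/* Shift *r right by n bits (1 <= n <= 15) and return the bits shifted out. */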
static int rustsecp256k1_v0_9_0_scalar_shr_int(rustsecp256k1_v0_9_0_scalar *r, int n) {
    int ret;
    rustsecp256k1_v0_9_0_scalar_verify(r);
    VERIFY_CHECK(n > 0);
    VERIFY_CHECK(n < 16);

    ret = *r & ((1 << n) - 1);
    *r >>= n;

    rustsecp256k1_v0_9_0_scalar_verify(r);
    return ret;
}
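
/* In this tiny representation the whole scalar fits in the low half of the
 * 128-bit split, so the high half is always zero. */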
static void rustsecp256k1_v0_9_0_scalar_split_128(rustsecp256k1_v0_9_0_scalar *r1, rustsecp256k1_v0_9_0_scalar *r2, const rustsecp256k1_v0_9_0_scalar *a) {
    rustsecp256k1_v0_9_0_scalar_verify(a);

    *r1 = *a;
    *r2 = 0;

    rustsecp256k1_v0_9_0_scalar_verify(r1);
    rustsecp256k1_v0_9_0_scalar_verify(r2);
}

SECP256K1_INLINE static int rustsecp256k1_v0_9_0_scalar_eq(const rustsecp256k1_v0_9_0_scalar *a, const rustsecp256k1_v0_9_0_scalar *b) {
    rustsecp256k1_v0_9_0_scalar_verify(a);
    rustsecp256k1_v0_9_0_scalar_verify(b);

    return *a == *b;
}
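
/* Constant-time conditional move: mask0 is all-ones when flag is 0 and
 * all-zeros when flag is 1, so exactly one of *r and *a is kept. The
 * volatile read of flag discourages the compiler from reintroducing a branch. */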
static SECP256K1_INLINE void rustsecp256k1_v0_9_0_scalar_cmov(rustsecp256k1_v0_9_0_scalar *r, const rustsecp256k1_v0_9_0_scalar *a, int flag) {
    uint32_t mask0, mask1;
    volatile int vflag = flag;
    rustsecp256k1_v0_9_0_scalar_verify(a);
    SECP256K1_CHECKMEM_CHECK_VERIFY(r, sizeof(*r));

    mask0 = vflag + ~((uint32_t)0);
    mask1 = ~mask0;
    *r = (*r & mask0) | (*a & mask1);

    rustsecp256k1_v0_9_0_scalar_verify(r);
}
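
/* Brute-force search for the multiplicative inverse; this is affordable only
 * because EXHAUSTIVE_TEST_ORDER is tiny. */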
static void rustsecp256k1_v0_9_0_scalar_inverse(rustsecp256k1_v0_9_0_scalar *r, const rustsecp256k1_v0_9_0_scalar *x) {
    int i;
    *r = 0;
    rustsecp256k1_v0_9_0_scalar_verify(x);

    for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++)
        if ((i * *x) % EXHAUSTIVE_TEST_ORDER == 1)
            *r = i;

    rustsecp256k1_v0_9_0_scalar_verify(r);
    /* If this VERIFY_CHECK triggers we were given a noninvertible scalar (and thus
     * have a composite group order; fix it in exhaustive_tests.c). */
    VERIFY_CHECK(*r != 0);
}

static void rustsecp256k1_v0_9_0_scalar_inverse_var(rustsecp256k1_v0_9_0_scalar *r, const rustsecp256k1_v0_9_0_scalar *x) {
    rustsecp256k1_v0_9_0_scalar_verify(x);

    rustsecp256k1_v0_9_0_scalar_inverse(r, x);

    rustsecp256k1_v0_9_0_scalar_verify(r);
}

#endif /* SECP256K1_SCALAR_REPR_IMPL_H */