2018-07-09 11:17:44 +00:00
|
|
|
/**********************************************************************
|
|
|
|
* Copyright (c) 2015 Andrew Poelstra *
|
|
|
|
* Distributed under the MIT software license, see the accompanying *
|
|
|
|
* file COPYING or http://www.opensource.org/licenses/mit-license.php.*
|
|
|
|
**********************************************************************/
|
|
|
|
|
|
|
|
#ifndef SECP256K1_SCALAR_REPR_IMPL_H
|
|
|
|
#define SECP256K1_SCALAR_REPR_IMPL_H
|
|
|
|
|
|
|
|
#include "scalar.h"
|
|
|
|
|
|
|
|
#include <string.h>
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Return 1 if the scalar is even, 0 if it is odd. */
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_even(const rustsecp256k1_v0_1_1_scalar *a) {
    /* Parity is the low bit of the representation. */
    return (*a & 1) == 0;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Reset the scalar to zero.
 * NOTE(review): a plain store may be elided by the optimizer if this is meant
 * to wipe secret data — confirm whether secure erasure is required here. */
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_clear(rustsecp256k1_v0_1_1_scalar *r) {
    *r = 0;
}
|
|
|
|
/* Load a small integer into the scalar. No modular reduction is performed
 * here, so the caller is expected to pass a value below the group order. */
SECP256K1_INLINE static void rustsecp256k1_v0_1_1_scalar_set_int(rustsecp256k1_v0_1_1_scalar *r, unsigned int v) {
    *r = v;
}
|
2018-07-09 11:17:44 +00:00
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Extract `count` bits starting at bit `offset` from the scalar.
 * Offsets at or beyond bit 32 read as zero, since the exhaustive-test
 * representation fits in a single 32-bit word. */
SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count) {
    uint32_t mask;
    if (offset >= 32) {
        return 0;
    }
    /* Build a mask of `count` low bits, then shift the window down to it. */
    mask = (((uint32_t)1) << count) - 1;
    return (*a >> offset) & mask;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Variable-time bit extraction. In exhaustive-test mode there is no secret
 * data to protect, so this simply delegates to the fixed-time version. */
SECP256K1_INLINE static unsigned int rustsecp256k1_v0_1_1_scalar_get_bits_var(const rustsecp256k1_v0_1_1_scalar *a, unsigned int offset, unsigned int count) {
    return rustsecp256k1_v0_1_1_scalar_get_bits(a, offset, count);
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Return 1 if the scalar is not fully reduced (i.e. >= the group order). */
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_check_overflow(const rustsecp256k1_v0_1_1_scalar *a) {
    return *a >= EXHAUSTIVE_TEST_ORDER;
}
|
2018-07-09 11:17:44 +00:00
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Modular addition: r = (a + b) mod EXHAUSTIVE_TEST_ORDER.
 * Returns 1 when the sum wrapped past the order, 0 otherwise (assuming both
 * inputs are already reduced below the order).
 * NOTE(review): the overflow test reads *b AFTER *r has been written, so it
 * relies on r not aliasing b — confirm against callers before restructuring. */
static int rustsecp256k1_v0_1_1_scalar_add(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
    *r = (*a + *b) % EXHAUSTIVE_TEST_ORDER;
    /* After reduction, the result is smaller than b exactly when a + b
     * reached the order and wrapped. */
    return *r < *b;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Conditionally add 2^bit to the scalar: when `flag` is nonzero and `bit`
 * addresses a bit within the 32-bit representation, r += 2^bit.
 * No modular reduction is performed; under VERIFY the result is checked to
 * still be below the group order. */
static void rustsecp256k1_v0_1_1_scalar_cadd_bit(rustsecp256k1_v0_1_1_scalar *r, unsigned int bit, int flag) {
    if (flag && bit < 32) {
        /* Shift an unsigned constant: `1 << 31` on a plain int is undefined
         * behavior (signed left-shift overflow). This matches the
         * ((uint32_t)1) << count idiom used in scalar_get_bits. */
        *r += ((uint32_t)1 << bit);
    }
#ifdef VERIFY
    VERIFY_CHECK(rustsecp256k1_v0_1_1_scalar_check_overflow(r) == 0);
#endif
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Parse a 32-byte big-endian number and reduce it modulo the group order.
 * Horner's scheme folds one byte at a time, keeping the accumulator reduced
 * so no intermediate value can overflow. */
static void rustsecp256k1_v0_1_1_scalar_set_b32(rustsecp256k1_v0_1_1_scalar *r, const unsigned char *b32, int *overflow) {
    const int base = 0x100 % EXHAUSTIVE_TEST_ORDER;
    rustsecp256k1_v0_1_1_scalar acc = 0;
    int idx;
    for (idx = 0; idx < 32; idx++) {
        acc = ((acc * base) + b32[idx]) % EXHAUSTIVE_TEST_ORDER;
    }
    *r = acc;
    /* just deny overflow, it basically always happens */
    if (overflow) {
        *overflow = 0;
    }
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Serialize the scalar as a 32-byte big-endian number. Only the last four
 * bytes can be nonzero, since the value fits in 32 bits. */
static void rustsecp256k1_v0_1_1_scalar_get_b32(unsigned char *bin, const rustsecp256k1_v0_1_1_scalar* a) {
    int byte;
    memset(bin, 0, 32);
    /* Store the low 4 bytes big-endian into bin[28..31]. */
    for (byte = 0; byte < 4; byte++) {
        bin[31 - byte] = (unsigned char)(*a >> (8 * byte));
    }
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Return 1 if the scalar is zero, 0 otherwise. */
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_zero(const rustsecp256k1_v0_1_1_scalar *a) {
    return !*a;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Modular negation: r = -a mod EXHAUSTIVE_TEST_ORDER.
 * Zero is its own additive inverse; anything else maps to order - a. */
static void rustsecp256k1_v0_1_1_scalar_negate(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a) {
    *r = (*a == 0) ? 0 : EXHAUSTIVE_TEST_ORDER - *a;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Return 1 if the scalar equals one, 0 otherwise. */
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_is_one(const rustsecp256k1_v0_1_1_scalar *a) {
    /* XOR with 1 is zero exactly when the value is 1. */
    return !(*a ^ 1);
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
static int rustsecp256k1_v0_1_1_scalar_is_high(const rustsecp256k1_v0_1_1_scalar *a) {
|
2018-07-09 11:17:44 +00:00
|
|
|
return *a > EXHAUSTIVE_TEST_ORDER / 2;
|
|
|
|
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Negate the scalar in place when `flag` is nonzero.
 * Returns -1 if a negation happened, +1 if the value was left untouched. */
static int rustsecp256k1_v0_1_1_scalar_cond_negate(rustsecp256k1_v0_1_1_scalar *r, int flag) {
    if (flag) {
        rustsecp256k1_v0_1_1_scalar_negate(r, r);
        return -1;
    }
    return 1;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Modular multiplication: r = (a * b) mod EXHAUSTIVE_TEST_ORDER.
 * The full product of two reduced 32-bit values fits the representation,
 * so a single reduction at the end suffices. */
static void rustsecp256k1_v0_1_1_scalar_mul(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
    const rustsecp256k1_v0_1_1_scalar product = *a * *b;
    *r = product % EXHAUSTIVE_TEST_ORDER;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Shift the scalar right by n bits (1 <= n <= 15) and return the bits that
 * were shifted out, as a small non-negative integer. */
static int rustsecp256k1_v0_1_1_scalar_shr_int(rustsecp256k1_v0_1_1_scalar *r, int n) {
    int low_bits;
    VERIFY_CHECK(n > 0);
    VERIFY_CHECK(n < 16);
    /* Capture the n low-order bits before discarding them. */
    low_bits = *r & ((1 << n) - 1);
    *r >>= n;
    return low_bits;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Modular squaring: r = a^2 mod EXHAUSTIVE_TEST_ORDER. */
static void rustsecp256k1_v0_1_1_scalar_sqr(rustsecp256k1_v0_1_1_scalar *r, const rustsecp256k1_v0_1_1_scalar *a) {
    const rustsecp256k1_v0_1_1_scalar square = *a * *a;
    *r = square % EXHAUSTIVE_TEST_ORDER;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Split the scalar into low (r1) and high (r2) 128-bit halves. Every
 * exhaustive-test scalar fits entirely in the low half, so the high half is
 * always zero. Statement order is kept as-is for safety under aliasing. */
static void rustsecp256k1_v0_1_1_scalar_split_128(rustsecp256k1_v0_1_1_scalar *r1, rustsecp256k1_v0_1_1_scalar *r2, const rustsecp256k1_v0_1_1_scalar *a) {
    *r1 = *a;
    *r2 = 0;
}
|
|
|
|
|
2019-12-11 15:55:00 +00:00
|
|
|
/* Return 1 if the two scalars are equal, 0 otherwise. */
SECP256K1_INLINE static int rustsecp256k1_v0_1_1_scalar_eq(const rustsecp256k1_v0_1_1_scalar *a, const rustsecp256k1_v0_1_1_scalar *b) {
    return (*a == *b) ? 1 : 0;
}
|
|
|
|
|
|
|
|
#endif /* SECP256K1_SCALAR_REPR_IMPL_H */
|