/* Electroneum — scalar_low_impl.h (Doxygen-generated source listing of this file). */
1 /***********************************************************************
2  * Copyright (c) 2015 Andrew Poelstra *
3  * Distributed under the MIT software license, see the accompanying *
4  * file COPYING or https://www.opensource.org/licenses/mit-license.php.*
5  ***********************************************************************/
6 
7 #ifndef SECP256K1_SCALAR_REPR_IMPL_H
8 #define SECP256K1_SCALAR_REPR_IMPL_H
9 
10 #include "checkmem.h"
11 #include "scalar.h"
12 
13 #include <string.h>
14 
15 SECP256K1_INLINE static int secp256k1_scalar_is_even(const secp256k1_scalar *a) {
16  return !(*a & 1);
17 }
18 
19 SECP256K1_INLINE static void secp256k1_scalar_clear(secp256k1_scalar *r) { *r = 0; }
20 SECP256K1_INLINE static void secp256k1_scalar_set_int(secp256k1_scalar *r, unsigned int v) { *r = v; }
21 
22 SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits(const secp256k1_scalar *a, unsigned int offset, unsigned int count) {
23  if (offset < 32)
24  return ((*a >> offset) & ((((uint32_t)1) << count) - 1));
25  else
26  return 0;
27 }
28 
29 SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits_var(const secp256k1_scalar *a, unsigned int offset, unsigned int count) {
30  return secp256k1_scalar_get_bits(a, offset, count);
31 }
32 
33 SECP256K1_INLINE static int secp256k1_scalar_check_overflow(const secp256k1_scalar *a) { return *a >= EXHAUSTIVE_TEST_ORDER; }
34 
35 static int secp256k1_scalar_add(secp256k1_scalar *r, const secp256k1_scalar *a, const secp256k1_scalar *b) {
36  *r = (*a + *b) % EXHAUSTIVE_TEST_ORDER;
37  return *r < *b;
38 }
39 
40 static void secp256k1_scalar_cadd_bit(secp256k1_scalar *r, unsigned int bit, int flag) {
41  if (flag && bit < 32)
42  *r += ((uint32_t)1 << bit);
43 #ifdef VERIFY
44  VERIFY_CHECK(bit < 32);
45  /* Verify that adding (1 << bit) will not overflow any in-range scalar *r by overflowing the underlying uint32_t. */
46  VERIFY_CHECK(((uint32_t)1 << bit) - 1 <= UINT32_MAX - EXHAUSTIVE_TEST_ORDER);
47  VERIFY_CHECK(secp256k1_scalar_check_overflow(r) == 0);
48 #endif
49 }
50 
51 static void secp256k1_scalar_set_b32(secp256k1_scalar *r, const unsigned char *b32, int *overflow) {
52  int i;
53  int over = 0;
54  *r = 0;
55  for (i = 0; i < 32; i++) {
56  *r = (*r * 0x100) + b32[i];
57  if (*r >= EXHAUSTIVE_TEST_ORDER) {
58  over = 1;
59  *r %= EXHAUSTIVE_TEST_ORDER;
60  }
61  }
62  if (overflow) *overflow = over;
63 }
64 
65 static void secp256k1_scalar_get_b32(unsigned char *bin, const secp256k1_scalar* a) {
66  memset(bin, 0, 32);
67  bin[28] = *a >> 24; bin[29] = *a >> 16; bin[30] = *a >> 8; bin[31] = *a;
68 }
69 
70 SECP256K1_INLINE static int secp256k1_scalar_is_zero(const secp256k1_scalar *a) {
71  return *a == 0;
72 }
73 
74 static void secp256k1_scalar_negate(secp256k1_scalar *r, const secp256k1_scalar *a) {
75  if (*a == 0) {
76  *r = 0;
77  } else {
78  *r = EXHAUSTIVE_TEST_ORDER - *a;
79  }
80 }
81 
82 SECP256K1_INLINE static int secp256k1_scalar_is_one(const secp256k1_scalar *a) {
83  return *a == 1;
84 }
85 
86 static int secp256k1_scalar_is_high(const secp256k1_scalar *a) {
87  return *a > EXHAUSTIVE_TEST_ORDER / 2;
88 }
89 
90 static int secp256k1_scalar_cond_negate(secp256k1_scalar *r, int flag) {
91  if (flag) secp256k1_scalar_negate(r, r);
92  return flag ? -1 : 1;
93 }
94 
95 static void secp256k1_scalar_mul(secp256k1_scalar *r, const secp256k1_scalar *a, const secp256k1_scalar *b) {
96  *r = (*a * *b) % EXHAUSTIVE_TEST_ORDER;
97 }
98 
99 static int secp256k1_scalar_shr_int(secp256k1_scalar *r, int n) {
100  int ret;
101  VERIFY_CHECK(n > 0);
102  VERIFY_CHECK(n < 16);
103  ret = *r & ((1 << n) - 1);
104  *r >>= n;
105  return ret;
106 }
107 
108 static void secp256k1_scalar_split_128(secp256k1_scalar *r1, secp256k1_scalar *r2, const secp256k1_scalar *a) {
109  *r1 = *a;
110  *r2 = 0;
111 }
112 
113 SECP256K1_INLINE static int secp256k1_scalar_eq(const secp256k1_scalar *a, const secp256k1_scalar *b) {
114  return *a == *b;
115 }
116 
/* Constant-time conditional move: *r = flag ? *a : *r.
 * Branch-free by design so the selection does not leak through timing or
 * branch prediction; do not "simplify" this into an if/else. */
static SECP256K1_INLINE void secp256k1_scalar_cmov(secp256k1_scalar *r, const secp256k1_scalar *a, int flag) {
    uint32_t mask0, mask1;
    SECP256K1_CHECKMEM_CHECK_VERIFY(r, sizeof(*r));
    mask0 = flag + ~((uint32_t)0); /* flag - 1: all-ones when flag == 0, zero when flag == 1 */
    mask1 = ~mask0;                /* complement selects *a exactly when flag == 1 */
    *r = (*r & mask0) | (*a & mask1);
}
124 
125 static void secp256k1_scalar_inverse(secp256k1_scalar *r, const secp256k1_scalar *x) {
126  int i;
127  *r = 0;
128  for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++)
129  if ((i * *x) % EXHAUSTIVE_TEST_ORDER == 1)
130  *r = i;
131  /* If this VERIFY_CHECK triggers we were given a noninvertible scalar (and thus
132  * have a composite group order; fix it in exhaustive_tests.c). */
133  VERIFY_CHECK(*r != 0);
134 }
135 
136 static void secp256k1_scalar_inverse_var(secp256k1_scalar *r, const secp256k1_scalar *x) {
137  secp256k1_scalar_inverse(r, x);
138 }
139 
140 #endif /* SECP256K1_SCALAR_REPR_IMPL_H */
/* Doxygen cross-references for symbols used above:
 *   VERIFY_CHECK(cond)                        — util.h:96
 *   mdb_size_t count(MDB_cursor *cur)
 *   SECP256K1_INLINE                          — secp256k1.h:131
 *   unsigned int uint32_t                     — stdint.h:126
 *   const GenericPointer<typename T::ValueType> T2 T::AllocatorType &a — pointer.h:1124
 *   UINT32_MAX                                — stdint.h:188
 *   SECP256K1_CHECKMEM_CHECK_VERIFY(p, len)   — checkmem.h:85
 */