/***********************************************************************
 * Copyright (c) 2015 Andrew Poelstra                                  *
 * Distributed under the MIT software license, see the accompanying    *
 * file COPYING or https://www.opensource.org/licenses/mit-license.php.*
 ***********************************************************************/

#ifndef SECP256K1_SCALAR_REPR_IMPL_H
#define SECP256K1_SCALAR_REPR_IMPL_H

#include "scalar.h"

#include <string.h>

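/* In this exhaustive-test build a scalar is a single uint32_t, kept reduced
 * modulo the small group order EXHAUSTIVE_TEST_ORDER, so all arithmetic
 * below is plain integer arithmetic on that value. */
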
SECP256K1_INLINE static int secp256k1_scalar_is_even(const secp256k1_scalar *a) {
    return !(*a & 1);
}

SECP256K1_INLINE static void secp256k1_scalar_clear(secp256k1_scalar *r) { *r = 0; }
SECP256K1_INLINE static void secp256k1_scalar_set_int(secp256k1_scalar *r, unsigned int v) { *r = v; }

SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits(const secp256k1_scalar *a, unsigned int offset, unsigned int count) {
    if (offset < 32)
        return ((*a >> offset) & ((((uint32_t)1) << count) - 1));
    else
        return 0;
}

SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits_var(const secp256k1_scalar *a, unsigned int offset, unsigned int count) {
    return secp256k1_scalar_get_bits(a, offset, count);
}

SECP256K1_INLINE static int secp256k1_scalar_check_overflow(const secp256k1_scalar *a) { return *a >= EXHAUSTIVE_TEST_ORDER; }

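/* Returns 1 if the addition wrapped past the group order, 0 otherwise: for
 * in-range inputs, *r < *b holds exactly when a reduction occurred. */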
static int secp256k1_scalar_add(secp256k1_scalar *r, const secp256k1_scalar *a, const secp256k1_scalar *b) {
    *r = (*a + *b) % EXHAUSTIVE_TEST_ORDER;
    return *r < *b;
}

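/* Conditionally add 2^bit to *r when flag is nonzero. */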
static void secp256k1_scalar_cadd_bit(secp256k1_scalar *r, unsigned int bit, int flag) {
    if (flag && bit < 32)
        *r += ((uint32_t)1 << bit);
#ifdef VERIFY
    VERIFY_CHECK(bit < 32);
    /* Verify that adding (1 << bit) to an in-range scalar *r cannot overflow the underlying uint32_t. */
    VERIFY_CHECK(((uint32_t)1 << bit) - 1 <= UINT32_MAX - EXHAUSTIVE_TEST_ORDER);
    VERIFY_CHECK(secp256k1_scalar_check_overflow(r) == 0);
#endif
}

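/* Parse 32 big-endian bytes into a scalar, reducing modulo the group order
 * as each byte is absorbed; *overflow (if non-NULL) is set to 1 when any
 * reduction was needed. */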
static void secp256k1_scalar_set_b32(secp256k1_scalar *r, const unsigned char *b32, int *overflow) {
    int i;
    int over = 0;
    *r = 0;
    for (i = 0; i < 32; i++) {
        *r = (*r * 0x100) + b32[i];
        if (*r >= EXHAUSTIVE_TEST_ORDER) {
            over = 1;
            *r %= EXHAUSTIVE_TEST_ORDER;
        }
    }
    if (overflow) *overflow = over;
}

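/* Serialize the scalar as 32 big-endian bytes; since the value fits in a
 * uint32_t, only the last four bytes can be nonzero. */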
static void secp256k1_scalar_get_b32(unsigned char *bin, const secp256k1_scalar* a) {
    memset(bin, 0, 32);
    bin[28] = *a >> 24; bin[29] = *a >> 16; bin[30] = *a >> 8; bin[31] = *a;
}

SECP256K1_INLINE static int secp256k1_scalar_is_zero(const secp256k1_scalar *a) {
    return *a == 0;
}

static void secp256k1_scalar_negate(secp256k1_scalar *r, const secp256k1_scalar *a) {
    if (*a == 0) {
        *r = 0;
    } else {
        *r = EXHAUSTIVE_TEST_ORDER - *a;
    }
}

SECP256K1_INLINE static int secp256k1_scalar_is_one(const secp256k1_scalar *a) {
    return *a == 1;
}

static int secp256k1_scalar_is_high(const secp256k1_scalar *a) {
    return *a > EXHAUSTIVE_TEST_ORDER / 2;
}

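/* Negate *r in place when flag is nonzero; return -1 if a negation was
 * performed and 1 otherwise. */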
static int secp256k1_scalar_cond_negate(secp256k1_scalar *r, int flag) {
    if (flag) secp256k1_scalar_negate(r, r);
    return flag ? -1 : 1;
}

static void secp256k1_scalar_mul(secp256k1_scalar *r, const secp256k1_scalar *a, const secp256k1_scalar *b) {
    *r = (*a * *b) % EXHAUSTIVE_TEST_ORDER;
}

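/* Shift *r right by n bits (1 <= n <= 15) and return the n bits shifted out. */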
static int secp256k1_scalar_shr_int(secp256k1_scalar *r, int n) {
    int ret;
    VERIFY_CHECK(n > 0);
    VERIFY_CHECK(n < 16);
    ret = *r & ((1 << n) - 1);
    *r >>= n;
    return ret;
}

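/* Split a scalar into its low and high 128-bit halves. A test scalar always
 * fits in the low half, so r1 gets the whole value and r2 is zero. */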
static void secp256k1_scalar_split_128(secp256k1_scalar *r1, secp256k1_scalar *r2, const secp256k1_scalar *a) {
    *r1 = *a;
    *r2 = 0;
}

SECP256K1_INLINE static int secp256k1_scalar_eq(const secp256k1_scalar *a, const secp256k1_scalar *b) {
    return *a == *b;
}

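/* Constant-time conditional move: mask0 is all ones when flag is 0 and all
 * zeros when flag is 1, so *r either keeps its value or takes *a without a
 * data-dependent branch. */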
static SECP256K1_INLINE void secp256k1_scalar_cmov(secp256k1_scalar *r, const secp256k1_scalar *a, int flag) {
    uint32_t mask0, mask1;
    VG_CHECK_VERIFY(r, sizeof(*r));
    mask0 = flag + ~((uint32_t)0);
    mask1 = ~mask0;
    *r = (*r & mask0) | (*a & mask1);
}

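/* Brute-force modular inverse by scanning every residue; the loop always
 * runs all EXHAUSTIVE_TEST_ORDER iterations, which is affordable only
 * because the test order is tiny. */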
static void secp256k1_scalar_inverse(secp256k1_scalar *r, const secp256k1_scalar *x) {
    int i;
    *r = 0;
    for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++)
        if ((i * *x) % EXHAUSTIVE_TEST_ORDER == 1)
            *r = i;
    /* If this VERIFY_CHECK triggers we were given a noninvertible scalar (and thus
     * have a composite group order; fix it in exhaustive_tests.c). */
    VERIFY_CHECK(*r != 0);
}

static void secp256k1_scalar_inverse_var(secp256k1_scalar *r, const secp256k1_scalar *x) {
    secp256k1_scalar_inverse(r, x);
}

#endif /* SECP256K1_SCALAR_REPR_IMPL_H */