#ifndef SECP256K1_FIELD_INNER5X52_IMPL_H
#define SECP256K1_FIELD_INNER5X52_IMPL_H

#include <stdint.h>

#include "int128.h"
#include "util.h"

#ifdef VERIFY
#define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0)
#define VERIFY_BITS_128(x, n) VERIFY_CHECK(secp256k1_u128_check_bits((x), (n)))
#else
#define VERIFY_BITS(x, n) do { } while(0)
#define VERIFY_BITS_128(x, n) do { } while(0)
#endif

SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
    secp256k1_uint128 c, d;
    uint64_t t3, t4, tx, u0;
    uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
    const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
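
    /* M masks a value to its low 52 bits. R is the reduction constant: the
     * limbs carry weights 2^0, 2^52, ..., 2^208, so a carry out of limb 4 has
     * weight 2^260 = 2^4 * 2^256, and with p = 2^256 - 0x1000003D1 that is
     * congruent to 0x1000003D1 << 4 = 0x1000003D10 = R (mod p).
     *
     * Below, px denotes the coefficient of 2^(52*x) in the full double-width
     * product, i.e. px = sum(a[i]*b[x-i]). Accumulation starts at limb 3 so
     * the high limbs can be reduced first; t3 and t4 hold limbs 3 and 4 until
     * the low limbs are finished. First, accumulate p3 in d and fold in
     * p8 = a4*b4 via R. */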
    secp256k1_u128_mul(&d, a0, b[3]);
    secp256k1_u128_accum_mul(&d, a1, b[2]);
    secp256k1_u128_accum_mul(&d, a2, b[1]);
    secp256k1_u128_accum_mul(&d, a3, b[0]);
    secp256k1_u128_mul(&c, a4, b[4]);
    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
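
    /* Limb 4: accumulate p4 on top of the carry left in d, and fold in the
     * bits of c that remain above 64 (hence R << 12: c was shifted down by
     * 64 rather than the limb width 52, and 64 - 52 = 12). The top 4 bits of
     * t4 (tx) sit at weight 2^256 and are recombined with u0 below. */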
    secp256k1_u128_accum_mul(&d, a0, b[4]);
    secp256k1_u128_accum_mul(&d, a1, b[3]);
    secp256k1_u128_accum_mul(&d, a2, b[2]);
    secp256k1_u128_accum_mul(&d, a3, b[1]);
    secp256k1_u128_accum_mul(&d, a4, b[0]);
    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    tx = (t4 >> 48); t4 &= (M >> 4);
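
    /* Limb 0: c = p0 while d accumulates p5. The low 52 bits of d (u0) have
     * weight 2^260; merged with tx at weight 2^256 as (u0 << 4) | tx, the
     * combination is folded into limb 0 by multiplying with
     * R >> 4 = 0x1000003D1 = 2^256 mod p. */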
    secp256k1_u128_mul(&c, a0, b[0]);
    secp256k1_u128_accum_mul(&d, a1, b[4]);
    secp256k1_u128_accum_mul(&d, a2, b[3]);
    secp256k1_u128_accum_mul(&d, a3, b[2]);
    secp256k1_u128_accum_mul(&d, a4, b[1]);
    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    u0 = (u0 << 4) | tx;
    secp256k1_u128_accum_mul(&c, u0, R >> 4);
    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
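
    /* Limb 1: c carries in from limb 0 and accumulates p1; d accumulates p6,
     * whose low 52 bits fold into c via R. */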
    secp256k1_u128_accum_mul(&c, a0, b[1]);
    secp256k1_u128_accum_mul(&c, a1, b[0]);
    secp256k1_u128_accum_mul(&d, a2, b[4]);
    secp256k1_u128_accum_mul(&d, a3, b[3]);
    secp256k1_u128_accum_mul(&d, a4, b[2]);
    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
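
    /* Limb 2: c accumulates p2, d accumulates p7. Here the full low 64 bits
     * of d are folded in one step, so d is shifted down by 64 instead of 52. */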
    secp256k1_u128_accum_mul(&c, a0, b[2]);
    secp256k1_u128_accum_mul(&c, a1, b[1]);
    secp256k1_u128_accum_mul(&c, a2, b[0]);
    secp256k1_u128_accum_mul(&d, a3, b[4]);
    secp256k1_u128_accum_mul(&d, a4, b[3]);
    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
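
    /* Limbs 3 and 4: fold the last bits of d (already shifted down by 64,
     * hence R << 12), add back the saved t3 and t4, and let the final carry
     * form r[4]. The result is reduced but not fully normalized: r[4] may
     * slightly exceed 48 bits. */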
    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
    secp256k1_u128_accum_u64(&c, t3);
    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    r[4] = secp256k1_u128_to_u64(&c) + t4;
}
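
/* Squaring follows the same pipeline as multiplication but exploits symmetry:
 * each off-diagonal product a[i]*a[j] (i != j) is needed twice, so one
 * operand is pre-doubled and the product computed once, roughly halving the
 * number of 64x64->128 multiplications. */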
SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
    secp256k1_uint128 c, d;
    uint64_t t3, t4, tx, u0;
    uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
    const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
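
    /* Limb 3: p3 = 2*a0*a3 + 2*a1*a2; p8 = a4*a4 is folded in via R. */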
    secp256k1_u128_mul(&d, a0*2, a3);
    secp256k1_u128_accum_mul(&d, a1*2, a2);
    secp256k1_u128_mul(&c, a4, a4);
    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
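
    /* Limb 4: p4 = 2*a0*a4 + 2*a1*a3 + a2*a2. a4 is doubled once up front,
     * since every remaining use of it is in a doubled cross term. */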
    a4 *= 2;
    secp256k1_u128_accum_mul(&d, a0, a4);
    secp256k1_u128_accum_mul(&d, a1*2, a3);
    secp256k1_u128_accum_mul(&d, a2, a2);
    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    tx = (t4 >> 48); t4 &= (M >> 4);
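
    /* Limb 0: c = a0*a0; d accumulates p5 = 2*a1*a4 + 2*a2*a3 (a4 is already
     * doubled). u0 and tx recombine and fold via R >> 4 exactly as in the
     * multiply. */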
    secp256k1_u128_mul(&c, a0, a0);
    secp256k1_u128_accum_mul(&d, a1, a4);
    secp256k1_u128_accum_mul(&d, a2*2, a3);
    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    u0 = (u0 << 4) | tx;
    secp256k1_u128_accum_mul(&c, u0, R >> 4);
    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
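
    /* Limb 1: p1 = 2*a0*a1, so a0 is doubled here; it stays doubled for p2
     * below. d accumulates p6 = 2*a2*a4 + a3*a3. */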
    a0 *= 2;
    secp256k1_u128_accum_mul(&c, a0, a1);
    secp256k1_u128_accum_mul(&d, a2, a4);
    secp256k1_u128_accum_mul(&d, a3, a3);
    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
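
    /* Limb 2: p2 = 2*a0*a2 + a1*a1 (a0 still doubled); d accumulates
     * p7 = 2*a3*a4 (a4 still doubled) and folds its low 64 bits through R. */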
    secp256k1_u128_accum_mul(&c, a0, a2);
    secp256k1_u128_accum_mul(&c, a1, a1);
    secp256k1_u128_accum_mul(&d, a3, a4);
    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
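
    /* Limbs 3 and 4: identical finish to the multiply. */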
    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
    secp256k1_u128_accum_u64(&c, t3);
    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    r[4] = secp256k1_u128_to_u64(&c) + t4;
}

#endif /* SECP256K1_FIELD_INNER5X52_IMPL_H */