Bitcoin ABC 0.33.3
P2P Digital Currency
field_5x52_int128_impl.h
Go to the documentation of this file.
1/***********************************************************************
2 * Copyright (c) 2013, 2014 Pieter Wuille *
3 * Distributed under the MIT software license, see the accompanying *
4 * file COPYING or https://www.opensource.org/licenses/mit-license.php.*
5 ***********************************************************************/
6
7#ifndef SECP256K1_FIELD_INNER5X52_IMPL_H
8#define SECP256K1_FIELD_INNER5X52_IMPL_H
9
10#include <stdint.h>
11
12#include "int128.h"
13#include "util.h"
14
#ifdef VERIFY
/* Assert that x fits in n bits, i.e. (x >> n) == 0. Used to document and
 * check the magnitude invariants of the 5x52 field limbs in VERIFY builds. */
#define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0)
/* Same bit-width assertion for a secp256k1_uint128 passed by pointer. */
#define VERIFY_BITS_128(x, n) VERIFY_CHECK(secp256k1_u128_check_bits((x), (n)))
#else
/* In non-VERIFY builds the checks compile to nothing. */
#define VERIFY_BITS(x, n) do { } while(0)
#define VERIFY_BITS_128(x, n) do { } while(0)
#endif
22
23SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
25 uint64_t t3, t4, tx, u0;
26 uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
27 const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
28
29 VERIFY_BITS(a[0], 56);
30 VERIFY_BITS(a[1], 56);
31 VERIFY_BITS(a[2], 56);
32 VERIFY_BITS(a[3], 56);
33 VERIFY_BITS(a[4], 52);
34 VERIFY_BITS(b[0], 56);
35 VERIFY_BITS(b[1], 56);
36 VERIFY_BITS(b[2], 56);
37 VERIFY_BITS(b[3], 56);
38 VERIFY_BITS(b[4], 52);
39 VERIFY_CHECK(r != b);
40 VERIFY_CHECK(a != b);
41
42 /* [... a b c] is a shorthand for ... + a<<104 + b<<52 + c<<0 mod n.
43 * for 0 <= x <= 4, px is a shorthand for sum(a[i]*b[x-i], i=0..x).
44 * for 4 <= x <= 8, px is a shorthand for sum(a[i]*b[x-i], i=(x-4)..4)
45 * Note that [x 0 0 0 0 0] = [x*R].
46 */
47
48 secp256k1_u128_mul(&d, a0, b[3]);
49 secp256k1_u128_accum_mul(&d, a1, b[2]);
50 secp256k1_u128_accum_mul(&d, a2, b[1]);
51 secp256k1_u128_accum_mul(&d, a3, b[0]);
52 VERIFY_BITS_128(&d, 114);
53 /* [d 0 0 0] = [p3 0 0 0] */
54 secp256k1_u128_mul(&c, a4, b[4]);
55 VERIFY_BITS_128(&c, 112);
56 /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
58 VERIFY_BITS_128(&d, 115);
59 VERIFY_BITS_128(&c, 48);
60 /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
61 t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
62 VERIFY_BITS(t3, 52);
63 VERIFY_BITS_128(&d, 63);
64 /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
65
66 secp256k1_u128_accum_mul(&d, a0, b[4]);
67 secp256k1_u128_accum_mul(&d, a1, b[3]);
68 secp256k1_u128_accum_mul(&d, a2, b[2]);
69 secp256k1_u128_accum_mul(&d, a3, b[1]);
70 secp256k1_u128_accum_mul(&d, a4, b[0]);
71 VERIFY_BITS_128(&d, 115);
72 /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
74 VERIFY_BITS_128(&d, 116);
75 /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
76 t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
77 VERIFY_BITS(t4, 52);
78 VERIFY_BITS_128(&d, 64);
79 /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
80 tx = (t4 >> 48); t4 &= (M >> 4);
81 VERIFY_BITS(tx, 4);
82 VERIFY_BITS(t4, 48);
83 /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
84
85 secp256k1_u128_mul(&c, a0, b[0]);
86 VERIFY_BITS_128(&c, 112);
87 /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
88 secp256k1_u128_accum_mul(&d, a1, b[4]);
89 secp256k1_u128_accum_mul(&d, a2, b[3]);
90 secp256k1_u128_accum_mul(&d, a3, b[2]);
91 secp256k1_u128_accum_mul(&d, a4, b[1]);
92 VERIFY_BITS_128(&d, 115);
93 /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
94 u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
95 VERIFY_BITS(u0, 52);
96 VERIFY_BITS_128(&d, 63);
97 /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
98 /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
99 u0 = (u0 << 4) | tx;
100 VERIFY_BITS(u0, 56);
101 /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
102 secp256k1_u128_accum_mul(&c, u0, R >> 4);
103 VERIFY_BITS_128(&c, 115);
104 /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
105 r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
106 VERIFY_BITS(r[0], 52);
107 VERIFY_BITS_128(&c, 61);
108 /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */
109
110 secp256k1_u128_accum_mul(&c, a0, b[1]);
111 secp256k1_u128_accum_mul(&c, a1, b[0]);
112 VERIFY_BITS_128(&c, 114);
113 /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
114 secp256k1_u128_accum_mul(&d, a2, b[4]);
115 secp256k1_u128_accum_mul(&d, a3, b[3]);
116 secp256k1_u128_accum_mul(&d, a4, b[2]);
117 VERIFY_BITS_128(&d, 114);
118 /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
120 VERIFY_BITS_128(&c, 115);
121 VERIFY_BITS_128(&d, 62);
122 /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
123 r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
124 VERIFY_BITS(r[1], 52);
125 VERIFY_BITS_128(&c, 63);
126 /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
127
128 secp256k1_u128_accum_mul(&c, a0, b[2]);
129 secp256k1_u128_accum_mul(&c, a1, b[1]);
130 secp256k1_u128_accum_mul(&c, a2, b[0]);
131 VERIFY_BITS_128(&c, 114);
132 /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
133 secp256k1_u128_accum_mul(&d, a3, b[4]);
134 secp256k1_u128_accum_mul(&d, a4, b[3]);
135 VERIFY_BITS_128(&d, 114);
136 /* [d 0 0 t4 t3 c t1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
138 VERIFY_BITS_128(&c, 115);
139 VERIFY_BITS_128(&d, 50);
140 /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
141
142 r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
143 VERIFY_BITS(r[2], 52);
144 VERIFY_BITS_128(&c, 63);
145 /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
148 VERIFY_BITS_128(&c, 100);
149 /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
150 r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
151 VERIFY_BITS(r[3], 52);
152 VERIFY_BITS_128(&c, 48);
153 /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
154 r[4] = secp256k1_u128_to_u64(&c) + t4;
155 VERIFY_BITS(r[4], 49);
156 /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
157}
158
159SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
161 uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
162 int64_t t3, t4, tx, u0;
163 const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;
164
165 VERIFY_BITS(a[0], 56);
166 VERIFY_BITS(a[1], 56);
167 VERIFY_BITS(a[2], 56);
168 VERIFY_BITS(a[3], 56);
169 VERIFY_BITS(a[4], 52);
170
176 secp256k1_u128_mul(&d, a0*2, a3);
177 secp256k1_u128_accum_mul(&d, a1*2, a2);
178 VERIFY_BITS_128(&d, 114);
179 /* [d 0 0 0] = [p3 0 0 0] */
180 secp256k1_u128_mul(&c, a4, a4);
181 VERIFY_BITS_128(&c, 112);
182 /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
184 VERIFY_BITS_128(&d, 115);
185 VERIFY_BITS_128(&c, 48);
186 /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
187 t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
188 VERIFY_BITS(t3, 52);
189 VERIFY_BITS_128(&d, 63);
190 /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
191
192 a4 *= 2;
193 secp256k1_u128_accum_mul(&d, a0, a4);
194 secp256k1_u128_accum_mul(&d, a1*2, a3);
195 secp256k1_u128_accum_mul(&d, a2, a2);
196 VERIFY_BITS_128(&d, 115);
197 /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
199 VERIFY_BITS_128(&d, 116);
200 /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
201 t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
202 VERIFY_BITS(t4, 52);
203 VERIFY_BITS_128(&d, 64);
204 /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
205 tx = (t4 >> 48); t4 &= (M >> 4);
206 VERIFY_BITS(tx, 4);
207 VERIFY_BITS(t4, 48);
208 /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
209
210 secp256k1_u128_mul(&c, a0, a0);
211 VERIFY_BITS_128(&c, 112);
212 /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
213 secp256k1_u128_accum_mul(&d, a1, a4);
214 secp256k1_u128_accum_mul(&d, a2*2, a3);
215 VERIFY_BITS_128(&d, 114);
216 /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
217 u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
218 VERIFY_BITS(u0, 52);
219 VERIFY_BITS_128(&d, 62);
220 /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
221 /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
222 u0 = (u0 << 4) | tx;
223 VERIFY_BITS(u0, 56);
224 /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
225 secp256k1_u128_accum_mul(&c, u0, R >> 4);
226 VERIFY_BITS_128(&c, 113);
227 /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
228 r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
229 VERIFY_BITS(r[0], 52);
230 VERIFY_BITS_128(&c, 61);
231 /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */
232
233 a0 *= 2;
234 secp256k1_u128_accum_mul(&c, a0, a1);
235 VERIFY_BITS_128(&c, 114);
236 /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
237 secp256k1_u128_accum_mul(&d, a2, a4);
238 secp256k1_u128_accum_mul(&d, a3, a3);
239 VERIFY_BITS_128(&d, 114);
240 /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
242 VERIFY_BITS_128(&c, 115);
243 VERIFY_BITS_128(&d, 62);
244 /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
245 r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
246 VERIFY_BITS(r[1], 52);
247 VERIFY_BITS_128(&c, 63);
248 /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
249
250 secp256k1_u128_accum_mul(&c, a0, a2);
251 secp256k1_u128_accum_mul(&c, a1, a1);
252 VERIFY_BITS_128(&c, 114);
253 /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
254 secp256k1_u128_accum_mul(&d, a3, a4);
255 VERIFY_BITS_128(&d, 114);
256 /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
258 VERIFY_BITS_128(&c, 115);
259 VERIFY_BITS_128(&d, 50);
260 /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
261 r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
262 VERIFY_BITS(r[2], 52);
263 VERIFY_BITS_128(&c, 63);
264 /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
265
268 VERIFY_BITS_128(&c, 100);
269 /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
270 r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
271 VERIFY_BITS(r[3], 52);
272 VERIFY_BITS_128(&c, 48);
273 /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
274 r[4] = secp256k1_u128_to_u64(&c) + t4;
275 VERIFY_BITS(r[4], 49);
276 /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
277}
278
279#endif /* SECP256K1_FIELD_INNER5X52_IMPL_H */
#define VERIFY_BITS_128(x, n)
static SECP256K1_INLINE void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t *SECP256K1_RESTRICT b)
#define VERIFY_BITS(x, n)
static SECP256K1_INLINE void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a)
static SECP256K1_INLINE void secp256k1_u128_rshift(secp256k1_uint128 *r, unsigned int n)
static SECP256K1_INLINE void secp256k1_u128_accum_u64(secp256k1_uint128 *r, uint64_t a)
static SECP256K1_INLINE void secp256k1_u128_accum_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b)
static SECP256K1_INLINE void secp256k1_u128_mul(secp256k1_uint128 *r, uint64_t a, uint64_t b)
static SECP256K1_INLINE uint64_t secp256k1_u128_to_u64(const secp256k1_uint128 *a)
#define SECP256K1_INLINE
Definition: util.h:48
#define VERIFY_CHECK(cond)
Definition: util.h:130
#define SECP256K1_RESTRICT
Definition: util.h:171