/* Copyright (c) INRIA and Microsoft Corporation. All rights reserved.
   Licensed under the Apache 2.0 License. */

/* This file was generated by KreMLin <https://github.com/FStarLang/kremlin>
 * KreMLin invocation: ../krml -fc89 -fparentheses -fno-shadow -header /mnt/e/everest/verify/hdrB9w -minimal -fparentheses -fcurly-braces -fno-shadow -header copyright-header.txt -minimal -tmpdir extracted -warn-error +9+11 -skip-compilation -extract-uints -add-include <inttypes.h> -add-include "kremlib.h" -add-include "kremlin/internal/compat.h" extracted/prims.krml extracted/FStar_Pervasives_Native.krml extracted/FStar_Pervasives.krml extracted/FStar_Mul.krml extracted/FStar_Squash.krml extracted/FStar_Classical.krml extracted/FStar_StrongExcludedMiddle.krml extracted/FStar_FunctionalExtensionality.krml extracted/FStar_List_Tot_Base.krml extracted/FStar_List_Tot_Properties.krml extracted/FStar_List_Tot.krml extracted/FStar_Seq_Base.krml extracted/FStar_Seq_Properties.krml extracted/FStar_Seq.krml extracted/FStar_Math_Lib.krml extracted/FStar_Math_Lemmas.krml extracted/FStar_BitVector.krml extracted/FStar_UInt.krml extracted/FStar_UInt32.krml extracted/FStar_Int.krml extracted/FStar_Int16.krml extracted/FStar_Preorder.krml extracted/FStar_Ghost.krml extracted/FStar_ErasedLogic.krml extracted/FStar_UInt64.krml extracted/FStar_Set.krml extracted/FStar_PropositionalExtensionality.krml extracted/FStar_PredicateExtensionality.krml extracted/FStar_TSet.krml extracted/FStar_Monotonic_Heap.krml extracted/FStar_Heap.krml extracted/FStar_Map.krml extracted/FStar_Monotonic_HyperHeap.krml extracted/FStar_Monotonic_HyperStack.krml extracted/FStar_HyperStack.krml extracted/FStar_Monotonic_Witnessed.krml extracted/FStar_HyperStack_ST.krml extracted/FStar_HyperStack_All.krml extracted/FStar_Date.krml extracted/FStar_Universe.krml extracted/FStar_GSet.krml extracted/FStar_ModifiesGen.krml extracted/LowStar_Monotonic_Buffer.krml extracted/LowStar_Buffer.krml extracted/Spec_Loops.krml extracted/LowStar_BufferOps.krml extracted/C_Loops.krml extracted/FStar_UInt8.krml extracted/FStar_Kremlin_Endianness.krml extracted/FStar_UInt63.krml extracted/FStar_Exn.krml extracted/FStar_ST.krml extracted/FStar_All.krml extracted/FStar_Dyn.krml extracted/FStar_Int63.krml extracted/FStar_Int64.krml extracted/FStar_Int32.krml extracted/FStar_Int8.krml extracted/FStar_UInt16.krml extracted/FStar_Int_Cast.krml extracted/FStar_UInt128.krml extracted/C_Endianness.krml extracted/FStar_List.krml extracted/FStar_Float.krml extracted/FStar_IO.krml extracted/C.krml extracted/FStar_Char.krml extracted/FStar_String.krml extracted/LowStar_Modifies.krml extracted/C_String.krml extracted/FStar_Bytes.krml extracted/FStar_HyperStack_IO.krml extracted/C_Failure.krml extracted/TestLib.krml extracted/FStar_Int_Cast_Full.krml
 * F* version: 059db0c8
 * KreMLin version: 916c37ac
 */


#include "FStar_UInt128.h"
#include "kremlin/c_endianness.h"
#include "FStar_UInt64_FStar_UInt32_FStar_UInt16_FStar_UInt8.h"

uint64_t FStar_UInt128___proj__Mkuint128__item__low(FStar_UInt128_uint128 projectee)
{
  return projectee.low;
}

uint64_t FStar_UInt128___proj__Mkuint128__item__high(FStar_UInt128_uint128 projectee)
{
  return projectee.high;
}

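/* Branch-free carry: returns 1 when a < b (i.e. when a - b would wrap around)
 * and 0 otherwise, without any data-dependent branch, so the 128-bit
 * arithmetic below stays constant-time. */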
static uint64_t FStar_UInt128_constant_time_carry(uint64_t a, uint64_t b)
{
  return (a ^ ((a ^ b) | ((a - b) ^ b))) >> (uint32_t)63U;
}

static uint64_t FStar_UInt128_carry(uint64_t a, uint64_t b)
{
  return FStar_UInt128_constant_time_carry(a, b);
}

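/* 128-bit addition: the low words are added directly, and the carry out of
 * that addition (detected as a.low + b.low < b.low) is folded into the sum of
 * the high words. add, add_underspec and add_mod share the same extracted
 * body; they differ only in their F*-level specifications. */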
FStar_UInt128_uint128 FStar_UInt128_add(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128
  flat = { a.low + b.low, a.high + b.high + FStar_UInt128_carry(a.low + b.low, b.low) };
  return flat;
}

FStar_UInt128_uint128
FStar_UInt128_add_underspec(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128
  flat = { a.low + b.low, a.high + b.high + FStar_UInt128_carry(a.low + b.low, b.low) };
  return flat;
}

FStar_UInt128_uint128 FStar_UInt128_add_mod(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128
  flat = { a.low + b.low, a.high + b.high + FStar_UInt128_carry(a.low + b.low, b.low) };
  return flat;
}

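/* 128-bit subtraction: the borrow out of the low-word subtraction (detected as
 * a.low < a.low - b.low, i.e. the difference wrapped) is subtracted from the
 * difference of the high words. As with addition, the sub variants share one
 * body and differ only in their F*-level specifications. */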
FStar_UInt128_uint128 FStar_UInt128_sub(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128
  flat = { a.low - b.low, a.high - b.high - FStar_UInt128_carry(a.low, a.low - b.low) };
  return flat;
}

FStar_UInt128_uint128
FStar_UInt128_sub_underspec(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128
  flat = { a.low - b.low, a.high - b.high - FStar_UInt128_carry(a.low, a.low - b.low) };
  return flat;
}

static FStar_UInt128_uint128
FStar_UInt128_sub_mod_impl(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128
  flat = { a.low - b.low, a.high - b.high - FStar_UInt128_carry(a.low, a.low - b.low) };
  return flat;
}

FStar_UInt128_uint128 FStar_UInt128_sub_mod(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  return FStar_UInt128_sub_mod_impl(a, b);
}

FStar_UInt128_uint128 FStar_UInt128_logand(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128 flat = { a.low & b.low, a.high & b.high };
  return flat;
}

FStar_UInt128_uint128 FStar_UInt128_logxor(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128 flat = { a.low ^ b.low, a.high ^ b.high };
  return flat;
}

FStar_UInt128_uint128 FStar_UInt128_logor(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128 flat = { a.low | b.low, a.high | b.high };
  return flat;
}

FStar_UInt128_uint128 FStar_UInt128_lognot(FStar_UInt128_uint128 a)
{
  FStar_UInt128_uint128 flat = { ~a.low, ~a.high };
  return flat;
}

static uint32_t FStar_UInt128_u32_64 = (uint32_t)64U;

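/* Combines the high word shifted left by s with the s bits that overflow out
 * of the low word. Only meaningful for 0 < s < 64; the callers below guarantee
 * this, since s == 0 would turn (64 - s) into an undefined 64-bit shift. */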
static uint64_t FStar_UInt128_add_u64_shift_left(uint64_t hi, uint64_t lo, uint32_t s)
{
  return (hi << s) + (lo >> (FStar_UInt128_u32_64 - s));
}

static uint64_t FStar_UInt128_add_u64_shift_left_respec(uint64_t hi, uint64_t lo, uint32_t s)
{
  return FStar_UInt128_add_u64_shift_left(hi, lo, s);
}

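/* Left shift for 0 <= s < 64. The s == 0 case is split out so that the helper
 * above is never asked to shift by 64, which C leaves undefined for 64-bit
 * operands. */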
static FStar_UInt128_uint128
FStar_UInt128_shift_left_small(FStar_UInt128_uint128 a, uint32_t s)
{
  if (s == (uint32_t)0U)
  {
    return a;
  }
  else
  {
    FStar_UInt128_uint128
    flat = { a.low << s, FStar_UInt128_add_u64_shift_left_respec(a.high, a.low, s) };
    return flat;
  }
}

static FStar_UInt128_uint128
FStar_UInt128_shift_left_large(FStar_UInt128_uint128 a, uint32_t s)
{
  FStar_UInt128_uint128 flat = { (uint64_t)0U, a.low << (s - FStar_UInt128_u32_64) };
  return flat;
}

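/* Full 128-bit left shift, dispatching on whether the shift amount stays
 * within one 64-bit word. For s >= 64 the low word becomes zero and the
 * surviving bits of the original low word land in the high word.
 * FStar_UInt128_shift_right below is the mirror image. */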
FStar_UInt128_uint128 FStar_UInt128_shift_left(FStar_UInt128_uint128 a, uint32_t s)
{
  if (s < FStar_UInt128_u32_64)
  {
    return FStar_UInt128_shift_left_small(a, s);
  }
  else
  {
    return FStar_UInt128_shift_left_large(a, s);
  }
}

static uint64_t FStar_UInt128_add_u64_shift_right(uint64_t hi, uint64_t lo, uint32_t s)
{
  return (lo >> s) + (hi << (FStar_UInt128_u32_64 - s));
}

static uint64_t FStar_UInt128_add_u64_shift_right_respec(uint64_t hi, uint64_t lo, uint32_t s)
{
  return FStar_UInt128_add_u64_shift_right(hi, lo, s);
}

static FStar_UInt128_uint128
FStar_UInt128_shift_right_small(FStar_UInt128_uint128 a, uint32_t s)
{
  if (s == (uint32_t)0U)
  {
    return a;
  }
  else
  {
    FStar_UInt128_uint128
    flat = { FStar_UInt128_add_u64_shift_right_respec(a.high, a.low, s), a.high >> s };
    return flat;
  }
}

static FStar_UInt128_uint128
FStar_UInt128_shift_right_large(FStar_UInt128_uint128 a, uint32_t s)
{
  FStar_UInt128_uint128 flat = { a.high >> (s - FStar_UInt128_u32_64), (uint64_t)0U };
  return flat;
}

FStar_UInt128_uint128 FStar_UInt128_shift_right(FStar_UInt128_uint128 a, uint32_t s)
{
  if (s < FStar_UInt128_u32_64)
  {
    return FStar_UInt128_shift_right_small(a, s);
  }
  else
  {
    return FStar_UInt128_shift_right_large(a, s);
  }
}

bool FStar_UInt128_eq(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  return a.low == b.low && a.high == b.high;
}

bool FStar_UInt128_gt(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  return a.high > b.high || (a.high == b.high && a.low > b.low);
}

bool FStar_UInt128_lt(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  return a.high < b.high || (a.high == b.high && a.low < b.low);
}

bool FStar_UInt128_gte(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  return a.high > b.high || (a.high == b.high && a.low >= b.low);
}

bool FStar_UInt128_lte(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  return a.high < b.high || (a.high == b.high && a.low <= b.low);
}

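/* Constant-time equality: both words of the result are all-ones when a == b
 * and all-zeros otherwise, built from the 64-bit FStar_UInt64_eq_mask
 * primitive. */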
FStar_UInt128_uint128 FStar_UInt128_eq_mask(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128
  flat =
    {
      FStar_UInt64_eq_mask(a.low, b.low) & FStar_UInt64_eq_mask(a.high, b.high),
      FStar_UInt64_eq_mask(a.low, b.low) & FStar_UInt64_eq_mask(a.high, b.high)
    };
  return flat;
}

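/* Constant-time "greater or equal" mask: all-ones when a >= b, all-zeros
 * otherwise. a >= b holds when the high word is strictly greater, or when the
 * high words are equal and the low-word comparison decides. */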
FStar_UInt128_uint128 FStar_UInt128_gte_mask(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b)
{
  FStar_UInt128_uint128
  flat =
    {
      (FStar_UInt64_gte_mask(a.high, b.high) & ~FStar_UInt64_eq_mask(a.high, b.high))
      | (FStar_UInt64_eq_mask(a.high, b.high) & FStar_UInt64_gte_mask(a.low, b.low)),
      (FStar_UInt64_gte_mask(a.high, b.high) & ~FStar_UInt64_eq_mask(a.high, b.high))
      | (FStar_UInt64_eq_mask(a.high, b.high) & FStar_UInt64_gte_mask(a.low, b.low))
    };
  return flat;
}

FStar_UInt128_uint128 FStar_UInt128_uint64_to_uint128(uint64_t a)
{
  FStar_UInt128_uint128 flat = { a, (uint64_t)0U };
  return flat;
}

uint64_t FStar_UInt128_uint128_to_uint64(FStar_UInt128_uint128 a)
{
  return a.low;
}

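/* The definitions below expose the F*-level operator names (op_Plus_Hat for
 * +^, op_Subtraction_Hat for -^, and so on) as function pointers to the
 * implementations above. */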
FStar_UInt128_uint128
(*FStar_UInt128_op_Plus_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_add;

FStar_UInt128_uint128
(*FStar_UInt128_op_Plus_Question_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_add_underspec;

FStar_UInt128_uint128
(*FStar_UInt128_op_Plus_Percent_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_add_mod;

FStar_UInt128_uint128
(*FStar_UInt128_op_Subtraction_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_sub;

FStar_UInt128_uint128
(*FStar_UInt128_op_Subtraction_Question_Hat)(
  FStar_UInt128_uint128 x0,
  FStar_UInt128_uint128 x1
) = FStar_UInt128_sub_underspec;

FStar_UInt128_uint128
(*FStar_UInt128_op_Subtraction_Percent_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_sub_mod;

FStar_UInt128_uint128
(*FStar_UInt128_op_Amp_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_logand;

FStar_UInt128_uint128
(*FStar_UInt128_op_Hat_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_logxor;

FStar_UInt128_uint128
(*FStar_UInt128_op_Bar_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_logor;

FStar_UInt128_uint128
(*FStar_UInt128_op_Less_Less_Hat)(FStar_UInt128_uint128 x0, uint32_t x1) =
  FStar_UInt128_shift_left;

FStar_UInt128_uint128
(*FStar_UInt128_op_Greater_Greater_Hat)(FStar_UInt128_uint128 x0, uint32_t x1) =
  FStar_UInt128_shift_right;

bool
(*FStar_UInt128_op_Equals_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_eq;

bool
(*FStar_UInt128_op_Greater_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_gt;

bool
(*FStar_UInt128_op_Less_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_lt;

bool
(*FStar_UInt128_op_Greater_Equals_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_gte;

bool
(*FStar_UInt128_op_Less_Equals_Hat)(FStar_UInt128_uint128 x0, FStar_UInt128_uint128 x1) =
  FStar_UInt128_lte;

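/* Helpers for the wide multiplications below: u64_mod_32 keeps the low 32 bits
 * of a 64-bit word, and u32_combine adds hi, shifted into the upper 32 bits,
 * onto lo. */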
static uint64_t FStar_UInt128_u64_mod_32(uint64_t a)
{
  return a & (uint64_t)0xffffffffU;
}

static uint32_t FStar_UInt128_u32_32 = (uint32_t)32U;

static uint64_t FStar_UInt128_u32_combine(uint64_t hi, uint64_t lo)
{
  return lo + (hi << FStar_UInt128_u32_32);
}

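/* 64 x 32 -> 128-bit multiplication. x is split into 32-bit halves, each half
 * is multiplied by y as a 64-bit product, and the partial products are
 * recombined with the carry between the halves propagated into the high
 * word. */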
FStar_UInt128_uint128 FStar_UInt128_mul32(uint64_t x, uint32_t y)
{
  FStar_UInt128_uint128
  flat =
    {
      FStar_UInt128_u32_combine((x >> FStar_UInt128_u32_32) * (uint64_t)y
        + (FStar_UInt128_u64_mod_32(x) * (uint64_t)y >> FStar_UInt128_u32_32),
        FStar_UInt128_u64_mod_32(FStar_UInt128_u64_mod_32(x) * (uint64_t)y)),
      ((x >> FStar_UInt128_u32_32) * (uint64_t)y
      + (FStar_UInt128_u64_mod_32(x) * (uint64_t)y >> FStar_UInt128_u32_32))
      >> FStar_UInt128_u32_32
    };
  return flat;
}

typedef struct K___uint64_t_uint64_t_uint64_t_uint64_t_s
{
  uint64_t fst;
  uint64_t snd;
  uint64_t thd;
  uint64_t f3;
}
K___uint64_t_uint64_t_uint64_t_uint64_t;

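/* First half of the 64 x 64 -> 128-bit schoolbook multiplication. The tuple
 * holds, in order: the low 32 bits of x; the low 32 bits of the (low x low)
 * partial product; the high 32 bits of x; and the cross product
 * (high half of x) * (low half of y) plus the carried high half of the
 * (low x low) product. */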
static K___uint64_t_uint64_t_uint64_t_uint64_t
FStar_UInt128_mul_wide_impl_t_(uint64_t x, uint64_t y)
{
  K___uint64_t_uint64_t_uint64_t_uint64_t
  flat =
    {
      FStar_UInt128_u64_mod_32(x),
      FStar_UInt128_u64_mod_32(FStar_UInt128_u64_mod_32(x) * FStar_UInt128_u64_mod_32(y)),
      x >> FStar_UInt128_u32_32,
      (x >> FStar_UInt128_u32_32) * FStar_UInt128_u64_mod_32(y)
      + (FStar_UInt128_u64_mod_32(x) * FStar_UInt128_u64_mod_32(y) >> FStar_UInt128_u32_32)
    };
  return flat;
}

static uint64_t FStar_UInt128_u32_combine_(uint64_t hi, uint64_t lo)
{
  return lo + (hi << FStar_UInt128_u32_32);
}

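/* Second half of the schoolbook multiplication: the remaining cross product
 * and the (high x high) product are added in, and the pieces are recombined
 * into the low and high 64-bit words of the 128-bit result. */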
static FStar_UInt128_uint128 FStar_UInt128_mul_wide_impl(uint64_t x, uint64_t y)
{
  K___uint64_t_uint64_t_uint64_t_uint64_t scrut = FStar_UInt128_mul_wide_impl_t_(x, y);
  uint64_t u1 = scrut.fst;
  uint64_t w3 = scrut.snd;
  uint64_t x_ = scrut.thd;
  uint64_t t_ = scrut.f3;
  FStar_UInt128_uint128
  flat =
    {
      FStar_UInt128_u32_combine_(u1 * (y >> FStar_UInt128_u32_32) + FStar_UInt128_u64_mod_32(t_),
        w3),
      x_ * (y >> FStar_UInt128_u32_32)
      + (t_ >> FStar_UInt128_u32_32)
      + ((u1 * (y >> FStar_UInt128_u32_32) + FStar_UInt128_u64_mod_32(t_)) >> FStar_UInt128_u32_32)
    };
  return flat;
}

FStar_UInt128_uint128 FStar_UInt128_mul_wide(uint64_t x, uint64_t y)
{
  return FStar_UInt128_mul_wide_impl(x, y);
}
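
/* Example (not part of the generated code, just an illustrative sketch):
 * a caller might compute a full 64 x 64 product and accumulate it into a
 * running 128-bit sum as follows, where x and y stand for arbitrary
 * uint64_t inputs:
 *
 *   FStar_UInt128_uint128 acc = FStar_UInt128_uint64_to_uint128((uint64_t)0U);
 *   FStar_UInt128_uint128 prod = FStar_UInt128_mul_wide(x, y);
 *   acc = FStar_UInt128_add_mod(acc, prod);
 *   uint64_t lo = FStar_UInt128___proj__Mkuint128__item__low(acc);
 *   uint64_t hi = FStar_UInt128___proj__Mkuint128__item__high(acc);
 */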