/*
    Copyright (C) 2014 Fredrik Johansson

    This file is part of Arb.

    Arb is free software: you can redistribute it and/or modify it under
    the terms of the GNU Lesser General Public License (LGPL) as published
    by the Free Software Foundation; either version 2.1 of the License, or
    (at your option) any later version. See <http://www.gnu.org/licenses/>.
*/

#include "arf.h"

14 int
arf_add_special(arf_t z,const arf_t x,const arf_t y,slong prec,arf_rnd_t rnd)15 arf_add_special(arf_t z, const arf_t x, const arf_t y, slong prec, arf_rnd_t rnd)
16 {
17 if (arf_is_zero(x))
18 {
19 if (arf_is_zero(y))
20 {
21 arf_zero(z);
22 return 0;
23 }
24 else
25 return arf_set_round(z, y, prec, rnd);
26 }
27 else if (arf_is_zero(y))
28 {
29 return arf_set_round(z, x, prec, rnd);
30 }
31 else if (arf_is_nan(x) || arf_is_nan(y)
32 || (arf_is_pos_inf(x) && arf_is_neg_inf(y))
33 || (arf_is_neg_inf(x) && arf_is_pos_inf(y)))
34 {
35 arf_nan(z);
36 return 0;
37 }
38 else if (arf_is_special(x))
39 {
40 arf_set(z, x);
41 return 0;
42 }
43 else
44 {
45 arf_set(z, y);
46 return 0;
47 }
48 }
49
50 int
arf_add(arf_ptr z,arf_srcptr x,arf_srcptr y,slong prec,arf_rnd_t rnd)51 arf_add(arf_ptr z, arf_srcptr x, arf_srcptr y, slong prec, arf_rnd_t rnd)
52 {
53 mp_size_t xn, yn;
54 mp_srcptr xptr, yptr;
55 slong shift;
56
57 if (arf_is_special(x) || arf_is_special(y))
58 {
59 return arf_add_special(z, x, y, prec, rnd);
60 }
61
62 shift = _fmpz_sub_small(ARF_EXPREF(x), ARF_EXPREF(y));
63
64 if (shift < 0)
65 {
66 arf_srcptr __t;
67 __t = x; x = y; y = __t;
68 shift = -shift;
69 }
70
71 ARF_GET_MPN_READONLY(xptr, xn, x);
72 ARF_GET_MPN_READONLY(yptr, yn, y);
73
74 return _arf_add_mpn(z, xptr, xn, ARF_SGNBIT(x), ARF_EXPREF(x),
75 yptr, yn, ARF_SGNBIT(y), shift, prec, rnd);
76 }
77
78 int
arf_add_si(arf_ptr z,arf_srcptr x,slong y,slong prec,arf_rnd_t rnd)79 arf_add_si(arf_ptr z, arf_srcptr x, slong y, slong prec, arf_rnd_t rnd)
80 {
81 mp_size_t xn, yn;
82 mp_srcptr xptr, yptr;
83 mp_limb_t ytmp;
84 int xsgnbit, ysgnbit;
85 fmpz yexp;
86 slong shift;
87
88 if (y == 0)
89 {
90 return arf_set_round(z, x, prec, rnd);
91 }
92 else if (arf_is_special(x))
93 {
94 if (arf_is_zero(x))
95 return arf_set_round_si(z, y, prec, rnd);
96 else
97 {
98 arf_set(z, x);
99 return 0;
100 }
101 }
102
103 ysgnbit = (y < 0);
104 if (ysgnbit)
105 ytmp = -y;
106 else
107 ytmp = y;
108 yptr = &ytmp;
109 yn = 1;
110 yexp = FLINT_BITS;
111 /* XXX: worth the trouble to top-align y? */
112
113 shift = _fmpz_sub_small(ARF_EXPREF(x), &yexp);
114
115 xsgnbit = ARF_SGNBIT(x);
116 ARF_GET_MPN_READONLY(xptr, xn, x);
117
118 if (shift >= 0)
119 return _arf_add_mpn(z, xptr, xn, xsgnbit, ARF_EXPREF(x),
120 yptr, yn, ysgnbit, shift, prec, rnd);
121 else
122 return _arf_add_mpn(z, yptr, yn, ysgnbit, &yexp,
123 xptr, xn, xsgnbit, -shift, prec, rnd);
124 }
125
126 int
arf_add_ui(arf_ptr z,arf_srcptr x,ulong y,slong prec,arf_rnd_t rnd)127 arf_add_ui(arf_ptr z, arf_srcptr x, ulong y, slong prec, arf_rnd_t rnd)
128 {
129 mp_size_t xn, yn;
130 mp_srcptr xptr, yptr;
131 mp_limb_t ytmp;
132 int xsgnbit, ysgnbit;
133 fmpz yexp;
134 slong shift;
135
136 if (y == 0)
137 {
138 return arf_set_round(z, x, prec, rnd);
139 }
140 else if (arf_is_special(x))
141 {
142 if (arf_is_zero(x))
143 return arf_set_round_ui(z, y, prec, rnd);
144 else
145 {
146 arf_set(z, x);
147 return 0;
148 }
149 }
150
151 ysgnbit = 0;
152 ytmp = y;
153 yptr = &ytmp;
154 yn = 1;
155
156 yexp = FLINT_BITS;
157 shift = _fmpz_sub_small(ARF_EXPREF(x), &yexp);
158
159 xsgnbit = ARF_SGNBIT(x);
160 ARF_GET_MPN_READONLY(xptr, xn, x);
161
162 if (shift >= 0)
163 return _arf_add_mpn(z, xptr, xn, xsgnbit, ARF_EXPREF(x),
164 yptr, yn, ysgnbit, shift, prec, rnd);
165 else
166 return _arf_add_mpn(z, yptr, yn, ysgnbit, &yexp,
167 xptr, xn, xsgnbit, -shift, prec, rnd);
168 }
169
170 int
arf_add_fmpz(arf_ptr z,arf_srcptr x,const fmpz_t y,slong prec,arf_rnd_t rnd)171 arf_add_fmpz(arf_ptr z, arf_srcptr x, const fmpz_t y, slong prec, arf_rnd_t rnd)
172 {
173 mp_size_t xn, yn;
174 mp_srcptr xptr, yptr;
175 mp_limb_t ytmp;
176 int xsgnbit, ysgnbit;
177 fmpz yexp;
178 slong shift;
179
180 if (fmpz_is_zero(y))
181 {
182 return arf_set_round(z, x, prec, rnd);
183 }
184 else if (arf_is_special(x))
185 {
186 if (arf_is_zero(x))
187 return arf_set_round_fmpz(z, y, prec, rnd);
188 else
189 {
190 arf_set(z, x);
191 return 0;
192 }
193 }
194
195 FMPZ_GET_MPN_READONLY(ysgnbit, yn, yptr, ytmp, *y)
196 yexp = yn * FLINT_BITS;
197 shift = _fmpz_sub_small(ARF_EXPREF(x), &yexp);
198
199 xsgnbit = ARF_SGNBIT(x);
200 ARF_GET_MPN_READONLY(xptr, xn, x);
201
202 if (shift >= 0)
203 return _arf_add_mpn(z, xptr, xn, xsgnbit, ARF_EXPREF(x),
204 yptr, yn, ysgnbit, shift, prec, rnd);
205 else
206 return _arf_add_mpn(z, yptr, yn, ysgnbit, &yexp,
207 xptr, xn, xsgnbit, -shift, prec, rnd);
208 }
209
210 int
arf_add_fmpz_2exp(arf_ptr z,arf_srcptr x,const fmpz_t y,const fmpz_t exp,slong prec,arf_rnd_t rnd)211 arf_add_fmpz_2exp(arf_ptr z, arf_srcptr x, const fmpz_t y, const fmpz_t exp, slong prec, arf_rnd_t rnd)
212 {
213 mp_size_t xn, yn;
214 mp_srcptr xptr, yptr;
215 mp_limb_t ytmp;
216 int xsgnbit, ysgnbit, inexact;
217 fmpz_t yexp;
218 slong shift;
219
220 if (fmpz_is_zero(y))
221 {
222 return arf_set_round(z, x, prec, rnd);
223 }
224 else if (arf_is_special(x))
225 {
226 if (arf_is_zero(x))
227 {
228 inexact = arf_set_round_fmpz(z, y, prec, rnd);
229 arf_mul_2exp_fmpz(z, z, exp);
230 return inexact;
231 }
232 else
233 {
234 arf_set(z, x);
235 return 0;
236 }
237 }
238
239 FMPZ_GET_MPN_READONLY(ysgnbit, yn, yptr, ytmp, *y)
240 fmpz_init(yexp);
241 fmpz_add_ui(yexp, exp, yn * FLINT_BITS);
242 shift = _fmpz_sub_small(ARF_EXPREF(x), yexp);
243
244 xsgnbit = ARF_SGNBIT(x);
245 ARF_GET_MPN_READONLY(xptr, xn, x);
246
247 if (shift >= 0)
248 inexact = _arf_add_mpn(z, xptr, xn, xsgnbit, ARF_EXPREF(x),
249 yptr, yn, ysgnbit, shift, prec, rnd);
250 else
251 inexact = _arf_add_mpn(z, yptr, yn, ysgnbit, yexp,
252 xptr, xn, xsgnbit, -shift, prec, rnd);
253
254 fmpz_clear(yexp);
255 return inexact;
256 }
257
258