Lines Matching refs:dest (references to the dest argument in the m68k math emulator's arithmetic routines, arch/m68k/math-emu/fp_arith.c in the Linux kernel)

32 fp_fabs(struct fp_ext *dest, struct fp_ext *src) in fp_fabs() argument
36 fp_monadic_check(dest, src); in fp_fabs()
38 dest->sign = 0; in fp_fabs()
40 return dest; in fp_fabs()
44 fp_fneg(struct fp_ext *dest, struct fp_ext *src) in fp_fneg() argument
48 fp_monadic_check(dest, src); in fp_fneg()
50 dest->sign = !dest->sign; in fp_fneg()
52 return dest; in fp_fneg()
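
The two monadic entries above show that fabs and fneg only touch the sign field once fp_monadic_check() has dealt with NaNs: fabs clears it, fneg inverts it. A minimal standalone sketch of the same idea follows; the struct xfloat type and the xf_* helpers are invented for illustration and are not the kernel's struct fp_ext API.

    #include <stdio.h>
    #include <stdint.h>

    /* Hypothetical, simplified extended-precision value (not the kernel's
     * struct fp_ext): sign flag, biased exponent, explicit 64-bit mantissa. */
    struct xfloat {
        int sign;        /* 0 = positive, 1 = negative */
        int exp;         /* biased exponent, bias 0x3fff */
        uint64_t mant;   /* mantissa, bit 63 is the integer bit */
    };

    /* fabs: force the sign positive; mantissa and exponent are untouched. */
    static struct xfloat *xf_fabs(struct xfloat *dest)
    {
        dest->sign = 0;
        return dest;
    }

    /* fneg: flip the sign; everything else is untouched. */
    static struct xfloat *xf_fneg(struct xfloat *dest)
    {
        dest->sign = !dest->sign;
        return dest;
    }

    int main(void)
    {
        struct xfloat x = { 1, 0x3fff, 1ULL << 63 };          /* -1.0 */
        printf("after fabs: sign=%d\n", xf_fabs(&x)->sign);   /* 0 */
        printf("after fneg: sign=%d\n", xf_fneg(&x)->sign);   /* 1 */
        return 0;
    }
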
61 fp_fadd(struct fp_ext *dest, struct fp_ext *src) in fp_fadd() argument
67 fp_dyadic_check(dest, src); in fp_fadd()
69 if (IS_INF(dest)) { in fp_fadd()
71 if (IS_INF(src) && (src->sign != dest->sign)) in fp_fadd()
72 fp_set_nan(dest); in fp_fadd()
73 return dest; in fp_fadd()
76 fp_copy_ext(dest, src); in fp_fadd()
77 return dest; in fp_fadd()
80 if (IS_ZERO(dest)) { in fp_fadd()
82 if (src->sign != dest->sign) { in fp_fadd()
84 dest->sign = 1; in fp_fadd()
86 dest->sign = 0; in fp_fadd()
89 fp_copy_ext(dest, src); in fp_fadd()
90 return dest; in fp_fadd()
93 dest->lowmant = src->lowmant = 0; in fp_fadd()
95 if ((diff = dest->exp - src->exp) > 0) in fp_fadd()
98 fp_denormalize(dest, -diff); in fp_fadd()
100 if (dest->sign == src->sign) { in fp_fadd()
101 if (fp_addmant(dest, src)) in fp_fadd()
102 if (!fp_addcarry(dest)) in fp_fadd()
103 return dest; in fp_fadd()
105 if (dest->mant.m64 < src->mant.m64) { in fp_fadd()
106 fp_submant(dest, src, dest); in fp_fadd()
107 dest->sign = !dest->sign; in fp_fadd()
109 fp_submant(dest, dest, src); in fp_fadd()
112 return dest; in fp_fadd()
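
The fadd lines trace the usual extended-precision addition scheme: handle infinities and zeros first, clear the guard byte (lowmant), denormalize whichever operand has the smaller exponent so both mantissas are aligned, then either add the mantissas (equal signs) or subtract the smaller magnitude from the larger, flipping the result sign if dest was the smaller one. The standalone sketch below shows only the align-and-combine step on plain 64-bit mantissas; it ignores rounding, renormalization and the lowmant guard byte, and the xfloat/xf_add names are hypothetical, not kernel interfaces.

    #include <stdio.h>
    #include <stdint.h>

    /* Hypothetical simplified value: sign, biased exponent, and a mantissa
     * normalized so bit 62 is the integer bit, leaving headroom for a carry. */
    struct xfloat {
        int sign;        /* 0 or 1 */
        int exp;         /* biased exponent */
        uint64_t mant;   /* mantissa, bit 62 set when normalized */
    };

    static void xf_add(struct xfloat *dest, struct xfloat *src)
    {
        struct xfloat s = *src;
        int diff = dest->exp - src->exp;

        /* Align: shift the smaller operand right until exponents match. */
        if (diff > 0)
            s.mant >>= diff;               /* cf. fp_denormalize(src, diff) */
        else if (diff < 0) {
            dest->mant >>= -diff;          /* cf. fp_denormalize(dest, -diff) */
            dest->exp = s.exp;
        }

        if (dest->sign == s.sign) {
            dest->mant += s.mant;              /* same sign: magnitudes add */
        } else if (dest->mant < s.mant) {
            dest->mant = s.mant - dest->mant;  /* result takes src's sign */
            dest->sign = !dest->sign;
        } else {
            dest->mant -= s.mant;              /* result keeps dest's sign */
        }
    }

    int main(void)
    {
        /* 3.0 + (-1.0), using an artificial bias of 0 and bit 62 as the
         * integer bit: 3.0 = 1.5 * 2^1, 1.0 = 1.0 * 2^0. */
        struct xfloat a = { 0, 1, 0x6000000000000000ULL };  /* +3.0 */
        struct xfloat b = { 1, 0, 0x4000000000000000ULL };  /* -1.0 */
        xf_add(&a, &b);
        printf("sign=%d exp=%d mant=%#llx\n",               /* expect +2.0 */
               a.sign, a.exp, (unsigned long long)a.mant);
        return 0;
    }
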
121 fp_fsub(struct fp_ext *dest, struct fp_ext *src) in fp_fsub() argument
126 return fp_fadd(dest, src); in fp_fsub()
131 fp_fcmp(struct fp_ext *dest, struct fp_ext *src) in fp_fcmp() argument
135 FPDATA->temp[1] = *dest; in fp_fcmp()
141 fp_ftst(struct fp_ext *dest, struct fp_ext *src) in fp_ftst() argument
145 (void)dest; in fp_ftst()
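
fcmp does not modify dest at all: it copies dest into a scratch slot (FPDATA->temp[1]), flips src's sign and reuses fp_fadd(), so the condition codes can later be derived from the sign and zeroness of that difference; ftst only examines src and explicitly ignores dest. A minimal double-precision sketch of comparing two values through the sign of their difference (cmp_by_sub is an invented name, not emulator code):

    #include <stdio.h>

    /* Compare a and b the way fcmp does conceptually: form a - b and
     * classify the result by its sign and zeroness. */
    static int cmp_by_sub(double a, double b)
    {
        double diff = a + (-b);     /* negate b, then add, as fp_fcmp() does */

        if (diff == 0.0)
            return 0;               /* equal */
        return diff < 0.0 ? -1 : 1; /* less / greater */
    }

    int main(void)
    {
        printf("%d %d %d\n", cmp_by_sub(1.5, 2.0),
                             cmp_by_sub(2.0, 2.0),
                             cmp_by_sub(3.0, 2.0));   /* -1 0 1 */
        return 0;
    }
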
151 fp_fmul(struct fp_ext *dest, struct fp_ext *src) in fp_fmul() argument
158 fp_dyadic_check(dest, src); in fp_fmul()
161 dest->sign = src->sign ^ dest->sign; in fp_fmul()
164 if (IS_INF(dest)) { in fp_fmul()
166 fp_set_nan(dest); in fp_fmul()
167 return dest; in fp_fmul()
170 if (IS_ZERO(dest)) in fp_fmul()
171 fp_set_nan(dest); in fp_fmul()
173 fp_copy_ext(dest, src); in fp_fmul()
174 return dest; in fp_fmul()
180 if (IS_ZERO(dest) || IS_ZERO(src)) { in fp_fmul()
181 dest->exp = 0; in fp_fmul()
182 dest->mant.m64 = 0; in fp_fmul()
183 dest->lowmant = 0; in fp_fmul()
185 return dest; in fp_fmul()
188 exp = dest->exp + src->exp - 0x3ffe; in fp_fmul()
193 if ((long)dest->mant.m32[0] >= 0) in fp_fmul()
194 exp -= fp_overnormalize(dest); in fp_fmul()
199 fp_multiplymant(&temp, dest, src); in fp_fmul()
205 fp_putmant128(dest, &temp, 1); in fp_fmul()
207 fp_putmant128(dest, &temp, 0); in fp_fmul()
210 fp_set_ovrflw(dest); in fp_fmul()
211 return dest; in fp_fmul()
213 dest->exp = exp; in fp_fmul()
216 fp_denormalize(dest, -exp); in fp_fmul()
219 return dest; in fp_fmul()
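
For the multiply, the result sign is the XOR of the operand signs, and the biased exponents combine as dest->exp + src->exp - 0x3ffe: adding two biased exponents counts the 0x3fff bias twice, so one bias must come back off, and the code subtracts one less than the bias because the product of two normalized mantissas lies in [1, 4), with fp_putmant128() choosing the mantissa position from the product's top bit. The toy example below (the toy struct and toy_mul are invented names) reproduces that biased-exponent bookkeeping with 32-bit mantissa words so the full product fits in a uint64_t:

    #include <stdio.h>
    #include <stdint.h>

    #define BIAS 0x3fff   /* same bias as the 68881 extended format */

    /* Hypothetical toy format: 32-bit mantissa, bit 31 is the integer bit,
     * so a normalized mantissa m encodes m / 2^31 in [1, 2). */
    struct toy {
        int sign;
        int exp;         /* biased exponent */
        uint32_t mant;
    };

    static struct toy toy_mul(struct toy a, struct toy b)
    {
        struct toy r;
        uint64_t prod = (uint64_t)a.mant * b.mant;   /* in [2^62, 2^64) */

        r.sign = a.sign ^ b.sign;
        /* Assume the product is in [2, 4): e1 + e2 - (BIAS - 1). */
        r.exp = a.exp + b.exp - (BIAS - 1);
        if (!(prod & (1ULL << 63))) {
            /* Product was in [1, 2): shift left one, drop the exponent. */
            prod <<= 1;
            r.exp--;
        }
        r.mant = (uint32_t)(prod >> 32);   /* truncate; no rounding here */
        return r;
    }

    int main(void)
    {
        /* 1.5 * 1.5 = 2.25 */
        struct toy a = { 0, BIAS, 0xc0000000u };   /* 1.5 * 2^0 */
        struct toy r = toy_mul(a, a);
        /* Expect exp - BIAS = 1 and mant = 0x90000000 (2.25 = 1.125 * 2^1). */
        printf("exp-BIAS=%d mant=%#x\n", r.exp - BIAS, (unsigned)r.mant);
        return 0;
    }
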
229 fp_fdiv(struct fp_ext *dest, struct fp_ext *src) in fp_fdiv() argument
236 fp_dyadic_check(dest, src); in fp_fdiv()
239 dest->sign = src->sign ^ dest->sign; in fp_fdiv()
242 if (IS_INF(dest)) { in fp_fdiv()
245 fp_set_nan(dest); in fp_fdiv()
247 return dest; in fp_fdiv()
251 dest->exp = 0; in fp_fdiv()
252 dest->mant.m64 = 0; in fp_fdiv()
253 dest->lowmant = 0; in fp_fdiv()
255 return dest; in fp_fdiv()
259 if (IS_ZERO(dest)) { in fp_fdiv()
262 fp_set_nan(dest); in fp_fdiv()
264 return dest; in fp_fdiv()
269 dest->exp = 0x7fff; in fp_fdiv()
270 dest->mant.m64 = 0; in fp_fdiv()
272 return dest; in fp_fdiv()
275 exp = dest->exp - src->exp + 0x3fff; in fp_fdiv()
280 if ((long)dest->mant.m32[0] >= 0) in fp_fdiv()
281 exp -= fp_overnormalize(dest); in fp_fdiv()
286 fp_dividemant(&temp, dest, src); in fp_fdiv()
292 fp_putmant128(dest, &temp, 32); in fp_fdiv()
294 fp_putmant128(dest, &temp, 31); in fp_fdiv()
297 fp_set_ovrflw(dest); in fp_fdiv()
298 return dest; in fp_fdiv()
300 dest->exp = exp; in fp_fdiv()
303 fp_denormalize(dest, -exp); in fp_fdiv()
306 return dest; in fp_fdiv()
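
Division applies the bias the other way round (dest->exp - src->exp + 0x3fff, since subtracting two biased exponents cancels the bias) and spends most of its lines on the special cases: inf/inf and 0/0 become NaN, a finite nonzero value divided by zero becomes a signed infinity with exponent 0x7fff, and 0/x or x/inf become zero. The sketch below captures just that case analysis on a hypothetical simplified struct; xfloat, xf_div_special, is_inf and is_zero are illustrative stand-ins, not the kernel's types or macros:

    #include <stdio.h>
    #include <stdint.h>
    #include <stdbool.h>

    #define BIAS    0x3fff
    #define EXP_INF 0x7fff

    /* Hypothetical simplified extended value. */
    struct xfloat {
        int sign;
        int exp;         /* biased; EXP_INF with mant == 0 means infinity */
        uint64_t mant;   /* 0 together with exp == 0 means zero */
    };

    static bool is_inf(const struct xfloat *x)  { return x->exp == EXP_INF && x->mant == 0; }
    static bool is_zero(const struct xfloat *x) { return x->exp == 0 && x->mant == 0; }

    /* Returns 0 if the special cases fully determined *dest, 1 if the
     * caller still has to do the real mantissa division. */
    static int xf_div_special(struct xfloat *dest, const struct xfloat *src)
    {
        dest->sign ^= src->sign;

        if (is_inf(dest)) {
            if (is_inf(src))             /* inf / inf -> NaN (sketched) */
                dest->mant = ~0ULL;
            return 0;                    /* inf / x -> inf, sign already set */
        }
        if (is_zero(src)) {
            if (is_zero(dest)) {         /* 0 / 0 -> NaN */
                dest->exp = EXP_INF;
                dest->mant = ~0ULL;
            } else {                     /* x / 0 -> inf (the FPU also raises DZ) */
                dest->exp = EXP_INF;
                dest->mant = 0;
            }
            return 0;
        }
        if (is_zero(dest) || is_inf(src)) {   /* 0 / x, x / inf -> 0 */
            dest->exp = 0;
            dest->mant = 0;
            return 0;
        }
        /* Normal case: quotient exponent; mantissa division left out. */
        dest->exp = dest->exp - src->exp + BIAS;
        return 1;
    }

    int main(void)
    {
        struct xfloat num = { 0, BIAS + 3, 1ULL << 63 };   /* 8.0 */
        struct xfloat den = { 1, BIAS + 1, 1ULL << 63 };   /* -2.0 */
        if (xf_div_special(&num, &den))
            printf("sign=%d, biased exp=%#x (expect 0x4001 for -4.0)\n",
                   num.sign, (unsigned)num.exp);
        return 0;
    }
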
310 fp_fsglmul(struct fp_ext *dest, struct fp_ext *src) in fp_fsglmul() argument
316 fp_dyadic_check(dest, src); in fp_fsglmul()
319 dest->sign = src->sign ^ dest->sign; in fp_fsglmul()
322 if (IS_INF(dest)) { in fp_fsglmul()
324 fp_set_nan(dest); in fp_fsglmul()
325 return dest; in fp_fsglmul()
328 if (IS_ZERO(dest)) in fp_fsglmul()
329 fp_set_nan(dest); in fp_fsglmul()
331 fp_copy_ext(dest, src); in fp_fsglmul()
332 return dest; in fp_fsglmul()
338 if (IS_ZERO(dest) || IS_ZERO(src)) { in fp_fsglmul()
339 dest->exp = 0; in fp_fsglmul()
340 dest->mant.m64 = 0; in fp_fsglmul()
341 dest->lowmant = 0; in fp_fsglmul()
343 return dest; in fp_fsglmul()
346 exp = dest->exp + src->exp - 0x3ffe; in fp_fsglmul()
349 fp_mul64(dest->mant.m32[0], dest->mant.m32[1], in fp_fsglmul()
350 dest->mant.m32[0] & 0xffffff00, in fp_fsglmul()
354 fp_set_ovrflw(dest); in fp_fsglmul()
355 return dest; in fp_fsglmul()
357 dest->exp = exp; in fp_fsglmul()
360 fp_denormalize(dest, -exp); in fp_fsglmul()
363 return dest; in fp_fsglmul()
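
fsglmul is the single-precision variant: the & 0xffffff00 mask trims each mantissa's high word to 24 significant bits before fp_mul64() (an asm helper in the kernel) forms the 32x32->64 product, so only a single-precision result is produced. A portable stand-in for that truncate-and-multiply step, with sgl_mulmant as an invented name:

    #include <stdio.h>
    #include <stdint.h>

    /* Single-precision-style multiply of two normalized 32-bit mantissa
     * words (bit 31 = integer bit), as fsglmul does: keep only the top
     * 24 bits of each factor, then form the 64-bit product. */
    static uint64_t sgl_mulmant(uint32_t a, uint32_t b)
    {
        a &= 0xffffff00u;        /* 24 significant bits, cf. fp_fsglmul() */
        b &= 0xffffff00u;
        return (uint64_t)a * b;  /* the product fp_mul64() computes in asm */
    }

    int main(void)
    {
        /* 1.2345678 squared, at single precision only */
        uint32_t m = (uint32_t)(1.2345678 * (1u << 31));
        uint64_t p = sgl_mulmant(m, m);
        printf("product mantissa = %.9f\n", (double)p / (double)(1ULL << 62));
        return 0;
    }
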
367 fp_fsgldiv(struct fp_ext *dest, struct fp_ext *src) in fp_fsgldiv() argument
374 fp_dyadic_check(dest, src); in fp_fsgldiv()
377 dest->sign = src->sign ^ dest->sign; in fp_fsgldiv()
380 if (IS_INF(dest)) { in fp_fsgldiv()
383 fp_set_nan(dest); in fp_fsgldiv()
385 return dest; in fp_fsgldiv()
389 dest->exp = 0; in fp_fsgldiv()
390 dest->mant.m64 = 0; in fp_fsgldiv()
391 dest->lowmant = 0; in fp_fsgldiv()
393 return dest; in fp_fsgldiv()
397 if (IS_ZERO(dest)) { in fp_fsgldiv()
400 fp_set_nan(dest); in fp_fsgldiv()
402 return dest; in fp_fsgldiv()
407 dest->exp = 0x7fff; in fp_fsgldiv()
408 dest->mant.m64 = 0; in fp_fsgldiv()
410 return dest; in fp_fsgldiv()
413 exp = dest->exp - src->exp + 0x3fff; in fp_fsgldiv()
415 dest->mant.m32[0] &= 0xffffff00; in fp_fsgldiv()
419 if (dest->mant.m32[0] >= src->mant.m32[0]) { in fp_fsgldiv()
420 fp_sub64(dest->mant, src->mant); in fp_fsgldiv()
421 fp_div64(quot, rem, dest->mant.m32[0], 0, src->mant.m32[0]); in fp_fsgldiv()
422 dest->mant.m32[0] = 0x80000000 | (quot >> 1); in fp_fsgldiv()
423 dest->mant.m32[1] = (quot & 1) | rem; /* only for rounding */ in fp_fsgldiv()
425 fp_div64(quot, rem, dest->mant.m32[0], 0, src->mant.m32[0]); in fp_fsgldiv()
426 dest->mant.m32[0] = quot; in fp_fsgldiv()
427 dest->mant.m32[1] = rem; /* only for rounding */ in fp_fsgldiv()
432 fp_set_ovrflw(dest); in fp_fsgldiv()
433 return dest; in fp_fsgldiv()
435 dest->exp = exp; in fp_fsgldiv()
438 fp_denormalize(dest, -exp); in fp_fsgldiv()
441 return dest; in fp_fsgldiv()
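
fsgldiv likewise divides only the masked 32-bit high words. When the dividend word is at least the divisor word, the quotient lies in [1, 2), so the code pre-subtracts the divisor, divides the remainder extended by 32 zero bits, and ORs the implicit leading one back in at bit 31 (0x80000000 | (quot >> 1)); otherwise the 32-bit quotient already has its top bit set and the exponent has to drop by one to compensate (a line that does not reference dest and so is absent from this listing). A portable sketch of that scheme, where div64 and sgl_divmant are invented helpers approximating what fp_div64() does in asm:

    #include <stdio.h>
    #include <stdint.h>

    /* Portable stand-in for the kernel's fp_div64() asm helper:
     * divide the 64-bit value (hi:lo) by a 32-bit divisor. */
    static void div64(uint32_t *quot, uint32_t *rem,
                      uint32_t hi, uint32_t lo, uint32_t div)
    {
        uint64_t n = ((uint64_t)hi << 32) | lo;
        *quot = (uint32_t)(n / div);   /* fits in 32 bits for the cases below */
        *rem  = (uint32_t)(n % div);
    }

    /* Single-precision mantissa divide in the style of fp_fsgldiv():
     * a and b are normalized 32-bit mantissas (bit 31 set); returns the
     * normalized quotient mantissa and adjusts *exp when the quotient
     * drops below 1.  Rounding via the remainder is left out. */
    static uint32_t sgl_divmant(uint32_t a, uint32_t b, int *exp)
    {
        uint32_t quot, rem;

        if (a >= b) {
            /* quotient in [1, 2): divide out the fractional part and
             * put the implicit leading 1 back at bit 31 */
            div64(&quot, &rem, a - b, 0, b);
            return 0x80000000u | (quot >> 1);
        }
        /* quotient in (0.5, 1): bit 31 of quot is already set */
        div64(&quot, &rem, a, 0, b);
        (*exp)--;
        return quot;
    }

    int main(void)
    {
        int exp = 0;
        /* 1.75 / 1.25 = 1.4 */
        uint32_t q = sgl_divmant(0xe0000000u, 0xa0000000u, &exp);
        printf("mantissa=%.7f exp adjust=%d\n", (double)q / (1u << 31), exp);
        return 0;
    }
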
450 static void fp_roundint(struct fp_ext *dest, int mode) in fp_roundint() argument
455 if (!fp_normalize_ext(dest)) in fp_roundint()
459 if (IS_INF(dest) || IS_ZERO(dest)) in fp_roundint()
463 oldmant = dest->mant; in fp_roundint()
464 switch (dest->exp) { in fp_roundint()
466 dest->mant.m64 = 0; in fp_roundint()
469 dest->mant.m32[0] &= 0xffffffffU << (0x401e - dest->exp); in fp_roundint()
470 dest->mant.m32[1] = 0; in fp_roundint()
471 if (oldmant.m64 == dest->mant.m64) in fp_roundint()
475 dest->mant.m32[1] &= 0xffffffffU << (0x403e - dest->exp); in fp_roundint()
476 if (oldmant.m32[1] == dest->mant.m32[1]) in fp_roundint()
498 switch (dest->exp) { in fp_roundint()
510 mask = 1 << (0x401d - dest->exp); in fp_roundint()
515 if (!(oldmant.m32[0] << (dest->exp - 0x3ffd)) && in fp_roundint()
528 mask = 1 << (0x403d - dest->exp); in fp_roundint()
533 if (!(oldmant.m32[1] << (dest->exp - 0x401d))) in fp_roundint()
543 if (dest->sign ^ (mode - FPCR_ROUND_RM)) in fp_roundint()
548 switch (dest->exp) { in fp_roundint()
550 dest->exp = 0x3fff; in fp_roundint()
551 dest->mant.m64 = 1ULL << 63; in fp_roundint()
554 mask = 1 << (0x401e - dest->exp); in fp_roundint()
555 if (dest->mant.m32[0] += mask) in fp_roundint()
557 dest->mant.m32[0] = 0x80000000; in fp_roundint()
558 dest->exp++; in fp_roundint()
561 mask = 1 << (0x403e - dest->exp); in fp_roundint()
562 if (dest->mant.m32[1] += mask) in fp_roundint()
564 if (dest->mant.m32[0] += 1) in fp_roundint()
566 dest->mant.m32[0] = 0x80000000; in fp_roundint()
567 dest->exp++; in fp_roundint()
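
fp_roundint() rounds to an integral value by masking: the biased exponent determines how many mantissa bits lie below the binary point (the 0x401e/0x403e constants distinguish whether that point falls in the high or the low 32-bit mantissa word), those bits are cleared, and, depending on the rounding mode, the sign and whether anything was cleared, one unit in the last kept place is added back, a carry bumping the exponent and resetting the mantissa to 0x80000000. The standalone sketch below shows truncation and round-away-from-zero on a single 64-bit mantissa; xfloat, xf_trunc and xf_round_away are invented names and the exponent range is deliberately restricted:

    #include <stdio.h>
    #include <stdint.h>
    #include <stdbool.h>

    #define BIAS 0x3fff

    /* Hypothetical simplified value: bit 63 of mant is the integer bit,
     * value = mant / 2^63 * 2^(exp - BIAS).  Assumes 0 <= exp - BIAS <= 62. */
    struct xfloat {
        int sign;
        int exp;
        uint64_t mant;
    };

    /* Truncate toward zero (what fintrz / FPCR_ROUND_RZ does): clear every
     * mantissa bit that lies below the binary point. */
    static void xf_trunc(struct xfloat *x)
    {
        int frac_bits = 63 - (x->exp - BIAS);   /* bits below the binary point */
        x->mant &= ~0ULL << frac_bits;
    }

    /* Round away from zero when any fraction bit was set: add one unit in
     * the last kept place and let the carry, if any, bump the exponent,
     * mirroring the mask/carry handling in fp_roundint(). */
    static void xf_round_away(struct xfloat *x)
    {
        int frac_bits = 63 - (x->exp - BIAS);
        uint64_t unit = 1ULL << frac_bits;
        bool had_fraction = x->mant & (unit - 1);

        xf_trunc(x);
        if (!had_fraction)
            return;
        if (x->mant > ~0ULL - unit) {    /* adding would carry out of bit 63 */
            x->mant = 1ULL << 63;        /* mantissa becomes 1.0 ... */
            x->exp++;                    /* ... and the exponent goes up */
        } else {
            x->mant += unit;
        }
    }

    int main(void)
    {
        struct xfloat a = { 0, BIAS + 1, 0xe000000000000000ULL };  /* 3.5 */
        struct xfloat b = a;
        xf_trunc(&a);                    /* expect 3.0 */
        xf_round_away(&b);               /* expect 4.0 */
        printf("trunc: exp=%d mant=%#llx\n", a.exp - BIAS, (unsigned long long)a.mant);
        printf("away : exp=%d mant=%#llx\n", b.exp - BIAS, (unsigned long long)b.mant);
        return 0;
    }
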
577 modrem_kernel(struct fp_ext *dest, struct fp_ext *src, int mode) in modrem_kernel() argument
581 fp_dyadic_check(dest, src); in modrem_kernel()
584 if (IS_INF(dest) || IS_ZERO(src)) { in modrem_kernel()
585 fp_set_nan(dest); in modrem_kernel()
586 return dest; in modrem_kernel()
588 if (IS_ZERO(dest) || IS_INF(src)) in modrem_kernel()
589 return dest; in modrem_kernel()
592 fp_copy_ext(&tmp, dest); in modrem_kernel()
596 fp_fsub(dest, &tmp); in modrem_kernel()
599 fp_set_quotient((dest->mant.m64 & 0x7f) | (dest->sign << 7)); in modrem_kernel()
600 return dest; in modrem_kernel()
611 fp_fmod(struct fp_ext *dest, struct fp_ext *src) in fp_fmod() argument
614 return modrem_kernel(dest, src, FPCR_ROUND_RZ); in fp_fmod()
623 fp_frem(struct fp_ext *dest, struct fp_ext *src) in fp_frem() argument
626 return modrem_kernel(dest, src, FPCR_ROUND_RN); in fp_frem()
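
modrem_kernel() computes both fmod and frem as dest - N*src, where N is dest/src rounded to an integer by fp_roundint(); the only difference between the two wrappers is the rounding mode, round-toward-zero for fmod and round-to-nearest for frem, and a quotient byte (seven bits plus a sign) is recorded via fp_set_quotient(). Standard C offers the same pair of semantics in fmod() and remainder(), which makes a convenient double-precision cross-check (an analogue only, not the emulator's code path):

    #include <stdio.h>
    #include <math.h>

    int main(void)
    {
        double x = 7.0, y = 2.5;

        /* fmod: N = trunc(x/y) = 2, result 7 - 2*2.5 = 2.0 */
        printf("fmod(7, 2.5)      = % .1f\n", fmod(x, y));

        /* frem: N = nearest(x/y) = nearest(2.8) = 3, result 7 - 3*2.5 = -0.5 */
        printf("remainder(7, 2.5) = % .1f\n", remainder(x, y));
        return 0;
    }
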
630 fp_fint(struct fp_ext *dest, struct fp_ext *src) in fp_fint() argument
634 fp_copy_ext(dest, src); in fp_fint()
636 fp_roundint(dest, FPDATA->rnd); in fp_fint()
638 return dest; in fp_fint()
642 fp_fintrz(struct fp_ext *dest, struct fp_ext *src) in fp_fintrz() argument
646 fp_copy_ext(dest, src); in fp_fintrz()
648 fp_roundint(dest, FPCR_ROUND_RZ); in fp_fintrz()
650 return dest; in fp_fintrz()
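
fint and fintrz are both fp_copy_ext() followed by fp_roundint(); fint uses the rounding mode currently selected in FPDATA->rnd, while fintrz always truncates toward zero. C99's nearbyint() and trunc() behave analogously and show the difference (double-precision analogue only):

    #include <stdio.h>
    #include <math.h>
    #include <fenv.h>

    int main(void)
    {
        double x = -2.7;

        /* fint follows the active rounding mode... */
        fesetround(FE_TONEAREST);
        printf("fint analogue (RN): % .1f\n", nearbyint(x));   /* -3.0 */
        fesetround(FE_UPWARD);
        printf("fint analogue (RP): % .1f\n", nearbyint(x));   /* -2.0 */

        /* ...while fintrz always chops toward zero. */
        printf("fintrz analogue   : % .1f\n", trunc(x));       /* -2.0 */
        return 0;
    }
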
654 fp_fscale(struct fp_ext *dest, struct fp_ext *src) in fp_fscale() argument
660 fp_dyadic_check(dest, src); in fp_fscale()
664 fp_set_nan(dest); in fp_fscale()
665 return dest; in fp_fscale()
667 if (IS_INF(dest)) in fp_fscale()
668 return dest; in fp_fscale()
671 if (IS_ZERO(src) || IS_ZERO(dest)) in fp_fscale()
672 return dest; in fp_fscale()
676 fp_set_ovrflw(dest); in fp_fscale()
677 return dest; in fp_fscale()
687 scale += dest->exp; in fp_fscale()
690 fp_set_ovrflw(dest); in fp_fscale()
693 fp_denormalize(dest, -scale); in fp_fscale()
695 dest->exp = scale; in fp_fscale()
697 return dest; in fp_fscale()
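
Finally, fscale converts src to an integer (rounding it toward zero), adds it to dest's biased exponent, and range-checks the result: too large a scale sets overflow via fp_set_ovrflw(), while a non-positive resulting exponent is pushed back into range with fp_denormalize(). The core effect, multiplying by a power of two through exponent arithmetic alone, matches libm's scalbn()/ldexp() (double-precision analogue):

    #include <stdio.h>
    #include <math.h>

    int main(void)
    {
        double x = 1.5;

        /* fscale analogue: multiply by 2^N by adjusting the exponent only */
        printf("scalbn(1.5,  4) = %g\n", scalbn(x, 4));    /* 24 */
        printf("scalbn(1.5, -3) = %g\n", scalbn(x, -3));   /* 0.1875 */
        return 0;
    }
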