/*-
 * Copyright (c) 2013 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arm/asm.h>

RCSID("$NetBSD: vfpdf.S,v 1.2 2013/06/23 06:19:55 matt Exp $")

/*
 * This file provides softfloat compatible routines which use VFP instructions
 * to do the actual work.  This should give near hard-float performance while
 * being compatible with soft-float code.
 *
 * This file implements the double precision floating point routines.
 */
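
/*
 * Note: these helpers follow the soft-float AAPCS, so a double argument
 * arrives in a pair of core registers rather than a VFP register.
 * Roughly, a call such as
 *
 *	double __adddf3(double a, double b);
 *
 * receives 'a' in {r0,r1} and 'b' in {r2,r3} and returns the result in
 * {r0,r1}; the vmov_* macros below just shuttle those pairs into and
 * out of d0/d1 so the arithmetic itself can be a single VFP
 * instruction.
 */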

#ifdef	__ARMEL__
#define	vmov_arg0	vmov	d0, r0, r1
#define	vmov_arg1	vmov	d1, r2, r3
#define	vmov_ret	vmov	r0, r1, d0
#else
#define	vmov_arg0	vmov	d0, r1, r0
#define	vmov_arg1	vmov	d1, r3, r2
#define	vmov_ret	vmov	r1, r0, d0
#endif
#define	vmov_args	vmov_arg0; vmov_arg1
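
/*
 * Note: "vmov dN, rA, rB" moves rA into the low half of dN and rB into
 * the high half.  A soft-float double held in {r0,r1} keeps its low
 * word in r0 on little-endian and in r1 on big-endian, which is why
 * the operand order flips between the two sets of definitions above.
 */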

#ifdef __ARM_EABI__
#define	__adddf3	__aeabi_dadd
#define	__divdf3	__aeabi_ddiv
#define	__muldf3	__aeabi_dmul
#define	__subdf3	__aeabi_dsub
#define	__negdf2	__aeabi_dneg
#define	__extendsfdf2	__aeabi_f2d
#define	__fixdfsi	__aeabi_d2iz
#define	__fixunsdfsi	__aeabi_d2uiz
#define	__floatsidf	__aeabi_i2d
#define	__floatunsidf	__aeabi_ui2d
#endif
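
/*
 * Note: EABI targets call the RTABI helper names (e.g. a soft-float
 * "a + b" compiles to "bl __aeabi_dadd"), so the defines above rename
 * each traditional libgcc entry point; the ENTRY()/END() markers below
 * then emit the __aeabi_* symbols instead.
 */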

ENTRY(__adddf3)
	vmov_args
	vadd.f64	d0, d0, d1
	vmov_ret
	RET
END(__adddf3)

ENTRY(__subdf3)
	vmov_args
	vsub.f64	d0, d0, d1
	vmov_ret
	RET
END(__subdf3)

#ifdef __ARM_EABI__
ENTRY(__aeabi_drsub)
	vmov_args
	vsub.f64	d0, d1, d0
	vmov_ret
	RET
END(__aeabi_drsub)
#endif

ENTRY(__muldf3)
	vmov_args
	vmul.f64	d0, d0, d1
	vmov_ret
	RET
END(__muldf3)

ENTRY(__divdf3)
	vmov_args
	vdiv.f64	d0, d0, d1
	vmov_ret
	RET
END(__divdf3)

ENTRY(__negdf2)
	vmov_arg0
	vneg.f64	d0, d0
	vmov_ret
	RET
END(__negdf2)

ENTRY(__extendsfdf2)
	vmov		s0, r0
	vcvt.f64.f32	d0, s0
	vmov_ret
	RET
END(__extendsfdf2)

ENTRY(__fixdfsi)
	vmov_arg0
	vcvt.s32.f64	s0, d0
	vmov		r0, s0
	RET
END(__fixdfsi)

ENTRY(__fixunsdfsi)
	vmov_arg0
	vcvt.u32.f64	s0, d0
	vmov		r0, s0
	RET
END(__fixunsdfsi)
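
/*
 * Note: the vcvt.s32.f64/vcvt.u32.f64 forms above always round toward
 * zero (VCVTR is the variant that honours the FPSCR rounding mode),
 * which matches truncating C casts and the trailing "z" in the EABI
 * names __aeabi_d2iz/__aeabi_d2uiz.
 */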

ENTRY(__floatsidf)
	vmov		s0, r0
	vcvt.f64.s32	d0, s0
	vmov_ret
	RET
END(__floatsidf)

ENTRY(__floatunsidf)
	vmov		s0, r0
	vcvt.f64.u32	d0, s0
	vmov_ret
	RET
END(__floatunsidf)

/*
 * Effect of a floating point comparison on the condition flags.
 *      N Z C V
 * EQ = 0 1 1 0
 * LT = 1 0 0 0
 * GT = 0 0 1 0
 * UN = 0 0 1 1
 */
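
/*
 * Note: "vmrs APSR_nzcv, fpscr" copies the N/Z/C/V flags that
 * vcmp/vcmpe left in the FPSCR into the APSR, so ordinary conditional
 * instructions can act on a floating point compare.  For example,
 * comparing 1.0 against 2.0 produces the LT row above (N=1), so a
 * following "movlt r0, #1" executes while "movge" does not.  The
 * __aeabi_cd* helpers return with the flags still set for the caller;
 * the __aeabi_dcmp* helpers turn them into a 0/1 value in r0.
 */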
#ifdef __ARM_EABI__
ENTRY(__aeabi_cdcmpeq)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	RET
END(__aeabi_cdcmpeq)

ENTRY(__aeabi_cdcmple)
	vmov_args
	vcmpe.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	RET
END(__aeabi_cdcmple)

ENTRY(__aeabi_cdrcmple)
	vmov_args
	vcmpe.f64	d1, d0
	vmrs		APSR_nzcv, fpscr
	RET
END(__aeabi_cdrcmple)

ENTRY(__aeabi_dcmpeq)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	moveq		r0, #1		/* (a == b) */
	movne		r0, #0		/* (a != b) or unordered */
	RET
END(__aeabi_dcmpeq)

ENTRY(__aeabi_dcmplt)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	movlt		r0, #1		/* (a < b) */
	movcs		r0, #0		/* (a >= b) or unordered */
	RET
END(__aeabi_dcmplt)

ENTRY(__aeabi_dcmple)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	movls		r0, #1		/* (a <= b) */
	movhi		r0, #0		/* (a > b) or unordered */
	RET
END(__aeabi_dcmple)

ENTRY(__aeabi_dcmpge)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	movge		r0, #1		/* (a >= b) */
	movlt		r0, #0		/* (a < b) or unordered */
	RET
END(__aeabi_dcmpge)

ENTRY(__aeabi_dcmpgt)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	movgt		r0, #1		/* (a > b) */
	movle		r0, #0		/* (a <= b) or unordered */
	RET
END(__aeabi_dcmpgt)

ENTRY(__aeabi_dcmpun)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	movvs		r0, #1		/* (isnan(a) || isnan(b)) */
	movvc		r0, #0		/* !isnan(a) && !isnan(b) */
	RET
END(__aeabi_dcmpun)

#else
/* N set if compare <= result */
/* Z set if compare = result */
/* C set if compare (=,>=,UNORD) result */
/* V set if compare UNORD result */
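
/*
 * Note on the aliases below: the traditional libgcc comparison helpers
 * only promise a result the caller tests against zero, so e.g. "a < b"
 * compiles to roughly
 *
 *	bl	__ltdf2
 *	cmp	r0, #0
 *	blt	1f
 *
 * One routine can therefore serve two names: __eqdf2 and __nedf2 both
 * return zero exactly when a == b, and likewise for the other pairs.
 */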

STRONG_ALIAS(__eqdf2, __nedf2)
ENTRY(__nedf2)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	moveq		r0, #0		/* (a == b) */
	movne		r0, #1		/* (a != b) or unordered */
	RET
END(__nedf2)

STRONG_ALIAS(__gedf2, __ltdf2)
ENTRY(__ltdf2)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	mvnmi		r0, #0		/* (a < b): return -1 */
	movpl		r0, #0		/* (a >= b) or unordered */
	RET
END(__ltdf2)

STRONG_ALIAS(__gtdf2, __ledf2)
ENTRY(__ledf2)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	movgt		r0, #1		/* (a > b) */
	movle		r0, #0		/* (a <= b) or unordered */
	RET
END(__ledf2)

ENTRY(__unorddf2)
	vmov_args
	vcmp.f64	d0, d1
	vmrs		APSR_nzcv, fpscr
	movvs		r0, #1		/* isnan(a) || isnan(b) */
	movvc		r0, #0		/* !isnan(a) && !isnan(b) */
	RET
END(__unorddf2)
#endif /* !__ARM_EABI__ */