1 /* Description of builtins used by the ARM backend.
2    Copyright (C) 2014-2019 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify it
7    under the terms of the GNU General Public License as published
8    by the Free Software Foundation; either version 3, or (at your
9    option) any later version.
10 
11    GCC is distributed in the hope that it will be useful, but WITHOUT
12    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
14    License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #define IN_TARGET_CODE 1
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "function.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple-expr.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "profile-count.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "explow.h"
40 #include "expr.h"
41 #include "langhooks.h"
42 #include "case-cfn-macros.h"
43 #include "sbitmap.h"
44 
45 #define SIMD_MAX_BUILTIN_ARGS 7
46 
47 enum arm_type_qualifiers
48 {
49   /* T foo.  */
50   qualifier_none = 0x0,
51   /* unsigned T foo.  */
52   qualifier_unsigned = 0x1, /* 1 << 0  */
53   /* const T foo.  */
54   qualifier_const = 0x2, /* 1 << 1  */
55   /* T *foo.  */
56   qualifier_pointer = 0x4, /* 1 << 2  */
57   /* const T * foo.  */
58   qualifier_const_pointer = 0x6,
59   /* Used when expanding arguments if an operand could
60      be an immediate.  */
61   qualifier_immediate = 0x8, /* 1 << 3  */
62   qualifier_unsigned_immediate = 0x9,
63   qualifier_maybe_immediate = 0x10, /* 1 << 4  */
64   /* void foo (...).  */
65   qualifier_void = 0x20, /* 1 << 5  */
66   /* Some patterns may have internal operands; this qualifier is an
67      instruction to the initialisation code to skip this operand.  */
68   qualifier_internal = 0x40, /* 1 << 6  */
69   /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
70      rather than using the type of the operand.  */
71   qualifier_map_mode = 0x80, /* 1 << 7  */
72   /* qualifier_pointer | qualifier_map_mode  */
73   qualifier_pointer_map_mode = 0x84,
74   /* qualifier_const_pointer | qualifier_map_mode  */
75   qualifier_const_pointer_map_mode = 0x86,
76   /* Polynomial types.  */
77   qualifier_poly = 0x100,
78   /* Lane indices - must be within range of the previous (vector) argument.  */
79   qualifier_lane_index = 0x200,
80   /* Lane indices for single lane structure loads and stores.  */
81   qualifier_struct_load_store_lane_index = 0x400,
82   /* A void pointer.  */
83   qualifier_void_pointer = 0x800,
84   /* A const void pointer.  */
85   qualifier_const_void_pointer = 0x802,
86   /* Lane indices selected in pairs - must be within range of the previous
87      (vector) argument.  */
88   qualifier_lane_pair_index = 0x1000
89 };
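
/* The composite qualifiers above are bitwise ORs of the single-bit flags;
   for example qualifier_const_pointer == (qualifier_const | qualifier_pointer)
   (0x2 | 0x4 == 0x6) and qualifier_const_void_pointer
   == (qualifier_void_pointer | qualifier_const) (0x800 | 0x2 == 0x802).
   Individual properties can therefore be tested with a simple bitwise AND,
   as arm_init_builtin does below.  */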
90 
91 /*  The qualifier_internal allows generation of a unary builtin from
92     a pattern with a third pseudo-operand such as a match_scratch.
93     T (T).  */
94 static enum arm_type_qualifiers
95 arm_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
96   = { qualifier_none, qualifier_none, qualifier_internal };
97 #define UNOP_QUALIFIERS (arm_unop_qualifiers)
98 
99 /* unsigned T (unsigned T).  */
100 static enum arm_type_qualifiers
101 arm_bswap_qualifiers[SIMD_MAX_BUILTIN_ARGS]
102   = { qualifier_unsigned, qualifier_unsigned };
103 #define BSWAP_QUALIFIERS (arm_bswap_qualifiers)
104 
105 /* T (T, T [maybe_immediate]).  */
106 static enum arm_type_qualifiers
107 arm_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
108   = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
109 #define BINOP_QUALIFIERS (arm_binop_qualifiers)
110 
111 /* T (T, T, T).  */
112 static enum arm_type_qualifiers
113 arm_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
114   = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
115 #define TERNOP_QUALIFIERS (arm_ternop_qualifiers)
116 
117 /* unsigned T (unsigned T, unsigned T, unsigned T).  */
118 static enum arm_type_qualifiers
119 arm_unsigned_uternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
120   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
121       qualifier_unsigned };
122 #define UTERNOP_QUALIFIERS (arm_unsigned_uternop_qualifiers)
123 
124 /* T (T, immediate).  */
125 static enum arm_type_qualifiers
126 arm_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
127   = { qualifier_none, qualifier_none, qualifier_immediate };
128 #define BINOP_IMM_QUALIFIERS (arm_binop_imm_qualifiers)
129 
130 /* T (T, lane index).  */
131 static enum arm_type_qualifiers
132 arm_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
133   = { qualifier_none, qualifier_none, qualifier_lane_index };
134 #define GETLANE_QUALIFIERS (arm_getlane_qualifiers)
135 
136 /* T (T, T, T, immediate).  */
137 static enum arm_type_qualifiers
138 arm_mac_n_qualifiers[SIMD_MAX_BUILTIN_ARGS]
139   = { qualifier_none, qualifier_none, qualifier_none,
140       qualifier_none, qualifier_immediate };
141 #define MAC_N_QUALIFIERS (arm_mac_n_qualifiers)
142 
143 /* T (T, T, T, lane index).  */
144 static enum arm_type_qualifiers
145 arm_mac_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
146   = { qualifier_none, qualifier_none, qualifier_none,
147       qualifier_none, qualifier_lane_index };
148 #define MAC_LANE_QUALIFIERS (arm_mac_lane_qualifiers)
149 
150 /* T (T, T, T, lane pair index).  */
151 static enum arm_type_qualifiers
152 arm_mac_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
153   = { qualifier_none, qualifier_none, qualifier_none,
154       qualifier_none, qualifier_lane_pair_index };
155 #define MAC_LANE_PAIR_QUALIFIERS (arm_mac_lane_pair_qualifiers)
156 
157 /* unsigned T (unsigned T, unsigned T, unsigned T, lane index).  */
158 static enum arm_type_qualifiers
159 arm_umac_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
160   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
161       qualifier_unsigned, qualifier_lane_index };
162 #define UMAC_LANE_QUALIFIERS (arm_umac_lane_qualifiers)
163 
164 /* T (T, T, immediate).  */
165 static enum arm_type_qualifiers
166 arm_ternop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
167   = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
168 #define TERNOP_IMM_QUALIFIERS (arm_ternop_imm_qualifiers)
169 
170 /* T (T, T, lane index).  */
171 static enum arm_type_qualifiers
172 arm_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
173   = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
174 #define SETLANE_QUALIFIERS (arm_setlane_qualifiers)
175 
176 /* T (T, T).  */
177 static enum arm_type_qualifiers
178 arm_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
179   = { qualifier_none, qualifier_none, qualifier_none };
180 #define COMBINE_QUALIFIERS (arm_combine_qualifiers)
181 
182 /* T ([T element type] *).  */
183 static enum arm_type_qualifiers
184 arm_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
185   = { qualifier_none, qualifier_const_pointer_map_mode };
186 #define LOAD1_QUALIFIERS (arm_load1_qualifiers)
187 
188 /* T ([T element type] *, T, immediate).  */
189 static enum arm_type_qualifiers
190 arm_load1_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
191   = { qualifier_none, qualifier_const_pointer_map_mode,
192       qualifier_none, qualifier_struct_load_store_lane_index };
193 #define LOAD1LANE_QUALIFIERS (arm_load1_lane_qualifiers)
194 
195 /* unsigned T (unsigned T, unsigned T, unsigned T).  */
196 static enum arm_type_qualifiers
197 arm_unsigned_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
198   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
199       qualifier_unsigned };
200 #define UBINOP_QUALIFIERS (arm_unsigned_binop_qualifiers)
201 
202 /* void (unsigned immediate, unsigned immediate, unsigned immediate,
203 	 unsigned immediate, unsigned immediate, unsigned immediate).  */
204 static enum arm_type_qualifiers
205 arm_cdp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
206   = { qualifier_void, qualifier_unsigned_immediate,
207       qualifier_unsigned_immediate,
208       qualifier_unsigned_immediate,
209       qualifier_unsigned_immediate,
210       qualifier_unsigned_immediate,
211       qualifier_unsigned_immediate };
212 #define CDP_QUALIFIERS \
213   (arm_cdp_qualifiers)
214 
215 /* void (unsigned immediate, unsigned immediate,  const void *).  */
216 static enum arm_type_qualifiers
217 arm_ldc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
218   = { qualifier_void, qualifier_unsigned_immediate,
219       qualifier_unsigned_immediate, qualifier_const_void_pointer };
220 #define LDC_QUALIFIERS \
221   (arm_ldc_qualifiers)
222 
223 /* void (unsigned immediate, unsigned immediate,  void *).  */
224 static enum arm_type_qualifiers
225 arm_stc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
226   = { qualifier_void, qualifier_unsigned_immediate,
227       qualifier_unsigned_immediate, qualifier_void_pointer };
228 #define STC_QUALIFIERS \
229   (arm_stc_qualifiers)
230 
231 /* void (unsigned immediate, unsigned immediate,  T, unsigned immediate,
232 	 unsigned immediate, unsigned immediate).  */
233 static enum arm_type_qualifiers
234 arm_mcr_qualifiers[SIMD_MAX_BUILTIN_ARGS]
235   = { qualifier_void, qualifier_unsigned_immediate,
236       qualifier_unsigned_immediate, qualifier_none,
237       qualifier_unsigned_immediate, qualifier_unsigned_immediate,
238       qualifier_unsigned_immediate };
239 #define MCR_QUALIFIERS \
240   (arm_mcr_qualifiers)
241 
242 /* T (unsigned immediate, unsigned immediate, unsigned immediate,
243       unsigned immediate, unsigned immediate).  */
244 static enum arm_type_qualifiers
245 arm_mrc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
246   = { qualifier_none, qualifier_unsigned_immediate,
247       qualifier_unsigned_immediate, qualifier_unsigned_immediate,
248       qualifier_unsigned_immediate, qualifier_unsigned_immediate };
249 #define MRC_QUALIFIERS \
250   (arm_mrc_qualifiers)
251 
252 /* void (unsigned immediate, unsigned immediate,  T, unsigned immediate).  */
253 static enum arm_type_qualifiers
254 arm_mcrr_qualifiers[SIMD_MAX_BUILTIN_ARGS]
255   = { qualifier_void, qualifier_unsigned_immediate,
256       qualifier_unsigned_immediate, qualifier_none,
257       qualifier_unsigned_immediate };
258 #define MCRR_QUALIFIERS \
259   (arm_mcrr_qualifiers)
260 
261 /* T (unsigned immediate, unsigned immediate, unsigned immediate).  */
262 static enum arm_type_qualifiers
263 arm_mrrc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
264   = { qualifier_none, qualifier_unsigned_immediate,
265       qualifier_unsigned_immediate, qualifier_unsigned_immediate };
266 #define MRRC_QUALIFIERS \
267   (arm_mrrc_qualifiers)
268 
269 /* The first argument (return type) of a store should be void type,
270    which we represent with qualifier_void.  Its first operand will be
271    a DImode pointer to the location to store to, so we must use
272    qualifier_map_mode | qualifier_pointer to build a pointer to the
273    element type of the vector.
274 
275    void ([T element type] *, T).  */
276 static enum arm_type_qualifiers
277 arm_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
278   = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
279 #define STORE1_QUALIFIERS (arm_store1_qualifiers)
280 
281    /* void ([T element type] *, T, immediate).  */
282 static enum arm_type_qualifiers
283 arm_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
284   = { qualifier_void, qualifier_pointer_map_mode,
285       qualifier_none, qualifier_struct_load_store_lane_index };
286 #define STORE1LANE_QUALIFIERS (arm_storestruct_lane_qualifiers)
287 
288 #define v8qi_UP  E_V8QImode
289 #define v4hi_UP  E_V4HImode
290 #define v4hf_UP  E_V4HFmode
291 #define v2si_UP  E_V2SImode
292 #define v2sf_UP  E_V2SFmode
293 #define di_UP    E_DImode
294 #define v16qi_UP E_V16QImode
295 #define v8hi_UP  E_V8HImode
296 #define v8hf_UP  E_V8HFmode
297 #define v4si_UP  E_V4SImode
298 #define v4sf_UP  E_V4SFmode
299 #define v2di_UP  E_V2DImode
300 #define ti_UP	 E_TImode
301 #define ei_UP	 E_EImode
302 #define oi_UP	 E_OImode
303 #define hf_UP	 E_HFmode
304 #define si_UP	 E_SImode
305 #define void_UP	 E_VOIDmode
306 
307 #define UP(X) X##_UP
308 
309 typedef struct {
310   const char *name;			/* Builtin name, minus prefix/suffix.  */
311   machine_mode mode;			/* "Key" mode of the instruction.  */
312   const enum insn_code code;		/* Pattern used to expand the builtin.  */
313   unsigned int fcode;			/* Function code; set by arm_init_builtin.  */
314   enum arm_type_qualifiers *qualifiers;	/* Return type first, then arguments.  */
315 } arm_builtin_datum;
316 
317 #define CF(N,X) CODE_FOR_neon_##N##X
318 
319 #define VAR1(T, N, A) \
320   {#N #A, UP (A), CF (N, A), 0, T##_QUALIFIERS},
321 #define VAR2(T, N, A, B) \
322   VAR1 (T, N, A) \
323   VAR1 (T, N, B)
324 #define VAR3(T, N, A, B, C) \
325   VAR2 (T, N, A, B) \
326   VAR1 (T, N, C)
327 #define VAR4(T, N, A, B, C, D) \
328   VAR3 (T, N, A, B, C) \
329   VAR1 (T, N, D)
330 #define VAR5(T, N, A, B, C, D, E) \
331   VAR4 (T, N, A, B, C, D) \
332   VAR1 (T, N, E)
333 #define VAR6(T, N, A, B, C, D, E, F) \
334   VAR5 (T, N, A, B, C, D, E) \
335   VAR1 (T, N, F)
336 #define VAR7(T, N, A, B, C, D, E, F, G) \
337   VAR6 (T, N, A, B, C, D, E, F) \
338   VAR1 (T, N, G)
339 #define VAR8(T, N, A, B, C, D, E, F, G, H) \
340   VAR7 (T, N, A, B, C, D, E, F, G) \
341   VAR1 (T, N, H)
342 #define VAR9(T, N, A, B, C, D, E, F, G, H, I) \
343   VAR8 (T, N, A, B, C, D, E, F, G, H) \
344   VAR1 (T, N, I)
345 #define VAR10(T, N, A, B, C, D, E, F, G, H, I, J) \
346   VAR9 (T, N, A, B, C, D, E, F, G, H, I) \
347   VAR1 (T, N, J)
348 #define VAR11(T, N, A, B, C, D, E, F, G, H, I, J, K) \
349   VAR10 (T, N, A, B, C, D, E, F, G, H, I, J) \
350   VAR1 (T, N, K)
351 #define VAR12(T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
352   VAR11 (T, N, A, B, C, D, E, F, G, H, I, J, K) \
353   VAR1 (T, N, L)
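
/* As an illustration, a hypothetical entry

     VAR2 (BINOP, vadd, v8qi, v16qi)

   in arm_vfp_builtins.def or arm_neon_builtins.def would expand, via
   VAR2 -> VAR1, into the two table initializers

     {"vaddv8qi",  E_V8QImode,  CODE_FOR_neon_vaddv8qi,  0, BINOP_QUALIFIERS},
     {"vaddv16qi", E_V16QImode, CODE_FOR_neon_vaddv16qi, 0, BINOP_QUALIFIERS},

   i.e. one arm_builtin_datum per mode variant, all sharing the same qualifier
   array (the fcode field is filled in later by arm_init_builtin).  */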
354 
355 /* The builtin data can be found in arm_neon_builtins.def, arm_vfp_builtins.def
356    and arm_acle_builtins.def.  The entries in arm_neon_builtins.def require
357    TARGET_NEON to be true.  The feature tests are checked when the builtins are
358    expanded.
359 
360    The mode entries in the following table correspond to the "key" type of the
361    instruction variant, i.e. equivalent to that which would be specified after
362    the assembler mnemonic for neon instructions, which usually refers to the
363    last vector operand.  The modes listed per instruction should be the same as
364    those defined for that instruction's pattern, for instance in neon.md.  */
365 
366 static arm_builtin_datum vfp_builtin_data[] =
367 {
368 #include "arm_vfp_builtins.def"
369 };
370 
371 static arm_builtin_datum neon_builtin_data[] =
372 {
373 #include "arm_neon_builtins.def"
374 };
375 
376 #undef CF
377 #undef VAR1
378 #define VAR1(T, N, A) \
379   {#N, UP (A), CODE_FOR_##N, 0, T##_QUALIFIERS},
380 
381 static arm_builtin_datum acle_builtin_data[] =
382 {
383 #include "arm_acle_builtins.def"
384 };
385 
386 #undef VAR1
387 
388 #define VAR1(T, N, X) \
389   ARM_BUILTIN_NEON_##N##X,
390 
391 enum arm_builtins
392 {
393   ARM_BUILTIN_GETWCGR0,
394   ARM_BUILTIN_GETWCGR1,
395   ARM_BUILTIN_GETWCGR2,
396   ARM_BUILTIN_GETWCGR3,
397 
398   ARM_BUILTIN_SETWCGR0,
399   ARM_BUILTIN_SETWCGR1,
400   ARM_BUILTIN_SETWCGR2,
401   ARM_BUILTIN_SETWCGR3,
402 
403   ARM_BUILTIN_WZERO,
404 
405   ARM_BUILTIN_WAVG2BR,
406   ARM_BUILTIN_WAVG2HR,
407   ARM_BUILTIN_WAVG2B,
408   ARM_BUILTIN_WAVG2H,
409 
410   ARM_BUILTIN_WACCB,
411   ARM_BUILTIN_WACCH,
412   ARM_BUILTIN_WACCW,
413 
414   ARM_BUILTIN_WMACS,
415   ARM_BUILTIN_WMACSZ,
416   ARM_BUILTIN_WMACU,
417   ARM_BUILTIN_WMACUZ,
418 
419   ARM_BUILTIN_WSADB,
420   ARM_BUILTIN_WSADBZ,
421   ARM_BUILTIN_WSADH,
422   ARM_BUILTIN_WSADHZ,
423 
424   ARM_BUILTIN_WALIGNI,
425   ARM_BUILTIN_WALIGNR0,
426   ARM_BUILTIN_WALIGNR1,
427   ARM_BUILTIN_WALIGNR2,
428   ARM_BUILTIN_WALIGNR3,
429 
430   ARM_BUILTIN_TMIA,
431   ARM_BUILTIN_TMIAPH,
432   ARM_BUILTIN_TMIABB,
433   ARM_BUILTIN_TMIABT,
434   ARM_BUILTIN_TMIATB,
435   ARM_BUILTIN_TMIATT,
436 
437   ARM_BUILTIN_TMOVMSKB,
438   ARM_BUILTIN_TMOVMSKH,
439   ARM_BUILTIN_TMOVMSKW,
440 
441   ARM_BUILTIN_TBCSTB,
442   ARM_BUILTIN_TBCSTH,
443   ARM_BUILTIN_TBCSTW,
444 
445   ARM_BUILTIN_WMADDS,
446   ARM_BUILTIN_WMADDU,
447 
448   ARM_BUILTIN_WPACKHSS,
449   ARM_BUILTIN_WPACKWSS,
450   ARM_BUILTIN_WPACKDSS,
451   ARM_BUILTIN_WPACKHUS,
452   ARM_BUILTIN_WPACKWUS,
453   ARM_BUILTIN_WPACKDUS,
454 
455   ARM_BUILTIN_WADDB,
456   ARM_BUILTIN_WADDH,
457   ARM_BUILTIN_WADDW,
458   ARM_BUILTIN_WADDSSB,
459   ARM_BUILTIN_WADDSSH,
460   ARM_BUILTIN_WADDSSW,
461   ARM_BUILTIN_WADDUSB,
462   ARM_BUILTIN_WADDUSH,
463   ARM_BUILTIN_WADDUSW,
464   ARM_BUILTIN_WSUBB,
465   ARM_BUILTIN_WSUBH,
466   ARM_BUILTIN_WSUBW,
467   ARM_BUILTIN_WSUBSSB,
468   ARM_BUILTIN_WSUBSSH,
469   ARM_BUILTIN_WSUBSSW,
470   ARM_BUILTIN_WSUBUSB,
471   ARM_BUILTIN_WSUBUSH,
472   ARM_BUILTIN_WSUBUSW,
473 
474   ARM_BUILTIN_WAND,
475   ARM_BUILTIN_WANDN,
476   ARM_BUILTIN_WOR,
477   ARM_BUILTIN_WXOR,
478 
479   ARM_BUILTIN_WCMPEQB,
480   ARM_BUILTIN_WCMPEQH,
481   ARM_BUILTIN_WCMPEQW,
482   ARM_BUILTIN_WCMPGTUB,
483   ARM_BUILTIN_WCMPGTUH,
484   ARM_BUILTIN_WCMPGTUW,
485   ARM_BUILTIN_WCMPGTSB,
486   ARM_BUILTIN_WCMPGTSH,
487   ARM_BUILTIN_WCMPGTSW,
488 
489   ARM_BUILTIN_TEXTRMSB,
490   ARM_BUILTIN_TEXTRMSH,
491   ARM_BUILTIN_TEXTRMSW,
492   ARM_BUILTIN_TEXTRMUB,
493   ARM_BUILTIN_TEXTRMUH,
494   ARM_BUILTIN_TEXTRMUW,
495   ARM_BUILTIN_TINSRB,
496   ARM_BUILTIN_TINSRH,
497   ARM_BUILTIN_TINSRW,
498 
499   ARM_BUILTIN_WMAXSW,
500   ARM_BUILTIN_WMAXSH,
501   ARM_BUILTIN_WMAXSB,
502   ARM_BUILTIN_WMAXUW,
503   ARM_BUILTIN_WMAXUH,
504   ARM_BUILTIN_WMAXUB,
505   ARM_BUILTIN_WMINSW,
506   ARM_BUILTIN_WMINSH,
507   ARM_BUILTIN_WMINSB,
508   ARM_BUILTIN_WMINUW,
509   ARM_BUILTIN_WMINUH,
510   ARM_BUILTIN_WMINUB,
511 
512   ARM_BUILTIN_WMULUM,
513   ARM_BUILTIN_WMULSM,
514   ARM_BUILTIN_WMULUL,
515 
516   ARM_BUILTIN_PSADBH,
517   ARM_BUILTIN_WSHUFH,
518 
519   ARM_BUILTIN_WSLLH,
520   ARM_BUILTIN_WSLLW,
521   ARM_BUILTIN_WSLLD,
522   ARM_BUILTIN_WSRAH,
523   ARM_BUILTIN_WSRAW,
524   ARM_BUILTIN_WSRAD,
525   ARM_BUILTIN_WSRLH,
526   ARM_BUILTIN_WSRLW,
527   ARM_BUILTIN_WSRLD,
528   ARM_BUILTIN_WRORH,
529   ARM_BUILTIN_WRORW,
530   ARM_BUILTIN_WRORD,
531   ARM_BUILTIN_WSLLHI,
532   ARM_BUILTIN_WSLLWI,
533   ARM_BUILTIN_WSLLDI,
534   ARM_BUILTIN_WSRAHI,
535   ARM_BUILTIN_WSRAWI,
536   ARM_BUILTIN_WSRADI,
537   ARM_BUILTIN_WSRLHI,
538   ARM_BUILTIN_WSRLWI,
539   ARM_BUILTIN_WSRLDI,
540   ARM_BUILTIN_WRORHI,
541   ARM_BUILTIN_WRORWI,
542   ARM_BUILTIN_WRORDI,
543 
544   ARM_BUILTIN_WUNPCKIHB,
545   ARM_BUILTIN_WUNPCKIHH,
546   ARM_BUILTIN_WUNPCKIHW,
547   ARM_BUILTIN_WUNPCKILB,
548   ARM_BUILTIN_WUNPCKILH,
549   ARM_BUILTIN_WUNPCKILW,
550 
551   ARM_BUILTIN_WUNPCKEHSB,
552   ARM_BUILTIN_WUNPCKEHSH,
553   ARM_BUILTIN_WUNPCKEHSW,
554   ARM_BUILTIN_WUNPCKEHUB,
555   ARM_BUILTIN_WUNPCKEHUH,
556   ARM_BUILTIN_WUNPCKEHUW,
557   ARM_BUILTIN_WUNPCKELSB,
558   ARM_BUILTIN_WUNPCKELSH,
559   ARM_BUILTIN_WUNPCKELSW,
560   ARM_BUILTIN_WUNPCKELUB,
561   ARM_BUILTIN_WUNPCKELUH,
562   ARM_BUILTIN_WUNPCKELUW,
563 
564   ARM_BUILTIN_WABSB,
565   ARM_BUILTIN_WABSH,
566   ARM_BUILTIN_WABSW,
567 
568   ARM_BUILTIN_WADDSUBHX,
569   ARM_BUILTIN_WSUBADDHX,
570 
571   ARM_BUILTIN_WABSDIFFB,
572   ARM_BUILTIN_WABSDIFFH,
573   ARM_BUILTIN_WABSDIFFW,
574 
575   ARM_BUILTIN_WADDCH,
576   ARM_BUILTIN_WADDCW,
577 
578   ARM_BUILTIN_WAVG4,
579   ARM_BUILTIN_WAVG4R,
580 
581   ARM_BUILTIN_WMADDSX,
582   ARM_BUILTIN_WMADDUX,
583 
584   ARM_BUILTIN_WMADDSN,
585   ARM_BUILTIN_WMADDUN,
586 
587   ARM_BUILTIN_WMULWSM,
588   ARM_BUILTIN_WMULWUM,
589 
590   ARM_BUILTIN_WMULWSMR,
591   ARM_BUILTIN_WMULWUMR,
592 
593   ARM_BUILTIN_WMULWL,
594 
595   ARM_BUILTIN_WMULSMR,
596   ARM_BUILTIN_WMULUMR,
597 
598   ARM_BUILTIN_WQMULM,
599   ARM_BUILTIN_WQMULMR,
600 
601   ARM_BUILTIN_WQMULWM,
602   ARM_BUILTIN_WQMULWMR,
603 
604   ARM_BUILTIN_WADDBHUSM,
605   ARM_BUILTIN_WADDBHUSL,
606 
607   ARM_BUILTIN_WQMIABB,
608   ARM_BUILTIN_WQMIABT,
609   ARM_BUILTIN_WQMIATB,
610   ARM_BUILTIN_WQMIATT,
611 
612   ARM_BUILTIN_WQMIABBN,
613   ARM_BUILTIN_WQMIABTN,
614   ARM_BUILTIN_WQMIATBN,
615   ARM_BUILTIN_WQMIATTN,
616 
617   ARM_BUILTIN_WMIABB,
618   ARM_BUILTIN_WMIABT,
619   ARM_BUILTIN_WMIATB,
620   ARM_BUILTIN_WMIATT,
621 
622   ARM_BUILTIN_WMIABBN,
623   ARM_BUILTIN_WMIABTN,
624   ARM_BUILTIN_WMIATBN,
625   ARM_BUILTIN_WMIATTN,
626 
627   ARM_BUILTIN_WMIAWBB,
628   ARM_BUILTIN_WMIAWBT,
629   ARM_BUILTIN_WMIAWTB,
630   ARM_BUILTIN_WMIAWTT,
631 
632   ARM_BUILTIN_WMIAWBBN,
633   ARM_BUILTIN_WMIAWBTN,
634   ARM_BUILTIN_WMIAWTBN,
635   ARM_BUILTIN_WMIAWTTN,
636 
637   ARM_BUILTIN_WMERGE,
638 
639   ARM_BUILTIN_GET_FPSCR,
640   ARM_BUILTIN_SET_FPSCR,
641 
642   ARM_BUILTIN_CMSE_NONSECURE_CALLER,
643 
644 #undef CRYPTO1
645 #undef CRYPTO2
646 #undef CRYPTO3
647 
648 #define CRYPTO1(L, U, M1, M2) \
649   ARM_BUILTIN_CRYPTO_##U,
650 #define CRYPTO2(L, U, M1, M2, M3) \
651   ARM_BUILTIN_CRYPTO_##U,
652 #define CRYPTO3(L, U, M1, M2, M3, M4) \
653   ARM_BUILTIN_CRYPTO_##U,
654 
655   ARM_BUILTIN_CRYPTO_BASE,
656 
657 #include "crypto.def"
658 
659 #undef CRYPTO1
660 #undef CRYPTO2
661 #undef CRYPTO3
662 
663   ARM_BUILTIN_VFP_BASE,
664 
665 #include "arm_vfp_builtins.def"
666 
667   ARM_BUILTIN_NEON_BASE,
668   ARM_BUILTIN_NEON_LANE_CHECK = ARM_BUILTIN_NEON_BASE,
669 
670 #include "arm_neon_builtins.def"
671 
672 #undef VAR1
673 #define VAR1(T, N, X) \
674   ARM_BUILTIN_##N,
675 
676   ARM_BUILTIN_ACLE_BASE,
677 
678 #include "arm_acle_builtins.def"
679 
680   ARM_BUILTIN_MAX
681 };
682 
683 #define ARM_BUILTIN_VFP_PATTERN_START \
684   (ARM_BUILTIN_VFP_BASE + 1)
685 
686 #define ARM_BUILTIN_NEON_PATTERN_START \
687   (ARM_BUILTIN_NEON_BASE + 1)
688 
689 #define ARM_BUILTIN_ACLE_PATTERN_START \
690   (ARM_BUILTIN_ACLE_BASE + 1)
691 
692 #undef CF
693 #undef VAR1
694 #undef VAR2
695 #undef VAR3
696 #undef VAR4
697 #undef VAR5
698 #undef VAR6
699 #undef VAR7
700 #undef VAR8
701 #undef VAR9
702 #undef VAR10
703 
704 static GTY(()) tree arm_builtin_decls[ARM_BUILTIN_MAX];
705 
706 #define NUM_DREG_TYPES 5
707 #define NUM_QREG_TYPES 6
708 
709 /* Internal scalar builtin types.  These types are used to support
710    Neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
711    the mangling for these types is implementation defined.  */
712 const char *arm_scalar_builtin_types[] = {
713   "__builtin_neon_qi",
714   "__builtin_neon_hi",
715   "__builtin_neon_si",
716   "__builtin_neon_sf",
717   "__builtin_neon_di",
718   "__builtin_neon_df",
719   "__builtin_neon_ti",
720   "__builtin_neon_uqi",
721   "__builtin_neon_uhi",
722   "__builtin_neon_usi",
723   "__builtin_neon_udi",
724   "__builtin_neon_ei",
725   "__builtin_neon_oi",
726   "__builtin_neon_ci",
727   "__builtin_neon_xi",
728   NULL
729 };
730 
731 #define ENTRY(E, M, Q, S, T, G) E,
732 enum arm_simd_type
733 {
734 #include "arm-simd-builtin-types.def"
735   __TYPE_FINAL
736 };
737 #undef ENTRY
738 
739 struct arm_simd_type_info
740 {
741   enum arm_simd_type type;
742 
743   /* Internal type name.  */
744   const char *name;
745 
746   /* Internal type name (mangled).  The mangled names conform to the
747      AAPCS (see "Procedure Call Standard for the ARM Architecture",
748      Appendix A).  To qualify for emission with the mangled names defined in
749      that document, a vector type must not only be of the correct mode but also
750      be of the correct internal Neon vector type (e.g. __simd64_int8_t);
751      these types are registered by arm_init_simd_builtin_types ().  In other
752      words, vector types defined in other ways e.g. via vector_size attribute
753      will get default mangled names.  */
754   const char *mangle;
755 
756   /* Internal type.  */
757   tree itype;
758 
759   /* Element type.  */
760   tree eltype;
761 
762   /* Machine mode the internal type maps to.  */
763   machine_mode mode;
764 
765   /* Qualifiers.  */
766   enum arm_type_qualifiers q;
767 };
768 
769 #define ENTRY(E, M, Q, S, T, G)		\
770   {E,					\
771    "__simd" #S "_" #T "_t",		\
772    #G "__simd" #S "_" #T "_t",		\
773    NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
774 static struct arm_simd_type_info arm_simd_types [] = {
775 #include "arm-simd-builtin-types.def"
776 };
777 #undef ENTRY
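
/* For example, an entry such as

     ENTRY (Int8x8_t, V8QI, none, 64, int8, 15)

   in arm-simd-builtin-types.def yields the element

     {Int8x8_t, "__simd64_int8_t", "15__simd64_int8_t",
      NULL_TREE, NULL_TREE, V8QImode, qualifier_none},

   where the numeric prefix in the mangled name is the length of the type
   name, as required by the source-name encoding used for mangling.  The
   itype and eltype fields are filled in by arm_init_simd_builtin_types.  */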
778 
779 /* The user-visible __fp16 type.  */
780 tree arm_fp16_type_node = NULL_TREE;
781 static tree arm_simd_intOI_type_node = NULL_TREE;
782 static tree arm_simd_intEI_type_node = NULL_TREE;
783 static tree arm_simd_intCI_type_node = NULL_TREE;
784 static tree arm_simd_intXI_type_node = NULL_TREE;
785 static tree arm_simd_polyQI_type_node = NULL_TREE;
786 static tree arm_simd_polyHI_type_node = NULL_TREE;
787 static tree arm_simd_polyDI_type_node = NULL_TREE;
788 static tree arm_simd_polyTI_type_node = NULL_TREE;
789 
790 static const char *
791 arm_mangle_builtin_scalar_type (const_tree type)
792 {
793   int i = 0;
794 
795   while (arm_scalar_builtin_types[i] != NULL)
796     {
797       const char *name = arm_scalar_builtin_types[i];
798 
799       if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
800 	  && DECL_NAME (TYPE_NAME (type))
801 	  && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
802 	return arm_scalar_builtin_types[i];
803       i++;
804     }
805   return NULL;
806 }
807 
808 static const char *
809 arm_mangle_builtin_vector_type (const_tree type)
810 {
811   int i;
812   int nelts = sizeof (arm_simd_types) / sizeof (arm_simd_types[0]);
813 
814   for (i = 0; i < nelts; i++)
815     if (arm_simd_types[i].mode ==  TYPE_MODE (type)
816 	&& TYPE_NAME (type)
817 	&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
818 	&& DECL_NAME (TYPE_NAME (type))
819 	&& !strcmp
820 	     (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
821 	      arm_simd_types[i].name))
822       return arm_simd_types[i].mangle;
823 
824   return NULL;
825 }
826 
827 const char *
828 arm_mangle_builtin_type (const_tree type)
829 {
830   const char *mangle;
831   /* Walk through all the ARM builtin type tables to find the mangling for
832      the incoming type.  */
833   if ((mangle = arm_mangle_builtin_vector_type (type))
834       || (mangle = arm_mangle_builtin_scalar_type (type)))
835     return mangle;
836 
837   return NULL;
838 }
839 
840 static tree
841 arm_simd_builtin_std_type (machine_mode mode,
842 			   enum arm_type_qualifiers q)
843 {
844 #define QUAL_TYPE(M)  \
845   ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
846   switch (mode)
847     {
848     case E_QImode:
849       return QUAL_TYPE (QI);
850     case E_HImode:
851       return QUAL_TYPE (HI);
852     case E_SImode:
853       return QUAL_TYPE (SI);
854     case E_DImode:
855       return QUAL_TYPE (DI);
856     case E_TImode:
857       return QUAL_TYPE (TI);
858     case E_OImode:
859       return arm_simd_intOI_type_node;
860     case E_EImode:
861       return arm_simd_intEI_type_node;
862     case E_CImode:
863       return arm_simd_intCI_type_node;
864     case E_XImode:
865       return arm_simd_intXI_type_node;
866     case E_HFmode:
867       return arm_fp16_type_node;
868     case E_SFmode:
869       return float_type_node;
870     case E_DFmode:
871       return double_type_node;
872     default:
873       gcc_unreachable ();
874     }
875 #undef QUAL_TYPE
876 }
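
/* For instance, arm_simd_builtin_std_type (E_SImode, qualifier_none) returns
   intSI_type_node, while any other qualifier (e.g. qualifier_unsigned) maps
   E_SImode to unsigned_intSI_type_node.  */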
877 
878 static tree
879 arm_lookup_simd_builtin_type (machine_mode mode,
880 			      enum arm_type_qualifiers q)
881 {
882   int i;
883   int nelts = sizeof (arm_simd_types) / sizeof (arm_simd_types[0]);
884 
885   /* Non-poly scalar modes map to standard types not in the table.  */
886   if (q != qualifier_poly && !VECTOR_MODE_P (mode))
887     return arm_simd_builtin_std_type (mode, q);
888 
889   for (i = 0; i < nelts; i++)
890     if (arm_simd_types[i].mode == mode
891 	&& arm_simd_types[i].q == q)
892       return arm_simd_types[i].itype;
893 
894   /* Note that we won't have caught the underlying type for poly64x2_t
895      in the above table.  This gets default mangling.  */
896 
897   return NULL_TREE;
898 }
899 
900 static tree
901 arm_simd_builtin_type (machine_mode mode, bool unsigned_p, bool poly_p)
902 {
903   if (poly_p)
904     return arm_lookup_simd_builtin_type (mode, qualifier_poly);
905   else if (unsigned_p)
906     return arm_lookup_simd_builtin_type (mode, qualifier_unsigned);
907   else
908     return arm_lookup_simd_builtin_type (mode, qualifier_none);
909 }
910 
911 static void
912 arm_init_simd_builtin_types (void)
913 {
914   int i;
915   int nelts = sizeof (arm_simd_types) / sizeof (arm_simd_types[0]);
916   tree tdecl;
917 
918   /* Poly types are a world of their own.  In order to maintain the legacy
919      ABI, they get initialized using the old interface and don't get an
920      entry in our mangling table; consequently, they get default mangling.
921      As a further gotcha, poly8_t and poly16_t are signed types, while
922      poly64_t and poly128_t are unsigned types.  */
923   arm_simd_polyQI_type_node
924     = build_distinct_type_copy (intQI_type_node);
925   (*lang_hooks.types.register_builtin_type) (arm_simd_polyQI_type_node,
926 					     "__builtin_neon_poly8");
927   arm_simd_polyHI_type_node
928     = build_distinct_type_copy (intHI_type_node);
929   (*lang_hooks.types.register_builtin_type) (arm_simd_polyHI_type_node,
930 					     "__builtin_neon_poly16");
931   arm_simd_polyDI_type_node
932     = build_distinct_type_copy (unsigned_intDI_type_node);
933   (*lang_hooks.types.register_builtin_type) (arm_simd_polyDI_type_node,
934 					     "__builtin_neon_poly64");
935   arm_simd_polyTI_type_node
936     = build_distinct_type_copy (unsigned_intTI_type_node);
937   (*lang_hooks.types.register_builtin_type) (arm_simd_polyTI_type_node,
938 					     "__builtin_neon_poly128");
939 
940   /* Prevent front-ends from transforming poly vectors into string
941      literals.  */
942   TYPE_STRING_FLAG (arm_simd_polyQI_type_node) = false;
943   TYPE_STRING_FLAG (arm_simd_polyHI_type_node) = false;
944 
945   /* Init all the element types built by the front-end.  */
946   arm_simd_types[Int8x8_t].eltype = intQI_type_node;
947   arm_simd_types[Int8x16_t].eltype = intQI_type_node;
948   arm_simd_types[Int16x4_t].eltype = intHI_type_node;
949   arm_simd_types[Int16x8_t].eltype = intHI_type_node;
950   arm_simd_types[Int32x2_t].eltype = intSI_type_node;
951   arm_simd_types[Int32x4_t].eltype = intSI_type_node;
952   arm_simd_types[Int64x2_t].eltype = intDI_type_node;
953   arm_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
954   arm_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
955   arm_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
956   arm_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
957   arm_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
958   arm_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
959   arm_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;
960 
961   /* Init poly vector element types with scalar poly types.  */
962   arm_simd_types[Poly8x8_t].eltype = arm_simd_polyQI_type_node;
963   arm_simd_types[Poly8x16_t].eltype = arm_simd_polyQI_type_node;
964   arm_simd_types[Poly16x4_t].eltype = arm_simd_polyHI_type_node;
965   arm_simd_types[Poly16x8_t].eltype = arm_simd_polyHI_type_node;
966   /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
967      mangling.  */
968 
969   /* Continue with standard types.  */
970   /* The __builtin_simd{64,128}_float16 types are kept private unless
971      we have a scalar __fp16 type.  */
972   arm_simd_types[Float16x4_t].eltype = arm_fp16_type_node;
973   arm_simd_types[Float16x8_t].eltype = arm_fp16_type_node;
974   arm_simd_types[Float32x2_t].eltype = float_type_node;
975   arm_simd_types[Float32x4_t].eltype = float_type_node;
976 
977   for (i = 0; i < nelts; i++)
978     {
979       tree eltype = arm_simd_types[i].eltype;
980       machine_mode mode = arm_simd_types[i].mode;
981 
982       if (arm_simd_types[i].itype == NULL)
983 	arm_simd_types[i].itype =
984 	  build_distinct_type_copy
985 	    (build_vector_type (eltype, GET_MODE_NUNITS (mode)));
986 
987       tdecl = add_builtin_type (arm_simd_types[i].name,
988 				arm_simd_types[i].itype);
989       TYPE_NAME (arm_simd_types[i].itype) = tdecl;
990       SET_TYPE_STRUCTURAL_EQUALITY (arm_simd_types[i].itype);
991     }
992 
993 #define AARCH_BUILD_SIGNED_TYPE(mode)  \
994   make_signed_type (GET_MODE_PRECISION (mode));
995   arm_simd_intOI_type_node = AARCH_BUILD_SIGNED_TYPE (OImode);
996   arm_simd_intEI_type_node = AARCH_BUILD_SIGNED_TYPE (EImode);
997   arm_simd_intCI_type_node = AARCH_BUILD_SIGNED_TYPE (CImode);
998   arm_simd_intXI_type_node = AARCH_BUILD_SIGNED_TYPE (XImode);
999 #undef AARCH_BUILD_SIGNED_TYPE
1000 
1001   tdecl = add_builtin_type
1002 	    ("__builtin_neon_ei" , arm_simd_intEI_type_node);
1003   TYPE_NAME (arm_simd_intEI_type_node) = tdecl;
1004   tdecl = add_builtin_type
1005 	    ("__builtin_neon_oi" , arm_simd_intOI_type_node);
1006   TYPE_NAME (arm_simd_intOI_type_node) = tdecl;
1007   tdecl = add_builtin_type
1008 	    ("__builtin_neon_ci" , arm_simd_intCI_type_node);
1009   TYPE_NAME (arm_simd_intCI_type_node) = tdecl;
1010   tdecl = add_builtin_type
1011 	    ("__builtin_neon_xi" , arm_simd_intXI_type_node);
1012   TYPE_NAME (arm_simd_intXI_type_node) = tdecl;
1013 }
1014 
1015 static void
1016 arm_init_simd_builtin_scalar_types (void)
1017 {
1018   /* Define typedefs for all the standard scalar types.  */
1019   (*lang_hooks.types.register_builtin_type) (intQI_type_node,
1020 					     "__builtin_neon_qi");
1021   (*lang_hooks.types.register_builtin_type) (intHI_type_node,
1022 					     "__builtin_neon_hi");
1023   (*lang_hooks.types.register_builtin_type) (intSI_type_node,
1024 					     "__builtin_neon_si");
1025   (*lang_hooks.types.register_builtin_type) (float_type_node,
1026 					     "__builtin_neon_sf");
1027   (*lang_hooks.types.register_builtin_type) (intDI_type_node,
1028 					     "__builtin_neon_di");
1029   (*lang_hooks.types.register_builtin_type) (double_type_node,
1030 					     "__builtin_neon_df");
1031   (*lang_hooks.types.register_builtin_type) (intTI_type_node,
1032 					     "__builtin_neon_ti");
1033 
1034   /* Unsigned integer types for various mode sizes.  */
1035   (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
1036 					     "__builtin_neon_uqi");
1037   (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
1038 					     "__builtin_neon_uhi");
1039   (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
1040 					     "__builtin_neon_usi");
1041   (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
1042 					     "__builtin_neon_udi");
1043   (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
1044 					     "__builtin_neon_uti");
1045 }
1046 
1047 /* Set up a builtin.  It will use information stored in the argument struct D to
1048    derive the builtin's type signature and name.  It will append the name in D
1049    to the PREFIX passed and use these to create a builtin declaration that is
1050    then stored in 'arm_builtin_decls' under index FCODE.  This FCODE is also
1051    written back to D for future use.  */
1052 
1053 static void
1054 arm_init_builtin (unsigned int fcode, arm_builtin_datum *d,
1055 		  const char * prefix)
1056 {
1057   bool print_type_signature_p = false;
1058   char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
1059   char namebuf[60];
1060   tree ftype = NULL;
1061   tree fndecl = NULL;
1062 
1063   d->fcode = fcode;
1064 
1065   /* We must track two variables here.  op_num is
1066      the operand number as in the RTL pattern.  This is
1067      required to access the mode (e.g. V4SF mode) of the
1068      argument, from which the base type can be derived.
1069      arg_num is an index in to the qualifiers data, which
1070      gives qualifiers to the type (e.g. const unsigned).
1071      The reason these two variables may differ by one is the
1072      void return type.  While all return types take the 0th entry
1073      in the qualifiers array, there is no operand for them in the
1074      RTL pattern.  */
1075   int op_num = insn_data[d->code].n_operands - 1;
1076   int arg_num = d->qualifiers[0] & qualifier_void
1077     ? op_num + 1
1078     : op_num;
1079   tree return_type = void_type_node, args = void_list_node;
1080   tree eltype;
1081 
1082   /* Build a function type directly from the insn_data for this
1083      builtin.  The build_function_type () function takes care of
1084      removing duplicates for us.  */
1085   for (; op_num >= 0; arg_num--, op_num--)
1086     {
1087       machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
1088       enum arm_type_qualifiers qualifiers = d->qualifiers[arg_num];
1089 
1090       if (qualifiers & qualifier_unsigned)
1091 	{
1092 	  type_signature[arg_num] = 'u';
1093 	  print_type_signature_p = true;
1094 	}
1095       else if (qualifiers & qualifier_poly)
1096 	{
1097 	  type_signature[arg_num] = 'p';
1098 	  print_type_signature_p = true;
1099 	}
1100       else
1101 	type_signature[arg_num] = 's';
1102 
1103       /* Skip an internal operand for vget_{low, high}.  */
1104       if (qualifiers & qualifier_internal)
1105 	continue;
1106 
1107       /* Some builtins have different user-facing types
1108 	 for certain arguments, encoded in d->mode.  */
1109       if (qualifiers & qualifier_map_mode)
1110 	op_mode = d->mode;
1111 
1112       /* For pointers, we want a pointer to the basic type
1113 	 of the vector.  */
1114       if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
1115 	op_mode = GET_MODE_INNER (op_mode);
1116 
1117       /* For void pointers we already have nodes constructed by the midend.  */
1118       if (qualifiers & qualifier_void_pointer)
1119 	eltype = qualifiers & qualifier_const
1120 		 ? const_ptr_type_node : ptr_type_node;
1121       else
1122 	{
1123 	  eltype
1124 	    = arm_simd_builtin_type (op_mode,
1125 				     (qualifiers & qualifier_unsigned) != 0,
1126 				     (qualifiers & qualifier_poly) != 0);
1127 	  gcc_assert (eltype != NULL);
1128 
1129 	  /* Add qualifiers.  */
1130 	  if (qualifiers & qualifier_const)
1131 	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);
1132 
1133 	  if (qualifiers & qualifier_pointer)
1134 	    eltype = build_pointer_type (eltype);
1135 	}
1136       /* If we have reached arg_num == 0, we are at a non-void
1137 	 return type.  Otherwise, we are still processing
1138 	 arguments.  */
1139       if (arg_num == 0)
1140 	return_type = eltype;
1141       else
1142 	args = tree_cons (NULL_TREE, eltype, args);
1143     }
1144 
1145   ftype = build_function_type (return_type, args);
1146 
1147   gcc_assert (ftype != NULL);
1148 
1149   if (print_type_signature_p
1150       && IN_RANGE (fcode, ARM_BUILTIN_VFP_BASE, ARM_BUILTIN_ACLE_BASE - 1))
1151     snprintf (namebuf, sizeof (namebuf), "%s_%s_%s",
1152 	      prefix, d->name, type_signature);
1153   else
1154     snprintf (namebuf, sizeof (namebuf), "%s_%s",
1155 	      prefix, d->name);
1156 
1157   fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
1158 				 NULL, NULL_TREE);
1159   arm_builtin_decls[fcode] = fndecl;
1160 }
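
/* For example, with PREFIX "__builtin_neon" and a table entry named
   "vaddv8qi" whose qualifiers are all qualifier_none, the function above
   registers "__builtin_neon_vaddv8qi".  If any return or argument type is
   unsigned or polynomial, VFP and Neon builtins additionally get a signature
   suffix such as "_uuu", one character ('s', 'u' or 'p') per return and
   argument type.  */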
1161 
1162 /* Set up the ACLE builtins, including builtins for instructions that are not
1163    in the current target ISA, so that the user can compile particular modules
1164    with target-specific options that differ from the command-line options.
1165    Such builtins will be rejected in arm_expand_builtin.  */
1166 
1167 static void
1168 arm_init_acle_builtins (void)
1169 {
1170   unsigned int i, fcode = ARM_BUILTIN_ACLE_PATTERN_START;
1171 
1172   for (i = 0; i < ARRAY_SIZE (acle_builtin_data); i++, fcode++)
1173     {
1174       arm_builtin_datum *d = &acle_builtin_data[i];
1175       arm_init_builtin (fcode, d, "__builtin_arm");
1176     }
1177 }
1178 
1179 /* Set up all the NEON builtins, including builtins for instructions that are
1180    not in the current target ISA, so that the user can compile particular
1181    modules with target-specific options that differ from the command-line
1182    options.  Such builtins will be rejected in arm_expand_builtin.  */
1183 
1184 static void
1185 arm_init_neon_builtins (void)
1186 {
1187   unsigned int i, fcode = ARM_BUILTIN_NEON_PATTERN_START;
1188 
1189   arm_init_simd_builtin_types ();
1190 
1191   /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
1192      Therefore we need to preserve the old __builtin scalar types.  They can be
1193      removed once all the intrinsics become strongly typed using the qualifier
1194      system.  */
1195   arm_init_simd_builtin_scalar_types ();
1196 
1197   tree lane_check_fpr = build_function_type_list (void_type_node,
1198 						  intSI_type_node,
1199 						  intSI_type_node,
1200 						  NULL);
1201   arm_builtin_decls[ARM_BUILTIN_NEON_LANE_CHECK] =
1202       add_builtin_function ("__builtin_arm_lane_check", lane_check_fpr,
1203 			    ARM_BUILTIN_NEON_LANE_CHECK, BUILT_IN_MD,
1204 			    NULL, NULL_TREE);
1205 
1206   for (i = 0; i < ARRAY_SIZE (neon_builtin_data); i++, fcode++)
1207     {
1208       arm_builtin_datum *d = &neon_builtin_data[i];
1209       arm_init_builtin (fcode, d, "__builtin_neon");
1210     }
1211 }
1212 
1213 /* Set up all the scalar floating point builtins.  */
1214 
1215 static void
1216 arm_init_vfp_builtins (void)
1217 {
1218   unsigned int i, fcode = ARM_BUILTIN_VFP_PATTERN_START;
1219 
1220   for (i = 0; i < ARRAY_SIZE (vfp_builtin_data); i++, fcode++)
1221     {
1222       arm_builtin_datum *d = &vfp_builtin_data[i];
1223       arm_init_builtin (fcode, d, "__builtin_neon");
1224     }
1225 }
1226 
1227 static void
1228 arm_init_crypto_builtins (void)
1229 {
1230   tree V16UQI_type_node
1231     = arm_simd_builtin_type (V16QImode, true, false);
1232 
1233   tree V4USI_type_node
1234     = arm_simd_builtin_type (V4SImode, true, false);
1235 
1236   tree v16uqi_ftype_v16uqi
1237     = build_function_type_list (V16UQI_type_node, V16UQI_type_node,
1238 				NULL_TREE);
1239 
1240   tree v16uqi_ftype_v16uqi_v16uqi
1241 	= build_function_type_list (V16UQI_type_node, V16UQI_type_node,
1242 				    V16UQI_type_node, NULL_TREE);
1243 
1244   tree v4usi_ftype_v4usi
1245     = build_function_type_list (V4USI_type_node, V4USI_type_node,
1246 				NULL_TREE);
1247 
1248   tree v4usi_ftype_v4usi_v4usi
1249     = build_function_type_list (V4USI_type_node, V4USI_type_node,
1250 				V4USI_type_node, NULL_TREE);
1251 
1252   tree v4usi_ftype_v4usi_v4usi_v4usi
1253     = build_function_type_list (V4USI_type_node, V4USI_type_node,
1254 				V4USI_type_node, V4USI_type_node,
1255 				NULL_TREE);
1256 
1257   tree uti_ftype_udi_udi
1258     = build_function_type_list (unsigned_intTI_type_node,
1259 				unsigned_intDI_type_node,
1260 				unsigned_intDI_type_node,
1261 				NULL_TREE);
1262 
1263   #undef CRYPTO1
1264   #undef CRYPTO2
1265   #undef CRYPTO3
1266   #undef C
1267   #undef N
1268   #undef CF
1269   #undef FT1
1270   #undef FT2
1271   #undef FT3
1272 
1273   #define C(U) \
1274     ARM_BUILTIN_CRYPTO_##U
1275   #define N(L) \
1276     "__builtin_arm_crypto_"#L
1277   #define FT1(R, A) \
1278     R##_ftype_##A
1279   #define FT2(R, A1, A2) \
1280     R##_ftype_##A1##_##A2
1281   #define FT3(R, A1, A2, A3) \
1282     R##_ftype_##A1##_##A2##_##A3
1283   #define CRYPTO1(L, U, R, A) \
1284     arm_builtin_decls[C (U)] \
1285       = add_builtin_function (N (L), FT1 (R, A), \
1286 		  C (U), BUILT_IN_MD, NULL, NULL_TREE);
1287   #define CRYPTO2(L, U, R, A1, A2)  \
1288     arm_builtin_decls[C (U)]	\
1289       = add_builtin_function (N (L), FT2 (R, A1, A2), \
1290 		  C (U), BUILT_IN_MD, NULL, NULL_TREE);
1291 
1292   #define CRYPTO3(L, U, R, A1, A2, A3) \
1293     arm_builtin_decls[C (U)]	   \
1294       = add_builtin_function (N (L), FT3 (R, A1, A2, A3), \
1295 				  C (U), BUILT_IN_MD, NULL, NULL_TREE);
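
  /* As an illustration, a crypto.def entry of the form

       CRYPTO2 (aesd, AESD, v16uqi, v16uqi, v16uqi)

     expands through the macros above to

       arm_builtin_decls[ARM_BUILTIN_CRYPTO_AESD]
         = add_builtin_function ("__builtin_arm_crypto_aesd",
                                 v16uqi_ftype_v16uqi_v16uqi,
                                 ARM_BUILTIN_CRYPTO_AESD, BUILT_IN_MD,
                                 NULL, NULL_TREE);  */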
1296   #include "crypto.def"
1297 
1298   #undef CRYPTO1
1299   #undef CRYPTO2
1300   #undef CRYPTO3
1301   #undef C
1302   #undef N
1303   #undef FT1
1304   #undef FT2
1305   #undef FT3
1306 }
1307 
1308 #undef NUM_DREG_TYPES
1309 #undef NUM_QREG_TYPES
1310 
1311 #define def_mbuiltin(FLAG, NAME, TYPE, CODE)				\
1312   do									\
1313     {									\
1314       if (FLAG == isa_nobit						\
1315 	  || bitmap_bit_p (arm_active_target.isa, FLAG))		\
1316 	{								\
1317 	  tree bdecl;							\
1318 	  bdecl = add_builtin_function ((NAME), (TYPE), (CODE),		\
1319 					BUILT_IN_MD, NULL, NULL_TREE);	\
1320 	  arm_builtin_decls[CODE] = bdecl;				\
1321 	}								\
1322     }									\
1323   while (0)
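
/* Minimal usage sketch (with a hypothetical builtin and type): the declaration
   is only registered when the requested ISA feature is present in the active
   target, e.g.

     def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_example",
                   int_ftype_v8qi, ARM_BUILTIN_EXAMPLE);

   registers the builtin only if iWMMXt is enabled (isa_nobit entries are
   always registered).  */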
1324 
1325 struct builtin_description
1326 {
1327   const enum isa_feature   feature;
1328   const enum insn_code     icode;
1329   const char * const       name;
1330   const enum arm_builtins  code;
1331   const enum rtx_code      comparison;
1332   const unsigned int       flag;
1333 };
1334 
1335 static const struct builtin_description bdesc_2arg[] =
1336 {
1337 #define IWMMXT_BUILTIN(code, string, builtin) \
1338   { isa_bit_iwmmxt, CODE_FOR_##code, \
1339     "__builtin_arm_" string,			     \
1340     ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1341 
1342 #define IWMMXT2_BUILTIN(code, string, builtin) \
1343   { isa_bit_iwmmxt2, CODE_FOR_##code, \
1344     "__builtin_arm_" string,			      \
1345     ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1346 
1347   IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
1348   IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
1349   IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
1350   IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
1351   IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
1352   IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
1353   IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
1354   IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
1355   IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
1356   IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
1357   IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
1358   IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
1359   IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
1360   IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
1361   IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
1362   IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
1363   IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
1364   IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
1365   IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
1366   IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsm", WMULSM)
1367   IWMMXT_BUILTIN (umulv4hi3_highpart, "wmulum", WMULUM)
1368   IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
1369   IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
1370   IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
1371   IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
1372   IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
1373   IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
1374   IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
1375   IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
1376   IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
1377   IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
1378   IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
1379   IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
1380   IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
1381   IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
1382   IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
1383   IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
1384   IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
1385   IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
1386   IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
1387   IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
1388   IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
1389   IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
1390   IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
1391   IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
1392   IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
1393   IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
1394   IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
1395   IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
1396   IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
1397   IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
1398   IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
1399   IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
1400   IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
1401   IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
1402   IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
1403   IWMMXT2_BUILTIN (iwmmxt_waddsubhx, "waddsubhx", WADDSUBHX)
1404   IWMMXT2_BUILTIN (iwmmxt_wsubaddhx, "wsubaddhx", WSUBADDHX)
1405   IWMMXT2_BUILTIN (iwmmxt_wabsdiffb, "wabsdiffb", WABSDIFFB)
1406   IWMMXT2_BUILTIN (iwmmxt_wabsdiffh, "wabsdiffh", WABSDIFFH)
1407   IWMMXT2_BUILTIN (iwmmxt_wabsdiffw, "wabsdiffw", WABSDIFFW)
1408   IWMMXT2_BUILTIN (iwmmxt_avg4, "wavg4", WAVG4)
1409   IWMMXT2_BUILTIN (iwmmxt_avg4r, "wavg4r", WAVG4R)
1410   IWMMXT2_BUILTIN (iwmmxt_wmulwsm, "wmulwsm", WMULWSM)
1411   IWMMXT2_BUILTIN (iwmmxt_wmulwum, "wmulwum", WMULWUM)
1412   IWMMXT2_BUILTIN (iwmmxt_wmulwsmr, "wmulwsmr", WMULWSMR)
1413   IWMMXT2_BUILTIN (iwmmxt_wmulwumr, "wmulwumr", WMULWUMR)
1414   IWMMXT2_BUILTIN (iwmmxt_wmulwl, "wmulwl", WMULWL)
1415   IWMMXT2_BUILTIN (iwmmxt_wmulsmr, "wmulsmr", WMULSMR)
1416   IWMMXT2_BUILTIN (iwmmxt_wmulumr, "wmulumr", WMULUMR)
1417   IWMMXT2_BUILTIN (iwmmxt_wqmulm, "wqmulm", WQMULM)
1418   IWMMXT2_BUILTIN (iwmmxt_wqmulmr, "wqmulmr", WQMULMR)
1419   IWMMXT2_BUILTIN (iwmmxt_wqmulwm, "wqmulwm", WQMULWM)
1420   IWMMXT2_BUILTIN (iwmmxt_wqmulwmr, "wqmulwmr", WQMULWMR)
1421   IWMMXT_BUILTIN (iwmmxt_walignr0, "walignr0", WALIGNR0)
1422   IWMMXT_BUILTIN (iwmmxt_walignr1, "walignr1", WALIGNR1)
1423   IWMMXT_BUILTIN (iwmmxt_walignr2, "walignr2", WALIGNR2)
1424   IWMMXT_BUILTIN (iwmmxt_walignr3, "walignr3", WALIGNR3)
1425 
1426 #define IWMMXT_BUILTIN2(code, builtin) \
1427   { isa_bit_iwmmxt, CODE_FOR_##code, NULL, \
1428     ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1429 
1430 #define IWMMXT2_BUILTIN2(code, builtin) \
1431   { isa_bit_iwmmxt2, CODE_FOR_##code, NULL, \
1432     ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1433 
1434   IWMMXT2_BUILTIN2 (iwmmxt_waddbhusm, WADDBHUSM)
1435   IWMMXT2_BUILTIN2 (iwmmxt_waddbhusl, WADDBHUSL)
1436   IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
1437   IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
1438   IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
1439   IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
1440   IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
1441   IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
1442   IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
1443   IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)
1444 
1445 
1446 #define FP_BUILTIN(L, U) \
1447   {isa_nobit, CODE_FOR_##L, "__builtin_arm_"#L, ARM_BUILTIN_##U, \
1448    UNKNOWN, 0},
1449 
1450   FP_BUILTIN (get_fpscr, GET_FPSCR)
1451   FP_BUILTIN (set_fpscr, SET_FPSCR)
1452 #undef FP_BUILTIN
1453 
1454 #define CRYPTO_BUILTIN(L, U)					   \
1455   {isa_nobit, CODE_FOR_crypto_##L,	"__builtin_arm_crypto_"#L, \
1456    ARM_BUILTIN_CRYPTO_##U, UNKNOWN, 0},
1457 #undef CRYPTO1
1458 #undef CRYPTO2
1459 #undef CRYPTO3
1460 #define CRYPTO2(L, U, R, A1, A2) CRYPTO_BUILTIN (L, U)
1461 #define CRYPTO1(L, U, R, A)
1462 #define CRYPTO3(L, U, R, A1, A2, A3)
1463 #include "crypto.def"
1464 #undef CRYPTO1
1465 #undef CRYPTO2
1466 #undef CRYPTO3
1467 
1468 };
1469 
1470 static const struct builtin_description bdesc_1arg[] =
1471 {
1472   IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
1473   IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
1474   IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
1475   IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
1476   IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
1477   IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
1478   IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
1479   IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
1480   IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
1481   IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
1482   IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
1483   IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
1484   IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
1485   IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
1486   IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
1487   IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
1488   IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
1489   IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
1490   IWMMXT2_BUILTIN (iwmmxt_wabsv8qi3, "wabsb", WABSB)
1491   IWMMXT2_BUILTIN (iwmmxt_wabsv4hi3, "wabsh", WABSH)
1492   IWMMXT2_BUILTIN (iwmmxt_wabsv2si3, "wabsw", WABSW)
1493   IWMMXT_BUILTIN (tbcstv8qi, "tbcstb", TBCSTB)
1494   IWMMXT_BUILTIN (tbcstv4hi, "tbcsth", TBCSTH)
1495   IWMMXT_BUILTIN (tbcstv2si, "tbcstw", TBCSTW)
1496 
1497 #define CRYPTO1(L, U, R, A) CRYPTO_BUILTIN (L, U)
1498 #define CRYPTO2(L, U, R, A1, A2)
1499 #define CRYPTO3(L, U, R, A1, A2, A3)
1500 #include "crypto.def"
1501 #undef CRYPTO1
1502 #undef CRYPTO2
1503 #undef CRYPTO3
1504 };
1505 
1506 static const struct builtin_description bdesc_3arg[] =
1507 {
1508 #define CRYPTO3(L, U, R, A1, A2, A3) CRYPTO_BUILTIN (L, U)
1509 #define CRYPTO1(L, U, R, A)
1510 #define CRYPTO2(L, U, R, A1, A2)
1511 #include "crypto.def"
1512 #undef CRYPTO1
1513 #undef CRYPTO2
1514 #undef CRYPTO3
1515  };
1516 #undef CRYPTO_BUILTIN
1517 
1518 /* Set up all the iWMMXt builtins.  This is not called if
1519    TARGET_IWMMXT is zero.  */
1520 
1521 static void
1522 arm_init_iwmmxt_builtins (void)
1523 {
1524   const struct builtin_description * d;
1525   size_t i;
1526 
1527   tree V2SI_type_node = build_vector_type_for_mode (intSI_type_node, V2SImode);
1528   tree V4HI_type_node = build_vector_type_for_mode (intHI_type_node, V4HImode);
1529   tree V8QI_type_node = build_vector_type_for_mode (intQI_type_node, V8QImode);
1530 
1531   tree v8qi_ftype_v8qi_v8qi_int
1532     = build_function_type_list (V8QI_type_node,
1533 				V8QI_type_node, V8QI_type_node,
1534 				integer_type_node, NULL_TREE);
1535   tree v4hi_ftype_v4hi_int
1536     = build_function_type_list (V4HI_type_node,
1537 				V4HI_type_node, integer_type_node, NULL_TREE);
1538   tree v2si_ftype_v2si_int
1539     = build_function_type_list (V2SI_type_node,
1540 				V2SI_type_node, integer_type_node, NULL_TREE);
1541   tree v2si_ftype_di_di
1542     = build_function_type_list (V2SI_type_node,
1543 				long_long_integer_type_node,
1544 				long_long_integer_type_node,
1545 				NULL_TREE);
1546   tree di_ftype_di_int
1547     = build_function_type_list (long_long_integer_type_node,
1548 				long_long_integer_type_node,
1549 				integer_type_node, NULL_TREE);
1550   tree di_ftype_di_int_int
1551     = build_function_type_list (long_long_integer_type_node,
1552 				long_long_integer_type_node,
1553 				integer_type_node,
1554 				integer_type_node, NULL_TREE);
1555   tree int_ftype_v8qi
1556     = build_function_type_list (integer_type_node,
1557 				V8QI_type_node, NULL_TREE);
1558   tree int_ftype_v4hi
1559     = build_function_type_list (integer_type_node,
1560 				V4HI_type_node, NULL_TREE);
1561   tree int_ftype_v2si
1562     = build_function_type_list (integer_type_node,
1563 				V2SI_type_node, NULL_TREE);
1564   tree int_ftype_v8qi_int
1565     = build_function_type_list (integer_type_node,
1566 				V8QI_type_node, integer_type_node, NULL_TREE);
1567   tree int_ftype_v4hi_int
1568     = build_function_type_list (integer_type_node,
1569 				V4HI_type_node, integer_type_node, NULL_TREE);
1570   tree int_ftype_v2si_int
1571     = build_function_type_list (integer_type_node,
1572 				V2SI_type_node, integer_type_node, NULL_TREE);
1573   tree v8qi_ftype_v8qi_int_int
1574     = build_function_type_list (V8QI_type_node,
1575 				V8QI_type_node, integer_type_node,
1576 				integer_type_node, NULL_TREE);
1577   tree v4hi_ftype_v4hi_int_int
1578     = build_function_type_list (V4HI_type_node,
1579 				V4HI_type_node, integer_type_node,
1580 				integer_type_node, NULL_TREE);
1581   tree v2si_ftype_v2si_int_int
1582     = build_function_type_list (V2SI_type_node,
1583 				V2SI_type_node, integer_type_node,
1584 				integer_type_node, NULL_TREE);
1585   /* Miscellaneous.  */
1586   tree v8qi_ftype_v4hi_v4hi
1587     = build_function_type_list (V8QI_type_node,
1588 				V4HI_type_node, V4HI_type_node, NULL_TREE);
1589   tree v4hi_ftype_v2si_v2si
1590     = build_function_type_list (V4HI_type_node,
1591 				V2SI_type_node, V2SI_type_node, NULL_TREE);
1592   tree v8qi_ftype_v4hi_v8qi
1593     = build_function_type_list (V8QI_type_node,
1594 	                        V4HI_type_node, V8QI_type_node, NULL_TREE);
1595   tree v2si_ftype_v4hi_v4hi
1596     = build_function_type_list (V2SI_type_node,
1597 				V4HI_type_node, V4HI_type_node, NULL_TREE);
1598   tree v2si_ftype_v8qi_v8qi
1599     = build_function_type_list (V2SI_type_node,
1600 				V8QI_type_node, V8QI_type_node, NULL_TREE);
1601   tree v4hi_ftype_v4hi_di
1602     = build_function_type_list (V4HI_type_node,
1603 				V4HI_type_node, long_long_integer_type_node,
1604 				NULL_TREE);
1605   tree v2si_ftype_v2si_di
1606     = build_function_type_list (V2SI_type_node,
1607 				V2SI_type_node, long_long_integer_type_node,
1608 				NULL_TREE);
1609   tree di_ftype_void
1610     = build_function_type_list (long_long_unsigned_type_node, NULL_TREE);
1611   tree int_ftype_void
1612     = build_function_type_list (integer_type_node, NULL_TREE);
1613   tree di_ftype_v8qi
1614     = build_function_type_list (long_long_integer_type_node,
1615 				V8QI_type_node, NULL_TREE);
1616   tree di_ftype_v4hi
1617     = build_function_type_list (long_long_integer_type_node,
1618 				V4HI_type_node, NULL_TREE);
1619   tree di_ftype_v2si
1620     = build_function_type_list (long_long_integer_type_node,
1621 				V2SI_type_node, NULL_TREE);
1622   tree v2si_ftype_v4hi
1623     = build_function_type_list (V2SI_type_node,
1624 				V4HI_type_node, NULL_TREE);
1625   tree v4hi_ftype_v8qi
1626     = build_function_type_list (V4HI_type_node,
1627 				V8QI_type_node, NULL_TREE);
1628   tree v8qi_ftype_v8qi
1629     = build_function_type_list (V8QI_type_node,
1630 	                        V8QI_type_node, NULL_TREE);
1631   tree v4hi_ftype_v4hi
1632     = build_function_type_list (V4HI_type_node,
1633 	                        V4HI_type_node, NULL_TREE);
1634   tree v2si_ftype_v2si
1635     = build_function_type_list (V2SI_type_node,
1636 	                        V2SI_type_node, NULL_TREE);
1637 
1638   tree di_ftype_di_v4hi_v4hi
1639     = build_function_type_list (long_long_unsigned_type_node,
1640 				long_long_unsigned_type_node,
1641 				V4HI_type_node, V4HI_type_node,
1642 				NULL_TREE);
1643 
1644   tree di_ftype_v4hi_v4hi
1645     = build_function_type_list (long_long_unsigned_type_node,
1646 				V4HI_type_node,V4HI_type_node,
1647 				NULL_TREE);
1648 
1649   tree v2si_ftype_v2si_v4hi_v4hi
1650     = build_function_type_list (V2SI_type_node,
1651                                 V2SI_type_node, V4HI_type_node,
1652                                 V4HI_type_node, NULL_TREE);
1653 
1654   tree v2si_ftype_v2si_v8qi_v8qi
1655     = build_function_type_list (V2SI_type_node,
1656                                 V2SI_type_node, V8QI_type_node,
1657                                 V8QI_type_node, NULL_TREE);
1658 
1659   tree di_ftype_di_v2si_v2si
1660      = build_function_type_list (long_long_unsigned_type_node,
1661                                  long_long_unsigned_type_node,
1662                                  V2SI_type_node, V2SI_type_node,
1663                                  NULL_TREE);
1664 
1665    tree di_ftype_di_di_int
1666      = build_function_type_list (long_long_unsigned_type_node,
1667                                  long_long_unsigned_type_node,
1668                                  long_long_unsigned_type_node,
1669                                  integer_type_node, NULL_TREE);
1670 
1671    tree void_ftype_int
1672      = build_function_type_list (void_type_node,
1673                                  integer_type_node, NULL_TREE);
1674 
1675    tree v8qi_ftype_char
1676      = build_function_type_list (V8QI_type_node,
1677                                  signed_char_type_node, NULL_TREE);
1678 
1679    tree v4hi_ftype_short
1680      = build_function_type_list (V4HI_type_node,
1681                                  short_integer_type_node, NULL_TREE);
1682 
1683    tree v2si_ftype_int
1684      = build_function_type_list (V2SI_type_node,
1685                                  integer_type_node, NULL_TREE);
1686 
1687   /* Normal vector binops.  */
1688   tree v8qi_ftype_v8qi_v8qi
1689     = build_function_type_list (V8QI_type_node,
1690 				V8QI_type_node, V8QI_type_node, NULL_TREE);
1691   tree v4hi_ftype_v4hi_v4hi
1692     = build_function_type_list (V4HI_type_node,
1693 				V4HI_type_node,V4HI_type_node, NULL_TREE);
1694   tree v2si_ftype_v2si_v2si
1695     = build_function_type_list (V2SI_type_node,
1696 				V2SI_type_node, V2SI_type_node, NULL_TREE);
1697   tree di_ftype_di_di
1698     = build_function_type_list (long_long_unsigned_type_node,
1699 				long_long_unsigned_type_node,
1700 				long_long_unsigned_type_node,
1701 				NULL_TREE);
1702 
1703   /* Add all builtins that are more or less simple operations on two
1704      operands.  */
1705   for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1706     {
1707       /* Use one of the operands; the target can have a different mode for
1708 	 mask-generating compares.  */
1709       machine_mode mode;
1710       tree type;
1711 
1712       if (d->name == 0
1713 	  || !(d->feature == isa_bit_iwmmxt
1714 	       || d->feature == isa_bit_iwmmxt2))
1715 	continue;
1716 
1717       mode = insn_data[d->icode].operand[1].mode;
1718 
1719       switch (mode)
1720 	{
1721 	case E_V8QImode:
1722 	  type = v8qi_ftype_v8qi_v8qi;
1723 	  break;
1724 	case E_V4HImode:
1725 	  type = v4hi_ftype_v4hi_v4hi;
1726 	  break;
1727 	case E_V2SImode:
1728 	  type = v2si_ftype_v2si_v2si;
1729 	  break;
1730 	case E_DImode:
1731 	  type = di_ftype_di_di;
1732 	  break;
1733 
1734 	default:
1735 	  gcc_unreachable ();
1736 	}
1737 
1738       def_mbuiltin (d->feature, d->name, type, d->code);
1739     }
1740 
1741   /* Add the remaining MMX insns with somewhat more complicated types.  */
1742 #define iwmmx_mbuiltin(NAME, TYPE, CODE)			\
1743   def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_" NAME, \
1744 		(TYPE), ARM_BUILTIN_ ## CODE)
1745 
1746 #define iwmmx2_mbuiltin(NAME, TYPE, CODE)                      \
1747   def_mbuiltin (isa_bit_iwmmxt2, "__builtin_arm_" NAME, \
1748 		(TYPE),	ARM_BUILTIN_ ## CODE)
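
  /* For illustration: with the helper macros above, a registration such as

       iwmmx_mbuiltin ("wzero", di_ftype_void, WZERO);

     expands (after string concatenation) to

       def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_wzero",
		     (di_ftype_void), ARM_BUILTIN_WZERO);

     so every iWMMXt builtin below gets a "__builtin_arm_" prefix and a
     matching ARM_BUILTIN_* function code.  */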
1749 
1750   iwmmx_mbuiltin ("wzero", di_ftype_void, WZERO);
1751   iwmmx_mbuiltin ("setwcgr0", void_ftype_int, SETWCGR0);
1752   iwmmx_mbuiltin ("setwcgr1", void_ftype_int, SETWCGR1);
1753   iwmmx_mbuiltin ("setwcgr2", void_ftype_int, SETWCGR2);
1754   iwmmx_mbuiltin ("setwcgr3", void_ftype_int, SETWCGR3);
1755   iwmmx_mbuiltin ("getwcgr0", int_ftype_void, GETWCGR0);
1756   iwmmx_mbuiltin ("getwcgr1", int_ftype_void, GETWCGR1);
1757   iwmmx_mbuiltin ("getwcgr2", int_ftype_void, GETWCGR2);
1758   iwmmx_mbuiltin ("getwcgr3", int_ftype_void, GETWCGR3);
1759 
1760   iwmmx_mbuiltin ("wsllh", v4hi_ftype_v4hi_di, WSLLH);
1761   iwmmx_mbuiltin ("wsllw", v2si_ftype_v2si_di, WSLLW);
1762   iwmmx_mbuiltin ("wslld", di_ftype_di_di, WSLLD);
1763   iwmmx_mbuiltin ("wsllhi", v4hi_ftype_v4hi_int, WSLLHI);
1764   iwmmx_mbuiltin ("wsllwi", v2si_ftype_v2si_int, WSLLWI);
1765   iwmmx_mbuiltin ("wslldi", di_ftype_di_int, WSLLDI);
1766 
1767   iwmmx_mbuiltin ("wsrlh", v4hi_ftype_v4hi_di, WSRLH);
1768   iwmmx_mbuiltin ("wsrlw", v2si_ftype_v2si_di, WSRLW);
1769   iwmmx_mbuiltin ("wsrld", di_ftype_di_di, WSRLD);
1770   iwmmx_mbuiltin ("wsrlhi", v4hi_ftype_v4hi_int, WSRLHI);
1771   iwmmx_mbuiltin ("wsrlwi", v2si_ftype_v2si_int, WSRLWI);
1772   iwmmx_mbuiltin ("wsrldi", di_ftype_di_int, WSRLDI);
1773 
1774   iwmmx_mbuiltin ("wsrah", v4hi_ftype_v4hi_di, WSRAH);
1775   iwmmx_mbuiltin ("wsraw", v2si_ftype_v2si_di, WSRAW);
1776   iwmmx_mbuiltin ("wsrad", di_ftype_di_di, WSRAD);
1777   iwmmx_mbuiltin ("wsrahi", v4hi_ftype_v4hi_int, WSRAHI);
1778   iwmmx_mbuiltin ("wsrawi", v2si_ftype_v2si_int, WSRAWI);
1779   iwmmx_mbuiltin ("wsradi", di_ftype_di_int, WSRADI);
1780 
1781   iwmmx_mbuiltin ("wrorh", v4hi_ftype_v4hi_di, WRORH);
1782   iwmmx_mbuiltin ("wrorw", v2si_ftype_v2si_di, WRORW);
1783   iwmmx_mbuiltin ("wrord", di_ftype_di_di, WRORD);
1784   iwmmx_mbuiltin ("wrorhi", v4hi_ftype_v4hi_int, WRORHI);
1785   iwmmx_mbuiltin ("wrorwi", v2si_ftype_v2si_int, WRORWI);
1786   iwmmx_mbuiltin ("wrordi", di_ftype_di_int, WRORDI);
1787 
1788   iwmmx_mbuiltin ("wshufh", v4hi_ftype_v4hi_int, WSHUFH);
1789 
1790   iwmmx_mbuiltin ("wsadb", v2si_ftype_v2si_v8qi_v8qi, WSADB);
1791   iwmmx_mbuiltin ("wsadh", v2si_ftype_v2si_v4hi_v4hi, WSADH);
1792   iwmmx_mbuiltin ("wmadds", v2si_ftype_v4hi_v4hi, WMADDS);
1793   iwmmx2_mbuiltin ("wmaddsx", v2si_ftype_v4hi_v4hi, WMADDSX);
1794   iwmmx2_mbuiltin ("wmaddsn", v2si_ftype_v4hi_v4hi, WMADDSN);
1795   iwmmx_mbuiltin ("wmaddu", v2si_ftype_v4hi_v4hi, WMADDU);
1796   iwmmx2_mbuiltin ("wmaddux", v2si_ftype_v4hi_v4hi, WMADDUX);
1797   iwmmx2_mbuiltin ("wmaddun", v2si_ftype_v4hi_v4hi, WMADDUN);
1798   iwmmx_mbuiltin ("wsadbz", v2si_ftype_v8qi_v8qi, WSADBZ);
1799   iwmmx_mbuiltin ("wsadhz", v2si_ftype_v4hi_v4hi, WSADHZ);
1800 
1801   iwmmx_mbuiltin ("textrmsb", int_ftype_v8qi_int, TEXTRMSB);
1802   iwmmx_mbuiltin ("textrmsh", int_ftype_v4hi_int, TEXTRMSH);
1803   iwmmx_mbuiltin ("textrmsw", int_ftype_v2si_int, TEXTRMSW);
1804   iwmmx_mbuiltin ("textrmub", int_ftype_v8qi_int, TEXTRMUB);
1805   iwmmx_mbuiltin ("textrmuh", int_ftype_v4hi_int, TEXTRMUH);
1806   iwmmx_mbuiltin ("textrmuw", int_ftype_v2si_int, TEXTRMUW);
1807   iwmmx_mbuiltin ("tinsrb", v8qi_ftype_v8qi_int_int, TINSRB);
1808   iwmmx_mbuiltin ("tinsrh", v4hi_ftype_v4hi_int_int, TINSRH);
1809   iwmmx_mbuiltin ("tinsrw", v2si_ftype_v2si_int_int, TINSRW);
1810 
1811   iwmmx_mbuiltin ("waccb", di_ftype_v8qi, WACCB);
1812   iwmmx_mbuiltin ("wacch", di_ftype_v4hi, WACCH);
1813   iwmmx_mbuiltin ("waccw", di_ftype_v2si, WACCW);
1814 
1815   iwmmx_mbuiltin ("tmovmskb", int_ftype_v8qi, TMOVMSKB);
1816   iwmmx_mbuiltin ("tmovmskh", int_ftype_v4hi, TMOVMSKH);
1817   iwmmx_mbuiltin ("tmovmskw", int_ftype_v2si, TMOVMSKW);
1818 
1819   iwmmx2_mbuiltin ("waddbhusm", v8qi_ftype_v4hi_v8qi, WADDBHUSM);
1820   iwmmx2_mbuiltin ("waddbhusl", v8qi_ftype_v4hi_v8qi, WADDBHUSL);
1821 
1822   iwmmx_mbuiltin ("wpackhss", v8qi_ftype_v4hi_v4hi, WPACKHSS);
1823   iwmmx_mbuiltin ("wpackhus", v8qi_ftype_v4hi_v4hi, WPACKHUS);
1824   iwmmx_mbuiltin ("wpackwus", v4hi_ftype_v2si_v2si, WPACKWUS);
1825   iwmmx_mbuiltin ("wpackwss", v4hi_ftype_v2si_v2si, WPACKWSS);
1826   iwmmx_mbuiltin ("wpackdus", v2si_ftype_di_di, WPACKDUS);
1827   iwmmx_mbuiltin ("wpackdss", v2si_ftype_di_di, WPACKDSS);
1828 
1829   iwmmx_mbuiltin ("wunpckehub", v4hi_ftype_v8qi, WUNPCKEHUB);
1830   iwmmx_mbuiltin ("wunpckehuh", v2si_ftype_v4hi, WUNPCKEHUH);
1831   iwmmx_mbuiltin ("wunpckehuw", di_ftype_v2si, WUNPCKEHUW);
1832   iwmmx_mbuiltin ("wunpckehsb", v4hi_ftype_v8qi, WUNPCKEHSB);
1833   iwmmx_mbuiltin ("wunpckehsh", v2si_ftype_v4hi, WUNPCKEHSH);
1834   iwmmx_mbuiltin ("wunpckehsw", di_ftype_v2si, WUNPCKEHSW);
1835   iwmmx_mbuiltin ("wunpckelub", v4hi_ftype_v8qi, WUNPCKELUB);
1836   iwmmx_mbuiltin ("wunpckeluh", v2si_ftype_v4hi, WUNPCKELUH);
1837   iwmmx_mbuiltin ("wunpckeluw", di_ftype_v2si, WUNPCKELUW);
1838   iwmmx_mbuiltin ("wunpckelsb", v4hi_ftype_v8qi, WUNPCKELSB);
1839   iwmmx_mbuiltin ("wunpckelsh", v2si_ftype_v4hi, WUNPCKELSH);
1840   iwmmx_mbuiltin ("wunpckelsw", di_ftype_v2si, WUNPCKELSW);
1841 
1842   iwmmx_mbuiltin ("wmacs", di_ftype_di_v4hi_v4hi, WMACS);
1843   iwmmx_mbuiltin ("wmacsz", di_ftype_v4hi_v4hi, WMACSZ);
1844   iwmmx_mbuiltin ("wmacu", di_ftype_di_v4hi_v4hi, WMACU);
1845   iwmmx_mbuiltin ("wmacuz", di_ftype_v4hi_v4hi, WMACUZ);
1846 
1847   iwmmx_mbuiltin ("walign", v8qi_ftype_v8qi_v8qi_int, WALIGNI);
1848   iwmmx_mbuiltin ("tmia", di_ftype_di_int_int, TMIA);
1849   iwmmx_mbuiltin ("tmiaph", di_ftype_di_int_int, TMIAPH);
1850   iwmmx_mbuiltin ("tmiabb", di_ftype_di_int_int, TMIABB);
1851   iwmmx_mbuiltin ("tmiabt", di_ftype_di_int_int, TMIABT);
1852   iwmmx_mbuiltin ("tmiatb", di_ftype_di_int_int, TMIATB);
1853   iwmmx_mbuiltin ("tmiatt", di_ftype_di_int_int, TMIATT);
1854 
1855   iwmmx2_mbuiltin ("wabsb", v8qi_ftype_v8qi, WABSB);
1856   iwmmx2_mbuiltin ("wabsh", v4hi_ftype_v4hi, WABSH);
1857   iwmmx2_mbuiltin ("wabsw", v2si_ftype_v2si, WABSW);
1858 
1859   iwmmx2_mbuiltin ("wqmiabb", v2si_ftype_v2si_v4hi_v4hi, WQMIABB);
1860   iwmmx2_mbuiltin ("wqmiabt", v2si_ftype_v2si_v4hi_v4hi, WQMIABT);
1861   iwmmx2_mbuiltin ("wqmiatb", v2si_ftype_v2si_v4hi_v4hi, WQMIATB);
1862   iwmmx2_mbuiltin ("wqmiatt", v2si_ftype_v2si_v4hi_v4hi, WQMIATT);
1863 
1864   iwmmx2_mbuiltin ("wqmiabbn", v2si_ftype_v2si_v4hi_v4hi, WQMIABBN);
1865   iwmmx2_mbuiltin ("wqmiabtn", v2si_ftype_v2si_v4hi_v4hi, WQMIABTN);
1866   iwmmx2_mbuiltin ("wqmiatbn", v2si_ftype_v2si_v4hi_v4hi, WQMIATBN);
1867   iwmmx2_mbuiltin ("wqmiattn", v2si_ftype_v2si_v4hi_v4hi, WQMIATTN);
1868 
1869   iwmmx2_mbuiltin ("wmiabb", di_ftype_di_v4hi_v4hi, WMIABB);
1870   iwmmx2_mbuiltin ("wmiabt", di_ftype_di_v4hi_v4hi, WMIABT);
1871   iwmmx2_mbuiltin ("wmiatb", di_ftype_di_v4hi_v4hi, WMIATB);
1872   iwmmx2_mbuiltin ("wmiatt", di_ftype_di_v4hi_v4hi, WMIATT);
1873 
1874   iwmmx2_mbuiltin ("wmiabbn", di_ftype_di_v4hi_v4hi, WMIABBN);
1875   iwmmx2_mbuiltin ("wmiabtn", di_ftype_di_v4hi_v4hi, WMIABTN);
1876   iwmmx2_mbuiltin ("wmiatbn", di_ftype_di_v4hi_v4hi, WMIATBN);
1877   iwmmx2_mbuiltin ("wmiattn", di_ftype_di_v4hi_v4hi, WMIATTN);
1878 
1879   iwmmx2_mbuiltin ("wmiawbb", di_ftype_di_v2si_v2si, WMIAWBB);
1880   iwmmx2_mbuiltin ("wmiawbt", di_ftype_di_v2si_v2si, WMIAWBT);
1881   iwmmx2_mbuiltin ("wmiawtb", di_ftype_di_v2si_v2si, WMIAWTB);
1882   iwmmx2_mbuiltin ("wmiawtt", di_ftype_di_v2si_v2si, WMIAWTT);
1883 
1884   iwmmx2_mbuiltin ("wmiawbbn", di_ftype_di_v2si_v2si, WMIAWBBN);
1885   iwmmx2_mbuiltin ("wmiawbtn", di_ftype_di_v2si_v2si, WMIAWBTN);
1886   iwmmx2_mbuiltin ("wmiawtbn", di_ftype_di_v2si_v2si, WMIAWTBN);
1887   iwmmx2_mbuiltin ("wmiawttn", di_ftype_di_v2si_v2si, WMIAWTTN);
1888 
1889   iwmmx2_mbuiltin ("wmerge", di_ftype_di_di_int, WMERGE);
1890 
1891   iwmmx_mbuiltin ("tbcstb", v8qi_ftype_char, TBCSTB);
1892   iwmmx_mbuiltin ("tbcsth", v4hi_ftype_short, TBCSTH);
1893   iwmmx_mbuiltin ("tbcstw", v2si_ftype_int, TBCSTW);
1894 
1895 #undef iwmmx_mbuiltin
1896 #undef iwmmx2_mbuiltin
1897 }
1898 
1899 static void
1900 arm_init_fp16_builtins (void)
1901 {
1902   arm_fp16_type_node = make_node (REAL_TYPE);
1903   TYPE_PRECISION (arm_fp16_type_node) = GET_MODE_PRECISION (HFmode);
1904   layout_type (arm_fp16_type_node);
1905   if (arm_fp16_format)
1906     (*lang_hooks.types.register_builtin_type) (arm_fp16_type_node,
1907 					       "__fp16");
1908 }
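
/* Illustrative usage (a sketch, not part of this file), assuming the target
   selects an FP16 format, e.g. with -mfp16-format=ieee: once the type above
   is registered, user code can declare half-precision values directly:

     __fp16 h = 1.0f;
     float f = h;

   with conversions following the selected __fp16 format.  */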
1909 
1910 void
1911 arm_init_builtins (void)
1912 {
1913   if (TARGET_REALLY_IWMMXT)
1914     arm_init_iwmmxt_builtins ();
1915 
1916   /* This creates the arm_simd_floatHF_type_node, so it must come before
1917      arm_init_neon_builtins, which uses it.  */
1918   arm_init_fp16_builtins ();
1919 
1920   if (TARGET_MAYBE_HARD_FLOAT)
1921     {
1922       arm_init_neon_builtins ();
1923       arm_init_vfp_builtins ();
1924       arm_init_crypto_builtins ();
1925     }
1926 
1927   arm_init_acle_builtins ();
1928 
1929   if (TARGET_MAYBE_HARD_FLOAT)
1930     {
1931       tree ftype_set_fpscr
1932 	= build_function_type_list (void_type_node, unsigned_type_node, NULL);
1933       tree ftype_get_fpscr
1934 	= build_function_type_list (unsigned_type_node, NULL);
1935 
1936       arm_builtin_decls[ARM_BUILTIN_GET_FPSCR]
1937 	= add_builtin_function ("__builtin_arm_get_fpscr", ftype_get_fpscr,
1938 				ARM_BUILTIN_GET_FPSCR, BUILT_IN_MD, NULL, NULL_TREE);
1939       arm_builtin_decls[ARM_BUILTIN_SET_FPSCR]
1940 	= add_builtin_function ("__builtin_arm_set_fpscr", ftype_set_fpscr,
1941 				ARM_BUILTIN_SET_FPSCR, BUILT_IN_MD, NULL, NULL_TREE);
1942     }
1943 
1944   if (use_cmse)
1945     {
1946       tree ftype_cmse_nonsecure_caller
1947 	= build_function_type_list (unsigned_type_node, NULL);
1948       arm_builtin_decls[ARM_BUILTIN_CMSE_NONSECURE_CALLER]
1949 	= add_builtin_function ("__builtin_arm_cmse_nonsecure_caller",
1950 				ftype_cmse_nonsecure_caller,
1951 				ARM_BUILTIN_CMSE_NONSECURE_CALLER, BUILT_IN_MD,
1952 				NULL, NULL_TREE);
1953     }
1954 }
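
/* Illustrative usage of some of the builtins registered above (a sketch, not
   part of this file): on a target with hard-float support the FPSCR accessors
   can be called directly, e.g.

     unsigned int fpscr = __builtin_arm_get_fpscr ();
     __builtin_arm_set_fpscr (fpscr);

   and, when compiling with -mcmse, __builtin_arm_cmse_nonsecure_caller ()
   reports whether the calling context was non-secure (it underlies ACLE's
   cmse_nonsecure_caller).  */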
1955 
1956 /* Return the ARM builtin for CODE.  */
1957 
1958 tree
1959 arm_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
1960 {
1961   if (code >= ARM_BUILTIN_MAX)
1962     return error_mark_node;
1963 
1964   return arm_builtin_decls[code];
1965 }
1966 
1967 /* Errors in the source file can cause expand_expr to return const0_rtx
1968    where we expect a vector.  To avoid crashing, use one of the vector
1969    clear instructions.  */
1970 
1971 static rtx
1972 safe_vector_operand (rtx x, machine_mode mode)
1973 {
1974   if (x != const0_rtx)
1975     return x;
1976   x = gen_reg_rtx (mode);
1977 
1978   emit_insn (gen_iwmmxt_clrdi (mode == DImode ? x
1979 			       : gen_rtx_SUBREG (DImode, x, 0)));
1980   return x;
1981 }
1982 
1983 /* Function to expand ternary builtins.  */
1984 static rtx
1985 arm_expand_ternop_builtin (enum insn_code icode,
1986                            tree exp, rtx target)
1987 {
1988   rtx pat;
1989   tree arg0 = CALL_EXPR_ARG (exp, 0);
1990   tree arg1 = CALL_EXPR_ARG (exp, 1);
1991   tree arg2 = CALL_EXPR_ARG (exp, 2);
1992 
1993   rtx op0 = expand_normal (arg0);
1994   rtx op1 = expand_normal (arg1);
1995   rtx op2 = expand_normal (arg2);
1996   rtx op3 = NULL_RTX;
1997 
1998   /* The sha1c, sha1p, sha1m crypto builtins require a different vec_select
1999      lane operand depending on endianness.  */
2000   bool builtin_sha1cpm_p = false;
2001 
2002   if (insn_data[icode].n_operands == 5)
2003     {
2004       gcc_assert (icode == CODE_FOR_crypto_sha1c
2005                   || icode == CODE_FOR_crypto_sha1p
2006                   || icode == CODE_FOR_crypto_sha1m);
2007       builtin_sha1cpm_p = true;
2008     }
2009   machine_mode tmode = insn_data[icode].operand[0].mode;
2010   machine_mode mode0 = insn_data[icode].operand[1].mode;
2011   machine_mode mode1 = insn_data[icode].operand[2].mode;
2012   machine_mode mode2 = insn_data[icode].operand[3].mode;
2013 
2014 
2015   if (VECTOR_MODE_P (mode0))
2016     op0 = safe_vector_operand (op0, mode0);
2017   if (VECTOR_MODE_P (mode1))
2018     op1 = safe_vector_operand (op1, mode1);
2019   if (VECTOR_MODE_P (mode2))
2020     op2 = safe_vector_operand (op2, mode2);
2021 
2022   if (! target
2023       || GET_MODE (target) != tmode
2024       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2025     target = gen_reg_rtx (tmode);
2026 
2027   gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2028 	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode)
2029 	      && (GET_MODE (op2) == mode2 || GET_MODE (op2) == VOIDmode));
2030 
2031   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2032     op0 = copy_to_mode_reg (mode0, op0);
2033   if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2034     op1 = copy_to_mode_reg (mode1, op1);
2035   if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
2036     op2 = copy_to_mode_reg (mode2, op2);
2037   if (builtin_sha1cpm_p)
2038     op3 = GEN_INT (TARGET_BIG_END ? 1 : 0);
2039 
2040   if (builtin_sha1cpm_p)
2041     pat = GEN_FCN (icode) (target, op0, op1, op2, op3);
2042   else
2043     pat = GEN_FCN (icode) (target, op0, op1, op2);
2044   if (! pat)
2045     return 0;
2046   emit_insn (pat);
2047   return target;
2048 }
2049 
2050 /* Subroutine of arm_expand_builtin to take care of binop insns.  */
2051 
2052 static rtx
2053 arm_expand_binop_builtin (enum insn_code icode,
2054 			  tree exp, rtx target)
2055 {
2056   rtx pat;
2057   tree arg0 = CALL_EXPR_ARG (exp, 0);
2058   tree arg1 = CALL_EXPR_ARG (exp, 1);
2059   rtx op0 = expand_normal (arg0);
2060   rtx op1 = expand_normal (arg1);
2061   machine_mode tmode = insn_data[icode].operand[0].mode;
2062   machine_mode mode0 = insn_data[icode].operand[1].mode;
2063   machine_mode mode1 = insn_data[icode].operand[2].mode;
2064 
2065   if (VECTOR_MODE_P (mode0))
2066     op0 = safe_vector_operand (op0, mode0);
2067   if (VECTOR_MODE_P (mode1))
2068     op1 = safe_vector_operand (op1, mode1);
2069 
2070   if (! target
2071       || GET_MODE (target) != tmode
2072       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2073     target = gen_reg_rtx (tmode);
2074 
2075   gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2076 	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
2077 
2078   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2079     op0 = copy_to_mode_reg (mode0, op0);
2080   if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2081     op1 = copy_to_mode_reg (mode1, op1);
2082 
2083   pat = GEN_FCN (icode) (target, op0, op1);
2084   if (! pat)
2085     return 0;
2086   emit_insn (pat);
2087   return target;
2088 }
2089 
2090 /* Subroutine of arm_expand_builtin to take care of unop insns.  */
2091 
2092 static rtx
2093 arm_expand_unop_builtin (enum insn_code icode,
2094 			 tree exp, rtx target, int do_load)
2095 {
2096   rtx pat;
2097   tree arg0 = CALL_EXPR_ARG (exp, 0);
2098   rtx op0 = expand_normal (arg0);
2099   rtx op1 = NULL_RTX;
2100   machine_mode tmode = insn_data[icode].operand[0].mode;
2101   machine_mode mode0 = insn_data[icode].operand[1].mode;
2102   bool builtin_sha1h_p = false;
2103 
2104   if (insn_data[icode].n_operands == 3)
2105     {
2106       gcc_assert (icode == CODE_FOR_crypto_sha1h);
2107       builtin_sha1h_p = true;
2108     }
2109 
2110   if (! target
2111       || GET_MODE (target) != tmode
2112       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2113     target = gen_reg_rtx (tmode);
2114   if (do_load)
2115     op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
2116   else
2117     {
2118       if (VECTOR_MODE_P (mode0))
2119 	op0 = safe_vector_operand (op0, mode0);
2120 
2121       if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2122 	op0 = copy_to_mode_reg (mode0, op0);
2123     }
2124   if (builtin_sha1h_p)
2125     op1 = GEN_INT (TARGET_BIG_END ? 1 : 0);
2126 
2127   if (builtin_sha1h_p)
2128     pat = GEN_FCN (icode) (target, op0, op1);
2129   else
2130     pat = GEN_FCN (icode) (target, op0);
2131   if (! pat)
2132     return 0;
2133   emit_insn (pat);
2134   return target;
2135 }
2136 
2137 typedef enum {
2138   ARG_BUILTIN_COPY_TO_REG,
2139   ARG_BUILTIN_CONSTANT,
2140   ARG_BUILTIN_LANE_INDEX,
2141   ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX,
2142   ARG_BUILTIN_LANE_PAIR_INDEX,
2143   ARG_BUILTIN_NEON_MEMORY,
2144   ARG_BUILTIN_MEMORY,
2145   ARG_BUILTIN_STOP
2146 } builtin_arg;
2147 
2148 
2149 /* EXP is a pointer argument to a Neon load or store intrinsic.  Derive
2150    and return an expression for the accessed memory.
2151 
2152    The intrinsic function operates on a block of registers that has
2153    mode REG_MODE.  This block contains vectors of type TYPE_MODE.  The
2154    function references the memory at EXP of type TYPE and in mode
2155    MEM_MODE; this mode may be BLKmode if no more suitable mode is
2156    available.  */
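
/* A worked example of the arithmetic below, with illustrative values only:
   for a two-vector block of V8QI vectors, REG_MODE is 16 bytes and each
   vector is 8 bytes, so nvectors = 2.  If MEM_MODE == REG_MODE and the
   pointee is 1 byte wide, the access covers nelems = 8 * 2 / 1 = 16 elements
   and the MEM_REF is built with an element[16] array type; for a lane load
   or store (MEM_MODE != REG_MODE) it covers only nelems = nvectors = 2
   elements.  */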
2157 
2158 static tree
2159 neon_dereference_pointer (tree exp, tree type, machine_mode mem_mode,
2160 			  machine_mode reg_mode,
2161 			  machine_mode vector_mode)
2162 {
2163   HOST_WIDE_INT reg_size, vector_size, nvectors, nelems;
2164   tree elem_type, upper_bound, array_type;
2165 
2166   /* Work out the size of the register block in bytes.  */
2167   reg_size = GET_MODE_SIZE (reg_mode);
2168 
2169   /* Work out the size of each vector in bytes.  */
2170   vector_size = GET_MODE_SIZE (vector_mode);
2171 
2172   /* Work out how many vectors there are.  */
2173   gcc_assert (reg_size % vector_size == 0);
2174   nvectors = reg_size / vector_size;
2175 
2176   /* Work out the type of each element.  */
2177   gcc_assert (POINTER_TYPE_P (type));
2178   elem_type = TREE_TYPE (type);
2179 
2180   /* Work out how many elements are being loaded or stored.
2181      MEM_MODE == REG_MODE implies a one-to-one mapping between register
2182      and memory elements; anything else implies a lane load or store.  */
2183   if (mem_mode == reg_mode)
2184     nelems = vector_size * nvectors / int_size_in_bytes (elem_type);
2185   else
2186     nelems = nvectors;
2187 
2188   /* Create a type that describes the full access.  */
2189   upper_bound = build_int_cst (size_type_node, nelems - 1);
2190   array_type = build_array_type (elem_type, build_index_type (upper_bound));
2191 
2192   /* Dereference EXP using that type.  */
2193   return fold_build2 (MEM_REF, array_type, exp,
2194 		      build_int_cst (build_pointer_type (array_type), 0));
2195 }
2196 
2197 /* Expand the arguments of a builtin and emit the insn pattern ICODE.  */
2198 static rtx
2199 arm_expand_builtin_args (rtx target, machine_mode map_mode, int fcode,
2200 		      int icode, int have_retval, tree exp,
2201 		      builtin_arg *args)
2202 {
2203   rtx pat;
2204   tree arg[SIMD_MAX_BUILTIN_ARGS];
2205   rtx op[SIMD_MAX_BUILTIN_ARGS];
2206   machine_mode tmode = insn_data[icode].operand[0].mode;
2207   machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
2208   tree formals;
2209   int argc = 0;
2210   rtx_insn * insn;
2211 
2212   if (have_retval
2213       && (!target
2214 	  || GET_MODE (target) != tmode
2215 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
2216     target = gen_reg_rtx (tmode);
2217 
2218   formals = TYPE_ARG_TYPES (TREE_TYPE (arm_builtin_decls[fcode]));
2219 
2220   for (;;)
2221     {
2222       builtin_arg thisarg = args[argc];
2223 
2224       if (thisarg == ARG_BUILTIN_STOP)
2225 	break;
2226       else
2227 	{
2228 	  int opno = argc + have_retval;
2229 	  arg[argc] = CALL_EXPR_ARG (exp, argc);
2230 	  mode[argc] = insn_data[icode].operand[opno].mode;
2231 	  if (thisarg == ARG_BUILTIN_NEON_MEMORY)
2232             {
2233               machine_mode other_mode
2234 		= insn_data[icode].operand[1 - opno].mode;
2235               arg[argc] = neon_dereference_pointer (arg[argc],
2236 						    TREE_VALUE (formals),
2237 						    mode[argc], other_mode,
2238 						    map_mode);
2239             }
2240 
2241 	  /* Use EXPAND_MEMORY for ARG_BUILTIN_MEMORY and
2242 	     ARG_BUILTIN_NEON_MEMORY to ensure a MEM_P is returned.  */
2243 	  op[argc] = expand_expr (arg[argc], NULL_RTX, VOIDmode,
2244 				  ((thisarg == ARG_BUILTIN_MEMORY
2245 				    || thisarg == ARG_BUILTIN_NEON_MEMORY)
2246 				   ? EXPAND_MEMORY : EXPAND_NORMAL));
2247 
2248 	  switch (thisarg)
2249 	    {
2250 	    case ARG_BUILTIN_MEMORY:
2251 	    case ARG_BUILTIN_COPY_TO_REG:
2252 	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
2253 		op[argc] = convert_memory_address (Pmode, op[argc]);
2254 	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
2255 	      if (!(*insn_data[icode].operand[opno].predicate)
2256 		  (op[argc], mode[argc]))
2257 		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
2258 	      break;
2259 
2260 	    case ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX:
2261 	      gcc_assert (argc > 1);
2262 	      if (CONST_INT_P (op[argc]))
2263 		{
2264 		  neon_lane_bounds (op[argc], 0,
2265 				    GET_MODE_NUNITS (map_mode), exp);
2266 		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
2267 		  op[argc] =
2268 		    GEN_INT (NEON_ENDIAN_LANE_N (map_mode, INTVAL (op[argc])));
2269 		}
2270 	      goto constant_arg;
2271 
2272 	    case ARG_BUILTIN_LANE_INDEX:
2273 	      /* Previous argument must be a vector, which this indexes.  */
2274 	      gcc_assert (argc > 0);
2275 	      if (CONST_INT_P (op[argc]))
2276 		{
2277 		  machine_mode vmode = mode[argc - 1];
2278 		  neon_lane_bounds (op[argc], 0, GET_MODE_NUNITS (vmode), exp);
2279 		}
2280 	      /* If the lane index isn't a constant then error out.  */
2281 	      goto constant_arg;
2282 
2283 	    case ARG_BUILTIN_LANE_PAIR_INDEX:
2284 	      /* Previous argument must be a vector, which this indexes. The
2285 		 indexing will always select i and i+1 out of the vector, which
2286 		 puts a limit on i.  */
2287 	      gcc_assert (argc > 0);
2288 	      if (CONST_INT_P (op[argc]))
2289 		{
2290 		  machine_mode vmode = mode[argc - 1];
2291 		  neon_lane_bounds (op[argc], 0, GET_MODE_NUNITS (vmode) / 2, exp);
2292 		}
2293 	      /* If the lane index isn't a constant then the next
2294 		 case will error.  */
2295 	      /* Fall through.  */
2296 	    case ARG_BUILTIN_CONSTANT:
2297 constant_arg:
2298 	      if (!(*insn_data[icode].operand[opno].predicate)
2299 		  (op[argc], mode[argc]))
2300 		{
2301 		  error ("%Kargument %d must be a constant immediate",
2302 			 exp, argc + 1);
2303 		  /* We have failed to expand the pattern and are safely
2304 		     into invalid code.  But the mid-end will still try to
2305 		     build an assignment for this node while it expands,
2306 		     before stopping for the error, so just pass TARGET back
2307 		     to ensure a valid assignment.  */
2308 		  return target;
2309 		}
2310 	      break;
2311 
2312 	      case ARG_BUILTIN_NEON_MEMORY:
2313 	      /* Check if expand failed.  */
2314 	      if (op[argc] == const0_rtx)
2315 		return 0;
2316 	      gcc_assert (MEM_P (op[argc]));
2317 	      PUT_MODE (op[argc], mode[argc]);
2318 	      /* ??? arm_neon.h uses the same built-in functions for signed
2319 		 and unsigned accesses, casting where necessary.  This isn't
2320 		 alias safe.  */
2321 	      set_mem_alias_set (op[argc], 0);
2322 	      if (!(*insn_data[icode].operand[opno].predicate)
2323                    (op[argc], mode[argc]))
2324 		op[argc] = (replace_equiv_address
2325 			    (op[argc],
2326 			     copy_to_mode_reg (Pmode, XEXP (op[argc], 0))));
2327               break;
2328 
2329 	    case ARG_BUILTIN_STOP:
2330 	      gcc_unreachable ();
2331 	    }
2332 
2333 	  argc++;
2334 	}
2335     }
2336 
2337   if (have_retval)
2338     switch (argc)
2339       {
2340       case 1:
2341 	pat = GEN_FCN (icode) (target, op[0]);
2342 	break;
2343 
2344       case 2:
2345 	pat = GEN_FCN (icode) (target, op[0], op[1]);
2346 	break;
2347 
2348       case 3:
2349 	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
2350 	break;
2351 
2352       case 4:
2353 	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
2354 	break;
2355 
2356       case 5:
2357 	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
2358 	break;
2359 
2360       case 6:
2361 	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4], op[5]);
2362 	break;
2363 
2364       default:
2365 	gcc_unreachable ();
2366       }
2367   else
2368     switch (argc)
2369       {
2370       case 1:
2371 	pat = GEN_FCN (icode) (op[0]);
2372 	break;
2373 
2374       case 2:
2375 	pat = GEN_FCN (icode) (op[0], op[1]);
2376 	break;
2377 
2378       case 3:
2379 	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
2380 	break;
2381 
2382       case 4:
2383 	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
2384 	break;
2385 
2386       case 5:
2387 	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
2388 	break;
2389 
2390       case 6:
2391 	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
2392 	break;
2393 
2394       default:
2395 	gcc_unreachable ();
2396       }
2397 
2398   if (!pat)
2399     return 0;
2400 
2401   /* Check whether our current target implements the pattern chosen for this
2402      builtin and error out if not.  */
2403   start_sequence ();
2404   emit_insn (pat);
2405   insn = get_insns ();
2406   end_sequence ();
2407 
2408   if (recog_memoized (insn) < 0)
2409     error ("this builtin is not supported for this target");
2410   else
2411     emit_insn (insn);
2412 
2413   return target;
2414 }
2415 
2416 /* Expand a builtin.  These builtins are "special" because they don't have
2417    symbolic constants defined per-instruction or per instruction-variant.
2418    Instead, the required info is looked up in the ARM_BUILTIN_DATA record that
2419    is passed into the function.  */
2420 
2421 static rtx
2422 arm_expand_builtin_1 (int fcode, tree exp, rtx target,
2423 			   arm_builtin_datum *d)
2424 {
2425   enum insn_code icode = d->code;
2426   builtin_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
2427   int num_args = insn_data[d->code].n_operands;
2428   int is_void = 0;
2429   int k;
2430   bool neon = false;
2431 
2432   if (IN_RANGE (fcode, ARM_BUILTIN_VFP_BASE, ARM_BUILTIN_ACLE_BASE - 1))
2433     neon = true;
2434 
2435   is_void = !!(d->qualifiers[0] & qualifier_void);
2436 
2437   num_args += is_void;
2438 
2439   for (k = 1; k < num_args; k++)
2440     {
2441       /* We have four arrays of data, each indexed in a different fashion.
2442 	 qualifiers - element 0 always describes the function return type.
2443 	 operands - element 0 is either the operand for return value (if
2444 	 the function has a non-void return type) or the operand for the
2445 	 first argument.
2446 	 expr_args - element 0 always holds the first argument.
2447 	 args - element 0 is always used for the return type.  */
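      /* A concrete illustration (hypothetical shape, not a particular
	 builtin): for a non-void return value and two arguments, is_void
	 is 0 and num_args is 3, so k runs over 1 and 2.  At k == 1,
	 qualifiers[1] describes the first argument, operands[1] is that
	 argument's insn operand (operand 0 holds the result) and
	 expr_args[0] is the matching CALL_EXPR argument.  */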
2448       int qualifiers_k = k;
2449       int operands_k = k - is_void;
2450       int expr_args_k = k - 1;
2451 
2452       if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
2453 	args[k] = ARG_BUILTIN_LANE_INDEX;
2454       else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
2455 	args[k] = ARG_BUILTIN_LANE_PAIR_INDEX;
2456       else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
2457 	args[k] = ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX;
2458       else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
2459 	args[k] = ARG_BUILTIN_CONSTANT;
2460       else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
2461 	{
2462 	  rtx arg
2463 	    = expand_normal (CALL_EXPR_ARG (exp,
2464 					    (expr_args_k)));
2465 	  /* Handle constants only if the predicate allows it.  */
2466 	  bool op_const_int_p =
2467 	    (CONST_INT_P (arg)
2468 	     && (*insn_data[icode].operand[operands_k].predicate)
2469 	     (arg, insn_data[icode].operand[operands_k].mode));
2470 	  args[k] = op_const_int_p ? ARG_BUILTIN_CONSTANT : ARG_BUILTIN_COPY_TO_REG;
2471 	}
2472       else if (d->qualifiers[qualifiers_k] & qualifier_pointer)
2473 	{
2474 	  if (neon)
2475 	    args[k] = ARG_BUILTIN_NEON_MEMORY;
2476 	  else
2477 	    args[k] = ARG_BUILTIN_MEMORY;
2478 	}
2479       else
2480 	args[k] = ARG_BUILTIN_COPY_TO_REG;
2481     }
2482   args[k] = ARG_BUILTIN_STOP;
2483 
2484   /* The interface to arm_expand_builtin_args expects a 0 if
2485      the function is void, and a 1 if it is not.  */
2486   return arm_expand_builtin_args
2487     (target, d->mode, fcode, icode, !is_void, exp,
2488      &args[1]);
2489 }
2490 
2491 /* Expand an ACLE builtin, i.e. those registered only if their respective
2492    target constraints are met.  This check happens within
2493    arm_expand_builtin_args.  */
2494 
2495 static rtx
2496 arm_expand_acle_builtin (int fcode, tree exp, rtx target)
2497 {
2498 
2499   arm_builtin_datum *d
2500     = &acle_builtin_data[fcode - ARM_BUILTIN_ACLE_PATTERN_START];
2501 
2502   return arm_expand_builtin_1 (fcode, exp, target, d);
2503 }
2504 
2505 /* Expand a Neon builtin, i.e. those registered only if TARGET_NEON holds.
2506    Most of these are "special" because they don't have symbolic
2507    constants defined per-instruction or per instruction-variant.  Instead, the
2508    required info is looked up in the table neon_builtin_data.  */
2509 
2510 static rtx
2511 arm_expand_neon_builtin (int fcode, tree exp, rtx target)
2512 {
2513   if (fcode >= ARM_BUILTIN_NEON_BASE && ! TARGET_NEON)
2514     {
2515       fatal_error (input_location,
2516 		   "You must enable NEON instructions"
2517 		   " (e.g. %<-mfloat-abi=softfp%> %<-mfpu=neon%>)"
2518 		   " to use these intrinsics.");
2519       return const0_rtx;
2520     }
2521 
2522   if (fcode == ARM_BUILTIN_NEON_LANE_CHECK)
2523     {
2524       /* This builtin only checks the bounds of the lane index passed to some
2525 	 intrinsics implemented with GCC vector extensions in arm_neon.h.  */
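
      /* For illustration, the call shape consumed below is

	   lane_check_builtin (nlanes, lane);

	 where the first argument must be an INTEGER_CST giving the number of
	 lanes in the vector and the second is the lane index to validate
	 (lane_check_builtin stands for whichever name was registered for
	 ARM_BUILTIN_NEON_LANE_CHECK).  */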
2526 
2527       tree nlanes = CALL_EXPR_ARG (exp, 0);
2528       gcc_assert (TREE_CODE (nlanes) == INTEGER_CST);
2529       rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 1));
2530       if (CONST_INT_P (lane_idx))
2531 	neon_lane_bounds (lane_idx, 0, TREE_INT_CST_LOW (nlanes), exp);
2532       else
2533 	error ("%Klane index must be a constant immediate", exp);
2534       /* Don't generate any RTL.  */
2535       return const0_rtx;
2536     }
2537 
2538   arm_builtin_datum *d
2539     = &neon_builtin_data[fcode - ARM_BUILTIN_NEON_PATTERN_START];
2540 
2541   return arm_expand_builtin_1 (fcode, exp, target, d);
2542 }
2543 
2544 /* Expand a VFP builtin.  These builtins are treated like
2545    neon builtins except that the data is looked up in table
2546    VFP_BUILTIN_DATA.  */
2547 
2548 static rtx
2549 arm_expand_vfp_builtin (int fcode, tree exp, rtx target)
2550 {
2551   if (fcode >= ARM_BUILTIN_VFP_BASE && ! TARGET_HARD_FLOAT)
2552     {
2553       fatal_error (input_location,
2554 		   "You must enable VFP instructions"
2555 		   " to use these intrinsics.");
2556       return const0_rtx;
2557     }
2558 
2559   arm_builtin_datum *d
2560     = &vfp_builtin_data[fcode - ARM_BUILTIN_VFP_PATTERN_START];
2561 
2562   return arm_expand_builtin_1 (fcode, exp, target, d);
2563 }
2564 
2565 /* Expand an expression EXP that calls a built-in function,
2566    with result going to TARGET if that's convenient
2567    (and in mode MODE if that's convenient).
2568    SUBTARGET may be used as the target for computing one of EXP's operands.
2569    IGNORE is nonzero if the value is to be ignored.  */
2570 
2571 rtx
2572 arm_expand_builtin (tree exp,
2573 		    rtx target,
2574 		    rtx subtarget ATTRIBUTE_UNUSED,
2575 		    machine_mode mode ATTRIBUTE_UNUSED,
2576 		    int ignore ATTRIBUTE_UNUSED)
2577 {
2578   const struct builtin_description * d;
2579   enum insn_code    icode;
2580   tree              fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2581   tree              arg0;
2582   tree              arg1;
2583   tree              arg2;
2584   rtx               op0;
2585   rtx               op1;
2586   rtx               op2;
2587   rtx               pat;
2588   unsigned int      fcode = DECL_FUNCTION_CODE (fndecl);
2589   size_t            i;
2590   machine_mode tmode;
2591   machine_mode mode0;
2592   machine_mode mode1;
2593   machine_mode mode2;
2594   int opint;
2595   int selector;
2596   int mask;
2597   int imm;
2598 
2599   if (fcode >= ARM_BUILTIN_ACLE_BASE)
2600     return arm_expand_acle_builtin (fcode, exp, target);
2601 
2602   if (fcode >= ARM_BUILTIN_NEON_BASE)
2603     return arm_expand_neon_builtin (fcode, exp, target);
2604 
2605   if (fcode >= ARM_BUILTIN_VFP_BASE)
2606     return arm_expand_vfp_builtin (fcode, exp, target);
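
  /* Note: the cascade of range checks above relies on the function-code
     bases being laid out in increasing order, i.e.
     ARM_BUILTIN_VFP_BASE < ARM_BUILTIN_NEON_BASE < ARM_BUILTIN_ACLE_BASE,
     so testing from the highest base downwards classifies FCODE uniquely.  */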
2607 
2608   /* Check in the context of the function making the call whether the
2609      builtin is supported.  */
2610   if (fcode >= ARM_BUILTIN_CRYPTO_BASE
2611       && (!TARGET_CRYPTO || !TARGET_HARD_FLOAT))
2612     {
2613       fatal_error (input_location,
2614 		   "You must enable crypto instructions"
2615 		   " (e.g. include %<-mfloat-abi=softfp%> "
2616 		   "%<-mfpu=crypto-neon%>)"
2617 		   " to use these intrinsics.");
2618       return const0_rtx;
2619     }
2620 
2621   switch (fcode)
2622     {
2623     case ARM_BUILTIN_GET_FPSCR:
2624     case ARM_BUILTIN_SET_FPSCR:
2625       if (fcode == ARM_BUILTIN_GET_FPSCR)
2626 	{
2627 	  icode = CODE_FOR_get_fpscr;
2628 	  target = gen_reg_rtx (SImode);
2629 	  pat = GEN_FCN (icode) (target);
2630 	}
2631       else
2632 	{
2633 	  target = NULL_RTX;
2634 	  icode = CODE_FOR_set_fpscr;
2635 	  arg0 = CALL_EXPR_ARG (exp, 0);
2636 	  op0 = expand_normal (arg0);
2637 	  pat = GEN_FCN (icode) (force_reg (SImode, op0));
2638 	}
2639       emit_insn (pat);
2640       return target;
2641 
2642     case ARM_BUILTIN_CMSE_NONSECURE_CALLER:
2643       target = gen_reg_rtx (SImode);
2644       op0 = arm_return_addr (0, NULL_RTX);
2645       emit_insn (gen_andsi3 (target, op0, const1_rtx));
2646       op1 = gen_rtx_EQ (SImode, target, const0_rtx);
2647       emit_insn (gen_cstoresi4 (target, op1, target, const0_rtx));
2648       return target;
2649 
2650     case ARM_BUILTIN_TEXTRMSB:
2651     case ARM_BUILTIN_TEXTRMUB:
2652     case ARM_BUILTIN_TEXTRMSH:
2653     case ARM_BUILTIN_TEXTRMUH:
2654     case ARM_BUILTIN_TEXTRMSW:
2655     case ARM_BUILTIN_TEXTRMUW:
2656       icode = (fcode == ARM_BUILTIN_TEXTRMSB ? CODE_FOR_iwmmxt_textrmsb
2657 	       : fcode == ARM_BUILTIN_TEXTRMUB ? CODE_FOR_iwmmxt_textrmub
2658 	       : fcode == ARM_BUILTIN_TEXTRMSH ? CODE_FOR_iwmmxt_textrmsh
2659 	       : fcode == ARM_BUILTIN_TEXTRMUH ? CODE_FOR_iwmmxt_textrmuh
2660 	       : CODE_FOR_iwmmxt_textrmw);
2661 
2662       arg0 = CALL_EXPR_ARG (exp, 0);
2663       arg1 = CALL_EXPR_ARG (exp, 1);
2664       op0 = expand_normal (arg0);
2665       op1 = expand_normal (arg1);
2666       tmode = insn_data[icode].operand[0].mode;
2667       mode0 = insn_data[icode].operand[1].mode;
2668       mode1 = insn_data[icode].operand[2].mode;
2669 
2670       if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2671 	op0 = copy_to_mode_reg (mode0, op0);
2672       if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2673 	{
2674 	  /* @@@ better error message */
2675 	  error ("selector must be an immediate");
2676 	  return gen_reg_rtx (tmode);
2677 	}
2678 
2679       opint = INTVAL (op1);
2680       if (fcode == ARM_BUILTIN_TEXTRMSB || fcode == ARM_BUILTIN_TEXTRMUB)
2681 	{
2682 	  if (opint > 7 || opint < 0)
2683 	    error ("the range of selector should be in 0 to 7");
2684 	}
2685       else if (fcode == ARM_BUILTIN_TEXTRMSH || fcode == ARM_BUILTIN_TEXTRMUH)
2686 	{
2687 	  if (opint > 3 || opint < 0)
2688 	    error ("the range of selector should be in 0 to 3");
2689 	}
2690       else /* ARM_BUILTIN_TEXTRMSW || ARM_BUILTIN_TEXTRMUW.  */
2691 	{
2692 	  if (opint > 1 || opint < 0)
2693 	    error ("the range of selector should be in 0 to 1");
2694 	}
2695 
2696       if (target == 0
2697 	  || GET_MODE (target) != tmode
2698 	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2699 	target = gen_reg_rtx (tmode);
2700       pat = GEN_FCN (icode) (target, op0, op1);
2701       if (! pat)
2702 	return 0;
2703       emit_insn (pat);
2704       return target;
2705 
2706     case ARM_BUILTIN_WALIGNI:
2707       /* If op2 is an immediate, use iwmmxt_waligni, else iwmmxt_walignr.  */
2708       arg0 = CALL_EXPR_ARG (exp, 0);
2709       arg1 = CALL_EXPR_ARG (exp, 1);
2710       arg2 = CALL_EXPR_ARG (exp, 2);
2711       op0 = expand_normal (arg0);
2712       op1 = expand_normal (arg1);
2713       op2 = expand_normal (arg2);
2714       if (CONST_INT_P (op2))
2715         {
2716 	  icode = CODE_FOR_iwmmxt_waligni;
2717           tmode = insn_data[icode].operand[0].mode;
2718 	  mode0 = insn_data[icode].operand[1].mode;
2719 	  mode1 = insn_data[icode].operand[2].mode;
2720 	  mode2 = insn_data[icode].operand[3].mode;
2721           if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
2722 	    op0 = copy_to_mode_reg (mode0, op0);
2723           if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
2724 	    op1 = copy_to_mode_reg (mode1, op1);
2725           gcc_assert ((*insn_data[icode].operand[3].predicate) (op2, mode2));
2726 	  selector = INTVAL (op2);
2727 	  if (selector > 7 || selector < 0)
2728 	    error ("the range of selector should be in 0 to 7");
2729 	}
2730       else
2731         {
2732 	  icode = CODE_FOR_iwmmxt_walignr;
2733           tmode = insn_data[icode].operand[0].mode;
2734 	  mode0 = insn_data[icode].operand[1].mode;
2735 	  mode1 = insn_data[icode].operand[2].mode;
2736 	  mode2 = insn_data[icode].operand[3].mode;
2737           if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
2738 	    op0 = copy_to_mode_reg (mode0, op0);
2739           if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
2740 	    op1 = copy_to_mode_reg (mode1, op1);
2741           if (!(*insn_data[icode].operand[3].predicate) (op2, mode2))
2742 	    op2 = copy_to_mode_reg (mode2, op2);
2743 	}
2744       if (target == 0
2745 	  || GET_MODE (target) != tmode
2746 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
2747 	target = gen_reg_rtx (tmode);
2748       pat = GEN_FCN (icode) (target, op0, op1, op2);
2749       if (!pat)
2750 	return 0;
2751       emit_insn (pat);
2752       return target;
2753 
2754     case ARM_BUILTIN_TINSRB:
2755     case ARM_BUILTIN_TINSRH:
2756     case ARM_BUILTIN_TINSRW:
2757     case ARM_BUILTIN_WMERGE:
2758       icode = (fcode == ARM_BUILTIN_TINSRB ? CODE_FOR_iwmmxt_tinsrb
2759 	       : fcode == ARM_BUILTIN_TINSRH ? CODE_FOR_iwmmxt_tinsrh
2760 	       : fcode == ARM_BUILTIN_WMERGE ? CODE_FOR_iwmmxt_wmerge
2761 	       : CODE_FOR_iwmmxt_tinsrw);
2762       arg0 = CALL_EXPR_ARG (exp, 0);
2763       arg1 = CALL_EXPR_ARG (exp, 1);
2764       arg2 = CALL_EXPR_ARG (exp, 2);
2765       op0 = expand_normal (arg0);
2766       op1 = expand_normal (arg1);
2767       op2 = expand_normal (arg2);
2768       tmode = insn_data[icode].operand[0].mode;
2769       mode0 = insn_data[icode].operand[1].mode;
2770       mode1 = insn_data[icode].operand[2].mode;
2771       mode2 = insn_data[icode].operand[3].mode;
2772 
2773       if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2774 	op0 = copy_to_mode_reg (mode0, op0);
2775       if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2776 	op1 = copy_to_mode_reg (mode1, op1);
2777       if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
2778 	{
2779 	  error ("selector must be an immediate");
2780 	  return const0_rtx;
2781 	}
2782       if (icode == CODE_FOR_iwmmxt_wmerge)
2783 	{
2784 	  selector = INTVAL (op2);
2785 	  if (selector > 7 || selector < 0)
2786 	    error ("the range of selector should be in 0 to 7");
2787 	}
2788       if ((icode == CODE_FOR_iwmmxt_tinsrb)
2789 	  || (icode == CODE_FOR_iwmmxt_tinsrh)
2790 	  || (icode == CODE_FOR_iwmmxt_tinsrw))
2791         {
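	  /* The tinsrb/tinsrh/tinsrw patterns take a one-hot selection mask
	     rather than a lane number, so after range-checking the selector
	     convert it into that mask (e.g. selector 2 for tinsrh becomes
	     0x04).  */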
2792 	  mask = 0x01;
2793 	  selector= INTVAL (op2);
2794 	  if (icode == CODE_FOR_iwmmxt_tinsrb && (selector < 0 || selector > 7))
2795 	    error ("the range of selector should be in 0 to 7");
2796 	  else if (icode == CODE_FOR_iwmmxt_tinsrh && (selector < 0 ||selector > 3))
2797 	    error ("the range of selector should be in 0 to 3");
2798 	  else if (icode == CODE_FOR_iwmmxt_tinsrw && (selector < 0 ||selector > 1))
2799 	    error ("the range of selector should be in 0 to 1");
2800 	  mask <<= selector;
2801 	  op2 = GEN_INT (mask);
2802 	}
2803       if (target == 0
2804 	  || GET_MODE (target) != tmode
2805 	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2806 	target = gen_reg_rtx (tmode);
2807       pat = GEN_FCN (icode) (target, op0, op1, op2);
2808       if (! pat)
2809 	return 0;
2810       emit_insn (pat);
2811       return target;
2812 
2813     case ARM_BUILTIN_SETWCGR0:
2814     case ARM_BUILTIN_SETWCGR1:
2815     case ARM_BUILTIN_SETWCGR2:
2816     case ARM_BUILTIN_SETWCGR3:
2817       icode = (fcode == ARM_BUILTIN_SETWCGR0 ? CODE_FOR_iwmmxt_setwcgr0
2818 	       : fcode == ARM_BUILTIN_SETWCGR1 ? CODE_FOR_iwmmxt_setwcgr1
2819 	       : fcode == ARM_BUILTIN_SETWCGR2 ? CODE_FOR_iwmmxt_setwcgr2
2820 	       : CODE_FOR_iwmmxt_setwcgr3);
2821       arg0 = CALL_EXPR_ARG (exp, 0);
2822       op0 = expand_normal (arg0);
2823       mode0 = insn_data[icode].operand[0].mode;
2824       if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
2825         op0 = copy_to_mode_reg (mode0, op0);
2826       pat = GEN_FCN (icode) (op0);
2827       if (!pat)
2828 	return 0;
2829       emit_insn (pat);
2830       return 0;
2831 
2832     case ARM_BUILTIN_GETWCGR0:
2833     case ARM_BUILTIN_GETWCGR1:
2834     case ARM_BUILTIN_GETWCGR2:
2835     case ARM_BUILTIN_GETWCGR3:
2836       icode = (fcode == ARM_BUILTIN_GETWCGR0 ? CODE_FOR_iwmmxt_getwcgr0
2837 	       : fcode == ARM_BUILTIN_GETWCGR1 ? CODE_FOR_iwmmxt_getwcgr1
2838 	       : fcode == ARM_BUILTIN_GETWCGR2 ? CODE_FOR_iwmmxt_getwcgr2
2839 	       : CODE_FOR_iwmmxt_getwcgr3);
2840       tmode = insn_data[icode].operand[0].mode;
2841       if (target == 0
2842 	  || GET_MODE (target) != tmode
2843 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
2844         target = gen_reg_rtx (tmode);
2845       pat = GEN_FCN (icode) (target);
2846       if (!pat)
2847         return 0;
2848       emit_insn (pat);
2849       return target;
2850 
2851     case ARM_BUILTIN_WSHUFH:
2852       icode = CODE_FOR_iwmmxt_wshufh;
2853       arg0 = CALL_EXPR_ARG (exp, 0);
2854       arg1 = CALL_EXPR_ARG (exp, 1);
2855       op0 = expand_normal (arg0);
2856       op1 = expand_normal (arg1);
2857       tmode = insn_data[icode].operand[0].mode;
2858       mode1 = insn_data[icode].operand[1].mode;
2859       mode2 = insn_data[icode].operand[2].mode;
2860 
2861       if (! (*insn_data[icode].operand[1].predicate) (op0, mode1))
2862 	op0 = copy_to_mode_reg (mode1, op0);
2863       if (! (*insn_data[icode].operand[2].predicate) (op1, mode2))
2864 	{
2865 	  error ("mask must be an immediate");
2866 	  return const0_rtx;
2867 	}
2868       selector = INTVAL (op1);
2869       if (selector < 0 || selector > 255)
2870 	error ("the range of mask should be in 0 to 255");
2871       if (target == 0
2872 	  || GET_MODE (target) != tmode
2873 	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2874 	target = gen_reg_rtx (tmode);
2875       pat = GEN_FCN (icode) (target, op0, op1);
2876       if (! pat)
2877 	return 0;
2878       emit_insn (pat);
2879       return target;
2880 
2881     case ARM_BUILTIN_WMADDS:
2882       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmadds, exp, target);
2883     case ARM_BUILTIN_WMADDSX:
2884       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsx, exp, target);
2885     case ARM_BUILTIN_WMADDSN:
2886       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsn, exp, target);
2887     case ARM_BUILTIN_WMADDU:
2888       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddu, exp, target);
2889     case ARM_BUILTIN_WMADDUX:
2890       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddux, exp, target);
2891     case ARM_BUILTIN_WMADDUN:
2892       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddun, exp, target);
2893     case ARM_BUILTIN_WSADBZ:
2894       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz, exp, target);
2895     case ARM_BUILTIN_WSADHZ:
2896       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz, exp, target);
2897 
2898       /* Several three-argument builtins.  */
2899     case ARM_BUILTIN_WMACS:
2900     case ARM_BUILTIN_WMACU:
2901     case ARM_BUILTIN_TMIA:
2902     case ARM_BUILTIN_TMIAPH:
2903     case ARM_BUILTIN_TMIATT:
2904     case ARM_BUILTIN_TMIATB:
2905     case ARM_BUILTIN_TMIABT:
2906     case ARM_BUILTIN_TMIABB:
2907     case ARM_BUILTIN_WQMIABB:
2908     case ARM_BUILTIN_WQMIABT:
2909     case ARM_BUILTIN_WQMIATB:
2910     case ARM_BUILTIN_WQMIATT:
2911     case ARM_BUILTIN_WQMIABBN:
2912     case ARM_BUILTIN_WQMIABTN:
2913     case ARM_BUILTIN_WQMIATBN:
2914     case ARM_BUILTIN_WQMIATTN:
2915     case ARM_BUILTIN_WMIABB:
2916     case ARM_BUILTIN_WMIABT:
2917     case ARM_BUILTIN_WMIATB:
2918     case ARM_BUILTIN_WMIATT:
2919     case ARM_BUILTIN_WMIABBN:
2920     case ARM_BUILTIN_WMIABTN:
2921     case ARM_BUILTIN_WMIATBN:
2922     case ARM_BUILTIN_WMIATTN:
2923     case ARM_BUILTIN_WMIAWBB:
2924     case ARM_BUILTIN_WMIAWBT:
2925     case ARM_BUILTIN_WMIAWTB:
2926     case ARM_BUILTIN_WMIAWTT:
2927     case ARM_BUILTIN_WMIAWBBN:
2928     case ARM_BUILTIN_WMIAWBTN:
2929     case ARM_BUILTIN_WMIAWTBN:
2930     case ARM_BUILTIN_WMIAWTTN:
2931     case ARM_BUILTIN_WSADB:
2932     case ARM_BUILTIN_WSADH:
2933       icode = (fcode == ARM_BUILTIN_WMACS ? CODE_FOR_iwmmxt_wmacs
2934 	       : fcode == ARM_BUILTIN_WMACU ? CODE_FOR_iwmmxt_wmacu
2935 	       : fcode == ARM_BUILTIN_TMIA ? CODE_FOR_iwmmxt_tmia
2936 	       : fcode == ARM_BUILTIN_TMIAPH ? CODE_FOR_iwmmxt_tmiaph
2937 	       : fcode == ARM_BUILTIN_TMIABB ? CODE_FOR_iwmmxt_tmiabb
2938 	       : fcode == ARM_BUILTIN_TMIABT ? CODE_FOR_iwmmxt_tmiabt
2939 	       : fcode == ARM_BUILTIN_TMIATB ? CODE_FOR_iwmmxt_tmiatb
2940 	       : fcode == ARM_BUILTIN_TMIATT ? CODE_FOR_iwmmxt_tmiatt
2941 	       : fcode == ARM_BUILTIN_WQMIABB ? CODE_FOR_iwmmxt_wqmiabb
2942 	       : fcode == ARM_BUILTIN_WQMIABT ? CODE_FOR_iwmmxt_wqmiabt
2943 	       : fcode == ARM_BUILTIN_WQMIATB ? CODE_FOR_iwmmxt_wqmiatb
2944 	       : fcode == ARM_BUILTIN_WQMIATT ? CODE_FOR_iwmmxt_wqmiatt
2945 	       : fcode == ARM_BUILTIN_WQMIABBN ? CODE_FOR_iwmmxt_wqmiabbn
2946 	       : fcode == ARM_BUILTIN_WQMIABTN ? CODE_FOR_iwmmxt_wqmiabtn
2947 	       : fcode == ARM_BUILTIN_WQMIATBN ? CODE_FOR_iwmmxt_wqmiatbn
2948 	       : fcode == ARM_BUILTIN_WQMIATTN ? CODE_FOR_iwmmxt_wqmiattn
2949 	       : fcode == ARM_BUILTIN_WMIABB ? CODE_FOR_iwmmxt_wmiabb
2950 	       : fcode == ARM_BUILTIN_WMIABT ? CODE_FOR_iwmmxt_wmiabt
2951 	       : fcode == ARM_BUILTIN_WMIATB ? CODE_FOR_iwmmxt_wmiatb
2952 	       : fcode == ARM_BUILTIN_WMIATT ? CODE_FOR_iwmmxt_wmiatt
2953 	       : fcode == ARM_BUILTIN_WMIABBN ? CODE_FOR_iwmmxt_wmiabbn
2954 	       : fcode == ARM_BUILTIN_WMIABTN ? CODE_FOR_iwmmxt_wmiabtn
2955 	       : fcode == ARM_BUILTIN_WMIATBN ? CODE_FOR_iwmmxt_wmiatbn
2956 	       : fcode == ARM_BUILTIN_WMIATTN ? CODE_FOR_iwmmxt_wmiattn
2957 	       : fcode == ARM_BUILTIN_WMIAWBB ? CODE_FOR_iwmmxt_wmiawbb
2958 	       : fcode == ARM_BUILTIN_WMIAWBT ? CODE_FOR_iwmmxt_wmiawbt
2959 	       : fcode == ARM_BUILTIN_WMIAWTB ? CODE_FOR_iwmmxt_wmiawtb
2960 	       : fcode == ARM_BUILTIN_WMIAWTT ? CODE_FOR_iwmmxt_wmiawtt
2961 	       : fcode == ARM_BUILTIN_WMIAWBBN ? CODE_FOR_iwmmxt_wmiawbbn
2962 	       : fcode == ARM_BUILTIN_WMIAWBTN ? CODE_FOR_iwmmxt_wmiawbtn
2963 	       : fcode == ARM_BUILTIN_WMIAWTBN ? CODE_FOR_iwmmxt_wmiawtbn
2964 	       : fcode == ARM_BUILTIN_WMIAWTTN ? CODE_FOR_iwmmxt_wmiawttn
2965 	       : fcode == ARM_BUILTIN_WSADB ? CODE_FOR_iwmmxt_wsadb
2966 	       : CODE_FOR_iwmmxt_wsadh);
2967       arg0 = CALL_EXPR_ARG (exp, 0);
2968       arg1 = CALL_EXPR_ARG (exp, 1);
2969       arg2 = CALL_EXPR_ARG (exp, 2);
2970       op0 = expand_normal (arg0);
2971       op1 = expand_normal (arg1);
2972       op2 = expand_normal (arg2);
2973       tmode = insn_data[icode].operand[0].mode;
2974       mode0 = insn_data[icode].operand[1].mode;
2975       mode1 = insn_data[icode].operand[2].mode;
2976       mode2 = insn_data[icode].operand[3].mode;
2977 
2978       if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2979 	op0 = copy_to_mode_reg (mode0, op0);
2980       if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2981 	op1 = copy_to_mode_reg (mode1, op1);
2982       if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
2983 	op2 = copy_to_mode_reg (mode2, op2);
2984       if (target == 0
2985 	  || GET_MODE (target) != tmode
2986 	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2987 	target = gen_reg_rtx (tmode);
2988       pat = GEN_FCN (icode) (target, op0, op1, op2);
2989       if (! pat)
2990 	return 0;
2991       emit_insn (pat);
2992       return target;
2993 
2994     case ARM_BUILTIN_WZERO:
2995       target = gen_reg_rtx (DImode);
2996       emit_insn (gen_iwmmxt_clrdi (target));
2997       return target;
2998 
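    /* Shift and rotate builtins.  The *I forms (e.g. _mm_srli_pi16) take
       an immediate count, the others (e.g. _mm_srl_pi16) pass the count
       as an ordinary operand; both expand as two-operand patterns, but a
       constant count is range-checked first.  */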
2999     case ARM_BUILTIN_WSRLHI:
3000     case ARM_BUILTIN_WSRLWI:
3001     case ARM_BUILTIN_WSRLDI:
3002     case ARM_BUILTIN_WSLLHI:
3003     case ARM_BUILTIN_WSLLWI:
3004     case ARM_BUILTIN_WSLLDI:
3005     case ARM_BUILTIN_WSRAHI:
3006     case ARM_BUILTIN_WSRAWI:
3007     case ARM_BUILTIN_WSRADI:
3008     case ARM_BUILTIN_WRORHI:
3009     case ARM_BUILTIN_WRORWI:
3010     case ARM_BUILTIN_WRORDI:
3011     case ARM_BUILTIN_WSRLH:
3012     case ARM_BUILTIN_WSRLW:
3013     case ARM_BUILTIN_WSRLD:
3014     case ARM_BUILTIN_WSLLH:
3015     case ARM_BUILTIN_WSLLW:
3016     case ARM_BUILTIN_WSLLD:
3017     case ARM_BUILTIN_WSRAH:
3018     case ARM_BUILTIN_WSRAW:
3019     case ARM_BUILTIN_WSRAD:
3020     case ARM_BUILTIN_WRORH:
3021     case ARM_BUILTIN_WRORW:
3022     case ARM_BUILTIN_WRORD:
3023       icode = (fcode == ARM_BUILTIN_WSRLHI ? CODE_FOR_lshrv4hi3_iwmmxt
3024 	       : fcode == ARM_BUILTIN_WSRLWI ? CODE_FOR_lshrv2si3_iwmmxt
3025 	       : fcode == ARM_BUILTIN_WSRLDI ? CODE_FOR_lshrdi3_iwmmxt
3026 	       : fcode == ARM_BUILTIN_WSLLHI ? CODE_FOR_ashlv4hi3_iwmmxt
3027 	       : fcode == ARM_BUILTIN_WSLLWI ? CODE_FOR_ashlv2si3_iwmmxt
3028 	       : fcode == ARM_BUILTIN_WSLLDI ? CODE_FOR_ashldi3_iwmmxt
3029 	       : fcode == ARM_BUILTIN_WSRAHI ? CODE_FOR_ashrv4hi3_iwmmxt
3030 	       : fcode == ARM_BUILTIN_WSRAWI ? CODE_FOR_ashrv2si3_iwmmxt
3031 	       : fcode == ARM_BUILTIN_WSRADI ? CODE_FOR_ashrdi3_iwmmxt
3032 	       : fcode == ARM_BUILTIN_WRORHI ? CODE_FOR_rorv4hi3
3033 	       : fcode == ARM_BUILTIN_WRORWI ? CODE_FOR_rorv2si3
3034 	       : fcode == ARM_BUILTIN_WRORDI ? CODE_FOR_rordi3
3035 	       : fcode == ARM_BUILTIN_WSRLH  ? CODE_FOR_lshrv4hi3_di
3036 	       : fcode == ARM_BUILTIN_WSRLW  ? CODE_FOR_lshrv2si3_di
3037 	       : fcode == ARM_BUILTIN_WSRLD  ? CODE_FOR_lshrdi3_di
3038 	       : fcode == ARM_BUILTIN_WSLLH  ? CODE_FOR_ashlv4hi3_di
3039 	       : fcode == ARM_BUILTIN_WSLLW  ? CODE_FOR_ashlv2si3_di
3040 	       : fcode == ARM_BUILTIN_WSLLD  ? CODE_FOR_ashldi3_di
3041 	       : fcode == ARM_BUILTIN_WSRAH  ? CODE_FOR_ashrv4hi3_di
3042 	       : fcode == ARM_BUILTIN_WSRAW  ? CODE_FOR_ashrv2si3_di
3043 	       : fcode == ARM_BUILTIN_WSRAD  ? CODE_FOR_ashrdi3_di
3044 	       : fcode == ARM_BUILTIN_WRORH  ? CODE_FOR_rorv4hi3_di
3045 	       : fcode == ARM_BUILTIN_WRORW  ? CODE_FOR_rorv2si3_di
3046 	       : fcode == ARM_BUILTIN_WRORD  ? CODE_FOR_rordi3_di
3047 	       : CODE_FOR_nothing);
3048       arg1 = CALL_EXPR_ARG (exp, 1);
3049       op1 = expand_normal (arg1);
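      /* A VOIDmode operand here means the count is a compile-time
         constant (CONST_INTs carry no mode), so it can be checked now.  */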
3050       if (GET_MODE (op1) == VOIDmode)
3051 	{
3052 	  imm = INTVAL (op1);
3053 	  if ((fcode == ARM_BUILTIN_WRORHI || fcode == ARM_BUILTIN_WRORWI
3054 	       || fcode == ARM_BUILTIN_WRORH || fcode == ARM_BUILTIN_WRORW)
3055 	      && (imm < 0 || imm > 32))
3056 	    {
3057 	      if (fcode == ARM_BUILTIN_WRORHI)
3058 		error ("the range of count should be in 0 to 32.  please check the intrinsic _mm_rori_pi16 in code.");
3059 	      else if (fcode == ARM_BUILTIN_WRORWI)
3060 		error ("the range of count should be in 0 to 32.  please check the intrinsic _mm_rori_pi32 in code.");
3061 	      else if (fcode == ARM_BUILTIN_WRORH)
3062 		error ("the range of count should be in 0 to 32.  please check the intrinsic _mm_ror_pi16 in code.");
3063 	      else
3064 		error ("the range of count should be in 0 to 32.  please check the intrinsic _mm_ror_pi32 in code.");
3065 	    }
3066 	  else if ((fcode == ARM_BUILTIN_WRORDI || fcode == ARM_BUILTIN_WRORD)
3067 		   && (imm < 0 || imm > 64))
3068 	    {
3069 	      if (fcode == ARM_BUILTIN_WRORDI)
3070 		error ("the range of count should be in 0 to 64.  please check the intrinsic _mm_rori_si64 in code.");
3071 	      else
3072 		error ("the range of count should be in 0 to 64.  please check the intrinsic _mm_ror_si64 in code.");
3073 	    }
3074 	  else if (imm < 0)
3075 	    {
3076 	      if (fcode == ARM_BUILTIN_WSRLHI)
3077 		error ("the count should be no less than 0.  please check the intrinsic _mm_srli_pi16 in code.");
3078 	      else if (fcode == ARM_BUILTIN_WSRLWI)
3079 		error ("the count should be no less than 0.  please check the intrinsic _mm_srli_pi32 in code.");
3080 	      else if (fcode == ARM_BUILTIN_WSRLDI)
3081 		error ("the count should be no less than 0.  please check the intrinsic _mm_srli_si64 in code.");
3082 	      else if (fcode == ARM_BUILTIN_WSLLHI)
3083 		error ("the count should be no less than 0.  please check the intrinsic _mm_slli_pi16 in code.");
3084 	      else if (fcode == ARM_BUILTIN_WSLLWI)
3085 		error ("the count should be no less than 0.  please check the intrinsic _mm_slli_pi32 in code.");
3086 	      else if (fcode == ARM_BUILTIN_WSLLDI)
3087 		error ("the count should be no less than 0.  please check the intrinsic _mm_slli_si64 in code.");
3088 	      else if (fcode == ARM_BUILTIN_WSRAHI)
3089 		error ("the count should be no less than 0.  please check the intrinsic _mm_srai_pi16 in code.");
3090 	      else if (fcode == ARM_BUILTIN_WSRAWI)
3091 		error ("the count should be no less than 0.  please check the intrinsic _mm_srai_pi32 in code.");
3092 	      else if (fcode == ARM_BUILTIN_WSRADI)
3093 		error ("the count should be no less than 0.  please check the intrinsic _mm_srai_si64 in code.");
3094 	      else if (fcode == ARM_BUILTIN_WSRLH)
3095 		error ("the count should be no less than 0.  please check the intrinsic _mm_srl_pi16 in code.");
3096 	      else if (fcode == ARM_BUILTIN_WSRLW)
3097 		error ("the count should be no less than 0.  please check the intrinsic _mm_srl_pi32 in code.");
3098 	      else if (fcode == ARM_BUILTIN_WSRLD)
3099 		error ("the count should be no less than 0.  please check the intrinsic _mm_srl_si64 in code.");
3100 	      else if (fcode == ARM_BUILTIN_WSLLH)
3101 		error ("the count should be no less than 0.  please check the intrinsic _mm_sll_pi16 in code.");
3102 	      else if (fcode == ARM_BUILTIN_WSLLW)
3103 		error ("the count should be no less than 0.  please check the intrinsic _mm_sll_pi32 in code.");
3104 	      else if (fcode == ARM_BUILTIN_WSLLD)
3105 		error ("the count should be no less than 0.  please check the intrinsic _mm_sll_si64 in code.");
3106 	      else if (fcode == ARM_BUILTIN_WSRAH)
3107 		error ("the count should be no less than 0.  please check the intrinsic _mm_sra_pi16 in code.");
3108 	      else if (fcode == ARM_BUILTIN_WSRAW)
3109 		error ("the count should be no less than 0.  please check the intrinsic _mm_sra_pi32 in code.");
3110 	      else
3111 		error ("the count should be no less than 0.  please check the intrinsic _mm_sra_si64 in code.");
3112 	    }
3113 	}
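      /* Constant count or not, the expansion itself is an ordinary
         two-operand pattern.  */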
3114       return arm_expand_binop_builtin (icode, exp, target);
3115 
3116     default:
3117       break;
3118     }
3119 
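  /* Anything not handled explicitly above is table-driven: look the code
     up in the two-, one- and three-operand descriptor tables.  */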
3120   for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
3121     if (d->code == (enum arm_builtins) fcode)
3122       return arm_expand_binop_builtin (d->icode, exp, target);
3123 
3124   for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
3125     if (d->code == (enum arm_builtins) fcode)
3126       return arm_expand_unop_builtin (d->icode, exp, target, 0);
3127 
3128   for (i = 0, d = bdesc_3arg; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
3129     if (d->code == (enum arm_builtins) fcode)
3130       return arm_expand_ternop_builtin (d->icode, exp, target);
3131 
3132   /* @@@ Should really do something sensible here.  */
3133   return NULL_RTX;
3134 }
3135 
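/* Return the declaration of a NEON builtin that computes the scalar
   function FN element-wise on vectors, taking TYPE_IN and producing
   TYPE_OUT, or NULL_TREE if no suitable builtin exists; this is what the
   auto-vectorizer consults when it wants a vector form of a libm-style
   call.  */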
3136 tree
3137 arm_builtin_vectorized_function (unsigned int fn, tree type_out, tree type_in)
3138 {
3139   machine_mode in_mode, out_mode;
3140   int in_n, out_n;
3141   bool out_unsigned_p = TYPE_UNSIGNED (type_out);
3142 
3143   /* Can't provide any vectorized builtins when we can't use NEON.  */
3144   if (!TARGET_NEON)
3145     return NULL_TREE;
3146 
3147   if (TREE_CODE (type_out) != VECTOR_TYPE
3148       || TREE_CODE (type_in) != VECTOR_TYPE)
3149     return NULL_TREE;
3150 
3151   out_mode = TYPE_MODE (TREE_TYPE (type_out));
3152   out_n = TYPE_VECTOR_SUBPARTS (type_out);
3153   in_mode = TYPE_MODE (TREE_TYPE (type_in));
3154   in_n = TYPE_VECTOR_SUBPARTS (type_in);
3155 
3156 /* ARM_CHECK_BUILTIN_MODE and ARM_FIND_VRINT_VARIANT are used to find the
3157    decl of the vectorized builtin for the appropriate vector mode.
3158    NULL_TREE is returned if no such builtin is available.  */
3159 #undef ARM_CHECK_BUILTIN_MODE
3160 #define ARM_CHECK_BUILTIN_MODE(C)    \
3161   (TARGET_VFP5   \
3162    && flag_unsafe_math_optimizations \
3163    && ARM_CHECK_BUILTIN_MODE_1 (C))
3164 
3165 #undef ARM_CHECK_BUILTIN_MODE_1
3166 #define ARM_CHECK_BUILTIN_MODE_1(C) \
3167   (out_mode == SFmode && out_n == C \
3168    && in_mode == SFmode && in_n == C)
3169 
3170 #undef ARM_FIND_VRINT_VARIANT
3171 #define ARM_FIND_VRINT_VARIANT(N) \
3172   (ARM_CHECK_BUILTIN_MODE (2) \
3173     ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sf, false) \
3174     : (ARM_CHECK_BUILTIN_MODE (4) \
3175       ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sf, false) \
3176       : NULL_TREE))
3177 
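  /* floor, ceil, trunc and round map onto the NEON VRINTM (towards minus
     infinity), VRINTP (towards plus infinity), VRINTZ (towards zero) and
     VRINTA (to nearest, ties away from zero) instructions respectively.  */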
3178   switch (fn)
3179     {
3180     CASE_CFN_FLOOR:
3181       return ARM_FIND_VRINT_VARIANT (vrintm);
3182     CASE_CFN_CEIL:
3183       return ARM_FIND_VRINT_VARIANT (vrintp);
3184     CASE_CFN_TRUNC:
3185       return ARM_FIND_VRINT_VARIANT (vrintz);
3186     CASE_CFN_ROUND:
3187       return ARM_FIND_VRINT_VARIANT (vrinta);
3188 #undef ARM_CHECK_BUILTIN_MODE_1
3189 #define ARM_CHECK_BUILTIN_MODE_1(C) \
3190   (out_mode == SImode && out_n == C \
3191    && in_mode == SFmode && in_n == C)
3192 
3193 #define ARM_FIND_VCVT_VARIANT(N) \
3194   (ARM_CHECK_BUILTIN_MODE (2) \
3195    ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sfv2si, false) \
3196    : (ARM_CHECK_BUILTIN_MODE (4) \
3197      ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sfv4si, false) \
3198      : NULL_TREE))
3199 
3200 #define ARM_FIND_VCVTU_VARIANT(N) \
3201   (ARM_CHECK_BUILTIN_MODE (2) \
3202    ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv2sfv2si, false) \
3203    : (ARM_CHECK_BUILTIN_MODE (4) \
3204      ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv4sfv4si, false) \
3205      : NULL_TREE))
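    /* lround, lceil and lfloor become float-to-integer conversions with
       an explicit rounding mode: VCVTA (to nearest, ties away from zero),
       VCVTP (towards plus infinity) and VCVTM (towards minus infinity).
       The unsigned conversion is chosen when the result type is
       unsigned.  */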
3206     CASE_CFN_LROUND:
3207       return (out_unsigned_p
3208 	      ? ARM_FIND_VCVTU_VARIANT (vcvta)
3209 	      : ARM_FIND_VCVT_VARIANT (vcvta));
3210     CASE_CFN_LCEIL:
3211       return (out_unsigned_p
3212 	      ? ARM_FIND_VCVTU_VARIANT (vcvtp)
3213 	      : ARM_FIND_VCVT_VARIANT (vcvtp));
3214     CASE_CFN_LFLOOR:
3215       return (out_unsigned_p
3216 	      ? ARM_FIND_VCVTU_VARIANT (vcvtm)
3217 	      : ARM_FIND_VCVT_VARIANT (vcvtm));
3218 #undef ARM_CHECK_BUILTIN_MODE
3219 #define ARM_CHECK_BUILTIN_MODE(C, N) \
3220   (out_mode == N##mode && out_n == C \
3221    && in_mode == N##mode && in_n == C)
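    /* The byte-swap and copysign cases below are exact operations, so
       from here on the mode check only matches the element mode and
       count; it no longer requires VFP5 or -funsafe-math-optimizations.  */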
3222     case CFN_BUILT_IN_BSWAP16:
3223       if (ARM_CHECK_BUILTIN_MODE (4, HI))
3224 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4hi, false);
3225       else if (ARM_CHECK_BUILTIN_MODE (8, HI))
3226 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv8hi, false);
3227       else
3228 	return NULL_TREE;
3229     case CFN_BUILT_IN_BSWAP32:
3230       if (ARM_CHECK_BUILTIN_MODE (2, SI))
3231 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2si, false);
3232       else if (ARM_CHECK_BUILTIN_MODE (4, SI))
3233 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4si, false);
3234       else
3235 	return NULL_TREE;
3236     case CFN_BUILT_IN_BSWAP64:
3237       if (ARM_CHECK_BUILTIN_MODE (2, DI))
3238 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2di, false);
3239       else
3240 	return NULL_TREE;
3241     CASE_CFN_COPYSIGN:
3242       if (ARM_CHECK_BUILTIN_MODE (2, SF))
3243 	return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv2sf, false);
3244       else if (ARM_CHECK_BUILTIN_MODE (4, SF))
3245 	return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv4sf, false);
3246       else
3247 	return NULL_TREE;
3248 
3249     default:
3250       return NULL_TREE;
3251     }
3252   return NULL_TREE;
3253 }
3254 #undef ARM_FIND_VCVT_VARIANT
3255 #undef ARM_FIND_VCVTU_VARIANT
3256 #undef ARM_CHECK_BUILTIN_MODE
3257 #undef ARM_FIND_VRINT_VARIANT
3258 
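/* Fill in *HOLD, *CLEAR and *UPDATE with the statement trees needed to
   save, clear and restore the FP exception state around a C11 atomic
   compound assignment (feholdexcept/feclearexcept/feupdateenv-style
   semantics, expressed through the FPSCR builtins).  */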
3259 void
3260 arm_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
3261 {
3262   const unsigned ARM_FE_INVALID = 1;
3263   const unsigned ARM_FE_DIVBYZERO = 2;
3264   const unsigned ARM_FE_OVERFLOW = 4;
3265   const unsigned ARM_FE_UNDERFLOW = 8;
3266   const unsigned ARM_FE_INEXACT = 16;
3267   const unsigned HOST_WIDE_INT ARM_FE_ALL_EXCEPT = (ARM_FE_INVALID
3268 						    | ARM_FE_DIVBYZERO
3269 						    | ARM_FE_OVERFLOW
3270 						    | ARM_FE_UNDERFLOW
3271 						    | ARM_FE_INEXACT);
3272   const unsigned HOST_WIDE_INT ARM_FE_EXCEPT_SHIFT = 8;
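  /* In the FPSCR the cumulative exception flag bits occupy the low bits
     and the matching trap-enable bits sit ARM_FE_EXCEPT_SHIFT (8) bits
     above them, so the mask built below clears both sets at once.  */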
3273   tree fenv_var, get_fpscr, set_fpscr, mask, ld_fenv, masked_fenv;
3274   tree new_fenv_var, reload_fenv, restore_fnenv;
3275   tree update_call, atomic_feraiseexcept, hold_fnclex;
3276 
3277   if (!TARGET_HARD_FLOAT)
3278     return;
3279 
3280   /* Generate the equivalent of :
3281        unsigned int fenv_var;
3282        fenv_var = __builtin_arm_get_fpscr ();
3283 
3284        unsigned int masked_fenv;
3285        masked_fenv = fenv_var & mask;
3286 
3287        __builtin_arm_set_fpscr (masked_fenv);  */
3288 
3289   fenv_var = create_tmp_var_raw (unsigned_type_node);
3290   get_fpscr = arm_builtin_decls[ARM_BUILTIN_GET_FPSCR];
3291   set_fpscr = arm_builtin_decls[ARM_BUILTIN_SET_FPSCR];
3292   mask = build_int_cst (unsigned_type_node,
3293 			~((ARM_FE_ALL_EXCEPT << ARM_FE_EXCEPT_SHIFT)
3294 			  | ARM_FE_ALL_EXCEPT));
3295   ld_fenv = build2 (MODIFY_EXPR, unsigned_type_node,
3296 		    fenv_var, build_call_expr (get_fpscr, 0));
3297   masked_fenv = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_var, mask);
3298   hold_fnclex = build_call_expr (set_fpscr, 1, masked_fenv);
3299   *hold = build2 (COMPOUND_EXPR, void_type_node,
3300 		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
3301 		  hold_fnclex);
3302 
3303   /* Store the value of masked_fenv to clear the exceptions:
3304      __builtin_arm_set_fpscr (masked_fenv);  */
3305 
3306   *clear = build_call_expr (set_fpscr, 1, masked_fenv);
3307 
3308   /* Generate the equivalent of :
3309        unsigned int new_fenv_var;
3310        new_fenv_var = __builtin_arm_get_fpscr ();
3311 
3312        __builtin_arm_set_fpscr (fenv_var);
3313 
3314        __atomic_feraiseexcept (new_fenv_var);  */
3315 
3316   new_fenv_var = create_tmp_var_raw (unsigned_type_node);
3317   reload_fenv = build2 (MODIFY_EXPR, unsigned_type_node, new_fenv_var,
3318 			build_call_expr (get_fpscr, 0));
3319   restore_fnenv = build_call_expr (set_fpscr, 1, fenv_var);
3320   atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
3321   update_call = build_call_expr (atomic_feraiseexcept, 1,
3322 				 fold_convert (integer_type_node, new_fenv_var));
3323   *update = build2 (COMPOUND_EXPR, void_type_node,
3324 		    build2 (COMPOUND_EXPR, void_type_node,
3325 			    reload_fenv, restore_fnenv), update_call);
3326 }
3327 
3328 #include "gt-arm-builtins.h"
3329